foo.*
.project
.pydevproject
+/sla/trashcan
\ No newline at end of file
--- /dev/null
+from types import StringTypes
+try:
+ set
+except NameError:
+ from sets import Set
+ set = Set
+
+import time
+import datetime # Jordan
+#from manifold.util.parameter import Parameter, Mixed, python_type
+from manifold.util.predicate import Predicate, eq
+from itertools import ifilter
+
class Filter(set):
    """
    A Filter is a set of Predicate instances, implicitly AND-ed together
    when matching records.
    """

    #def __init__(self, s=()):
    #    super(Filter, self).__init__(s)

    @staticmethod
    def from_list(l):
        # Build a Filter from a list of predicate tuples (key, op, value).
        # On any malformed element the error is printed and None is returned.
        f = Filter()
        try:
            for element in l:
                f.add(Predicate(*element))
        except Exception, e:
            print "Error in setting Filter from list", e
            return None
        return f

    @staticmethod
    def from_dict(d):
        # Build a Filter from a dict: a key whose first character is an
        # operator listed in Predicate.operators carries that operator,
        # otherwise equality ('=') is assumed.
        f = Filter()
        for key, value in d.items():
            if key[0] in Predicate.operators.keys():
                f.add(Predicate(key[1:], key[0], value))
            else:
                f.add(Predicate(key, '=', value))
        return f

    def to_list(self):
        # Serialize each predicate (see Predicate.to_list); ordering is
        # unspecified since the underlying container is a set.
        ret = []
        for predicate in self:
            ret.append(predicate.to_list())
        return ret


    @staticmethod
    def from_clause(clause):
        """
        NOTE: We can only handle simple clauses formed of AND fields.
        """
        raise Exception, "Not implemented"

    def filter_by(self, predicate):
        # Add a predicate and return self so calls can be chained.
        self.add(predicate)
        return self

    def __str__(self):
        return ' AND '.join([str(pred) for pred in self])

    def __repr__(self):
        return '<Filter: %s>' % ' AND '.join([str(pred) for pred in self])

    def __key(self):
        # Hash identity built from the member predicates.
        # NOTE(review): set iteration order is arbitrary, so two equal
        # filters may produce different keys -- confirm intended.
        return tuple([hash(pred) for pred in self])

    def __hash__(self):
        return hash(self.__key())

    def __additem__(self, value):
        # NOTE(review): 'set' defines no __additem__ hook, so this type
        # check is never triggered by set.add(); the final call would
        # raise AttributeError if ever reached -- confirm intent.
        if value.__class__ != Predicate:
            raise TypeError("Element of class Predicate expected, received %s" % value.__class__.__name__)
        set.__additem__(self, value)

    def keys(self):
        # Set of field names referenced by at least one predicate.
        return set([x.key for x in self])

    # XXX THESE FUNCTIONS SHOULD ACCEPT MULTIPLE FIELD NAMES

    def has(self, key):
        # True iff some predicate bears the given key.
        for x in self:
            if x.key == key:
                return True
        return False

    def has_op(self, key, op):
        # True iff some predicate bears both the given key and operator.
        for x in self:
            if x.key == key and x.op == op:
                return True
        return False

    def has_eq(self, key):
        # Shortcut for has_op with the equality operator.
        return self.has_op(key, eq)

    def get(self, key):
        # All predicates bearing the given key.
        ret = []
        for x in self:
            if x.key == key:
                ret.append(x)
        return ret

    def delete(self, key):
        # Remove every predicate bearing the given key; collected in a
        # first pass so the set is not mutated while being iterated.
        to_del = []
        for x in self:
            if x.key == key:
                to_del.append(x)
        for x in to_del:
            self.remove(x)

        #self = filter(lambda x: x.key != key, self)

    def get_op(self, key, op):
        # Value of the first predicate matching key and operator; 'op'
        # may be a single operator or a collection of accepted operators.
        # Returns None when nothing matches.
        if isinstance(op, (list, tuple, set)):
            for x in self:
                if x.key == key and x.op in op:
                    return x.value
        else:
            for x in self:
                if x.key == key and x.op == op:
                    return x.value
        return None

    def get_eq(self, key):
        # Shortcut for get_op with the equality operator.
        return self.get_op(key, eq)

    def set_op(self, key, op, value):
        # Update in place the first predicate matching (key, op);
        # raises KeyError when no predicate matches.
        for x in self:
            if x.key == key and x.op == op:
                x.value = value
                return
        raise KeyError, key

    def set_eq(self, key, value):
        # Shortcut for set_op with the equality operator.
        return self.set_op(key, eq, value)

    def get_predicates(self, key):
        # XXX Would deserve returning a filter (cf usage in SFA gateway)
        ret = []
        for x in self:
            if x.key == key:
                ret.append(x)
        return ret

#    def filter(self, dic):
#        # We go through every filter sequentially
#        for predicate in self:
#            print "predicate", predicate
#            dic = predicate.filter(dic)
#        return dic

    def match(self, dic, ignore_missing=True):
        # True iff every predicate accepts the record (AND semantics).
        for predicate in self:
            if not predicate.match(dic, ignore_missing):
                return False
        return True

    def filter(self, l):
        # Keep only the records of l accepted by every predicate.
        output = []
        for x in l:
            if self.match(x):
                output.append(x)
        return output

    def get_field_names(self):
        # Union of the field names referenced by all predicates.
        field_names = set()
        for predicate in self:
            field_names |= predicate.get_field_names()
        return field_names
+
+#class OldFilter(Parameter, dict):
+# """
+# A type of parameter that represents a filter on one or more
+# columns of a database table.
+# Special features provide support for negation, upper and lower bounds,
+# as well as sorting and clipping.
+#
+#
+# fields should be a dictionary of field names and types.
+# As of PLCAPI-4.3-26, we provide support for filtering on
+# sequence types as well, with the special '&' and '|' modifiers.
+# example : fields = {'node_id': Parameter(int, "Node identifier"),
+# 'hostname': Parameter(int, "Fully qualified hostname", max = 255),
+# ...}
+#
+#
+# filter should be a dictionary of field names and values
+# representing the criteria for filtering.
+# example : filter = { 'hostname' : '*.edu' , site_id : [34,54] }
+# Whether the filter represents an intersection (AND) or a union (OR)
+# of these criteria is determined by the join_with argument
+# provided to the sql method below
+#
+# Special features:
+#
+# * a field starting with '&' or '|' should refer to a sequence type
+# the semantic is then that the object value (expected to be a list)
+# should contain all (&) or any (|) value specified in the corresponding
+# filter value. See other examples below.
+# example : filter = { '|role_ids' : [ 20, 40 ] }
+# example : filter = { '|roles' : ['tech', 'pi'] }
+# example : filter = { '&roles' : ['admin', 'tech'] }
+# example : filter = { '&roles' : 'tech' }
+#
+# * a field starting with the ~ character means negation.
+# example : filter = { '~peer_id' : None }
+#
+# * a field starting with < [ ] or > means lower than or greater than
+# < > uses strict comparison
+# [ ] is for using <= or >= instead
+# example : filter = { ']event_id' : 2305 }
+# example : filter = { '>time' : 1178531418 }
+# in this example the integer value denotes a unix timestamp
+#
+# * if a value is a sequence type, then it should represent
+# a list of possible values for that field
+# example : filter = { 'node_id' : [12,34,56] }
+#
+# * a (string) value containing either a * or a % character is
+# treated as a (sql) pattern; * are replaced with % that is the
+# SQL wildcard character.
+# example : filter = { 'hostname' : '*.jp' }
+#
+# * the filter's keys starting with '-' are special and relate to sorting and clipping
+# * '-SORT' : a field name, or an ordered list of field names that are used for sorting
+# these fields may start with + (default) or - for denoting increasing or decreasing order
+# example : filter = { '-SORT' : [ '+node_id', '-hostname' ] }
+# * '-OFFSET' : the number of first rows to be omitted
+# * '-LIMIT' : the amount of rows to be returned
+# example : filter = { '-OFFSET' : 100, '-LIMIT':25}
+#
+# Here are a few realistic examples
+#
+# GetNodes ( { 'node_type' : 'regular' , 'hostname' : '*.edu' , '-SORT' : 'hostname' , '-OFFSET' : 30 , '-LIMIT' : 25 } )
+# would return regular (usual) nodes matching '*.edu' in alphabetical order from 31th to 55th
+#
+# GetPersons ( { '|role_ids' : [ 20 , 40] } )
+# would return all persons that have either pi (20) or tech (40) roles
+#
+# GetPersons ( { '&role_ids' : 10 } )
+# GetPersons ( { '&role_ids' : 10 } )
+# GetPersons ( { '|role_ids' : [ 10 ] } )
+# GetPersons ( { '|role_ids' : [ 10 ] } )
+# all 4 forms are equivalent and would return all admin users in the system
+# """
+#
+# def __init__(self, fields = {}, filter = {}, doc = "Attribute filter"):
+# # Store the filter in our dict instance
+# dict.__init__(self, filter)
+#
+# # Declare ourselves as a type of parameter that can take
+# # either a value or a list of values for each of the specified
+# # fields.
+# self.fields = dict ( [ ( field, Mixed (expected, [expected]))
+# for (field,expected) in fields.iteritems() ] )
+#
+# # Null filter means no filter
+# Parameter.__init__(self, self.fields, doc = doc, nullok = True)
+#
+# def sql(self, api, join_with = "AND"):
+# """
+# Returns a SQL conditional that represents this filter.
+# """
+#
+# # So that we always return something
+# if join_with == "AND":
+# conditionals = ["True"]
+# elif join_with == "OR":
+# conditionals = ["False"]
+# else:
+# assert join_with in ("AND", "OR")
+#
+# # init
+# sorts = []
+# clips = []
+#
+# for field, value in self.iteritems():
+# # handle negation, numeric comparisons
+# # simple, 1-depth only mechanism
+#
+# modifiers={'~' : False,
+# '<' : False, '>' : False,
+# '[' : False, ']' : False,
+# '-' : False,
+# '&' : False, '|' : False,
+# '{': False ,
+# }
+# def check_modifiers(field):
+# if field[0] in modifiers.keys():
+# modifiers[field[0]] = True
+# field = field[1:]
+# return check_modifiers(field)
+# return field
+# field = check_modifiers(field)
+#
+# # filter on fields
+# if not modifiers['-']:
+# if field not in self.fields:
+# raise PLCInvalidArgument, "Invalid filter field '%s'" % field
+#
+# # handling array fields always as compound values
+# if modifiers['&'] or modifiers['|']:
+# if not isinstance(value, (list, tuple, set)):
+# value = [value,]
+#
+# if isinstance(value, (list, tuple, set)):
+# # handling filters like '~slice_id':[]
+# # this should return true, as it's the opposite of 'slice_id':[] which is false
+# # prior to this fix, 'slice_id':[] would have returned ``slice_id IN (NULL) '' which is unknown
+# # so it worked by coincidence, but the negation '~slice_ids':[] would return false too
+# if not value:
+# if modifiers['&'] or modifiers['|']:
+# operator = "="
+# value = "'{}'"
+# else:
+# field=""
+# operator=""
+# value = "FALSE"
+# else:
+# value = map(str, map(api.db.quote, value))
+# if modifiers['&']:
+# operator = "@>"
+# value = "ARRAY[%s]" % ", ".join(value)
+# elif modifiers['|']:
+# operator = "&&"
+# value = "ARRAY[%s]" % ", ".join(value)
+# else:
+# operator = "IN"
+# value = "(%s)" % ", ".join(value)
+# else:
+# if value is None:
+# operator = "IS"
+# value = "NULL"
+# elif isinstance(value, StringTypes) and \
+# (value.find("*") > -1 or value.find("%") > -1):
+# operator = "LIKE"
+# # insert *** in pattern instead of either * or %
+# # we dont use % as requests are likely to %-expansion later on
+# # actual replacement to % done in PostgreSQL.py
+# value = value.replace ('*','***')
+# value = value.replace ('%','***')
+# value = str(api.db.quote(value))
+# else:
+# operator = "="
+# if modifiers['<']:
+# operator='<'
+# if modifiers['>']:
+# operator='>'
+# if modifiers['[']:
+# operator='<='
+# if modifiers[']']:
+# operator='>='
+# #else:
+# # value = str(api.db.quote(value))
+# # jordan
+# if isinstance(value, StringTypes) and value[-2:] != "()": # XXX
+# value = str(api.db.quote(value))
+# if isinstance(value, datetime.datetime):
+# value = str(api.db.quote(str(value)))
+#
+# #if prefix:
+# # field = "%s.%s" % (prefix,field)
+# if field:
+# clause = "\"%s\" %s %s" % (field, operator, value)
+# else:
+# clause = "%s %s %s" % (field, operator, value)
+#
+# if modifiers['~']:
+# clause = " ( NOT %s ) " % (clause)
+#
+# conditionals.append(clause)
+# # sorting and clipping
+# else:
+# if field not in ('SORT','OFFSET','LIMIT'):
+# raise PLCInvalidArgument, "Invalid filter, unknown sort and clip field %r"%field
+# # sorting
+# if field == 'SORT':
+# if not isinstance(value,(list,tuple,set)):
+# value=[value]
+# for field in value:
+# order = 'ASC'
+# if field[0] == '+':
+# field = field[1:]
+# elif field[0] == '-':
+# field = field[1:]
+# order = 'DESC'
+# if field not in self.fields:
+# raise PLCInvalidArgument, "Invalid field %r in SORT filter"%field
+# sorts.append("%s %s"%(field,order))
+# # clipping
+# elif field == 'OFFSET':
+# clips.append("OFFSET %d"%value)
+# # clipping continued
+# elif field == 'LIMIT' :
+# clips.append("LIMIT %d"%value)
+#
+# where_part = (" %s " % join_with).join(conditionals)
+# clip_part = ""
+# if sorts:
+# clip_part += " ORDER BY " + ",".join(sorts)
+# if clips:
+# clip_part += " " + " ".join(clips)
+## print 'where_part=',where_part,'clip_part',clip_part
+# return (where_part,clip_part)
+#
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Query representation
+#
+# Copyright (C) UPMC Paris Universitas
+# Authors:
+# Jordan Augé <jordan.auge@lip6.fr>
+# Marc-Olivier Buob <marc-olivier.buob@lip6.fr>
+# Thierry Parmentelat <thierry.parmentelat@inria.fr>
+
+from types import StringTypes
+from manifold.core.filter import Filter, Predicate
+from manifold.util.frozendict import frozendict
+from manifold.util.type import returns, accepts
+from manifold.util.clause import Clause
+import copy
+
+import json
+import uuid
+
def uniqid ():
    """Return a fresh 32-character lowercase hexadecimal identifier."""
    return str(uuid.uuid4()).replace('-', '')
+
+debug=False
+#debug=True
+
# Raised when Query receives unknown or malformed constructor parameters.
class ParameterError(StandardError): pass
+
class Query(object):
    """
    Implements a TopHat query.

    We assume this is a correct DAG specification.

    1/ A field designates several tables = OR specification.
    2/ The set of fields specifies a AND between OR clauses.
    """

    #---------------------------------------------------------------------------
    # Constructor
    #---------------------------------------------------------------------------

    def __init__(self, *args, **kwargs):
        # Accepted construction forms:
        #   Query(dict)                                  -- keyword-style dict
        #   Query(object, filters, fields)               -- action defaults to 'get'
        #   Query(object, filters, params, fields)
        #   Query(action, object, filters, params, fields, timestamp)
        #   Query(action=..., object=..., ...)           -- keyword arguments

        self.query_uuid = uniqid()

        # Initialize optional parameters
        self.clear()

        #l = len(kwargs.keys())
        len_args = len(args)

        if len(args) == 1:
            if isinstance(args[0], dict):
                kwargs = args[0]
                args = []

        # Initialization from a tuple

        if len_args in range(2, 7) and type(args) == tuple:
            # Note: range(x,y) <=> [x, y[

            # XXX UGLY
            if len_args == 3:
                self.action = 'get'
                self.params = {}
                self.timestamp = 'now'
                self.object, self.filters, self.fields = args
            elif len_args == 4:
                self.object, self.filters, self.params, self.fields = args
                self.action = 'get'
                self.timestamp = 'now'
            else:
                self.action, self.object, self.filters, self.params, self.fields, self.timestamp = args

        # Initialization from a dict
        elif "object" in kwargs:
            if "action" in kwargs:
                self.action = kwargs["action"]
                del kwargs["action"]
            else:
                print "W: defaulting to get action"
                self.action = "get"


            self.object = kwargs["object"]
            del kwargs["object"]

            if "filters" in kwargs:
                self.filters = kwargs["filters"]
                del kwargs["filters"]
            else:
                self.filters = Filter()

            if "fields" in kwargs:
                self.fields = set(kwargs["fields"])
                del kwargs["fields"]
            else:
                self.fields = set()

            # "update table set x = 3" => params == set
            if "params" in kwargs:
                self.params = kwargs["params"]
                del kwargs["params"]
            else:
                self.params = {}

            if "timestamp" in kwargs:
                self.timestamp = kwargs["timestamp"]
                del kwargs["timestamp"]
            else:
                self.timestamp = "now"

            # Any leftover key is unknown: reject it.
            if kwargs:
                raise ParameterError, "Invalid parameter(s) : %r" % kwargs.keys()
        #else:
        #    raise ParameterError, "No valid constructor found for %s : args = %r" % (self.__class__.__name__, args)

        self.sanitize()

    def sanitize(self):
        # Coerce filters/params/fields/timestamp to their canonical types
        # and reject non-string field names.
        if not self.filters: self.filters = Filter()
        if not self.params: self.params = {}
        if not self.fields: self.fields = set()
        if not self.timestamp: self.timestamp = "now"

        if isinstance(self.filters, list):
            f = self.filters
            self.filters = Filter()
            for x in f:
                pred = Predicate(x)
                self.filters.add(pred)
        elif isinstance(self.filters, Clause):
            self.filters = Filter.from_clause(self.filters)

        if isinstance(self.fields, list):
            self.fields = set(self.fields)

        for field in self.fields:
            if not isinstance(field, StringTypes):
                raise TypeError("Invalid field name %s (string expected, got %s)" % (field, type(field)))

    #---------------------------------------------------------------------------
    # Helpers
    #---------------------------------------------------------------------------

    def copy(self):
        # Deep copy: mutating the copy never affects this query.
        return copy.deepcopy(self)

    def clear(self):
        # Reset every component to its neutral value.
        self.action = 'get'
        self.object = None
        self.filters = Filter()
        self.params = {}
        self.fields = set()
        self.timestamp = 'now' # ignored for now

    def to_sql(self, platform='', multiline=False):
        # Render the query as a pseudo-SQL string (display/debug only).
        get_params_str = lambda : ', '.join(['%s = %r' % (k, v) for k, v in self.get_params().items()])
        get_select_str = lambda : ', '.join(self.get_select())

        table = self.get_from()
        select = 'SELECT %s' % (get_select_str() if self.get_select() else '*')
        where = 'WHERE %s' % self.get_where() if self.get_where() else ''
        at = 'AT %s' % self.get_timestamp() if self.get_timestamp() else ''
        params = 'SET %s' % get_params_str() if self.get_params() else ''

        sep = ' ' if not multiline else '\n '
        if platform: platform = "%s:" % platform
        # One template per action; filled from locals() below.
        strmap = {
            'get' : '%(select)s%(sep)s%(at)s%(sep)sFROM %(platform)s%(table)s%(sep)s%(where)s%(sep)s',
            'update': 'UPDATE %(platform)s%(table)s%(sep)s%(params)s%(sep)s%(where)s%(sep)s%(select)s',
            'create': 'INSERT INTO %(platform)s%(table)s%(sep)s%(params)s%(sep)s%(select)s',
            'delete': 'DELETE FROM %(platform)s%(table)s%(sep)s%(where)s'
        }

        return strmap[self.action] % locals()

    @returns(StringTypes)
    def __str__(self):
        return self.to_sql(multiline=True)

    @returns(StringTypes)
    def __repr__(self):
        return self.to_sql()

    def __key(self):
        # Hash identity; NOTE(review): timestamp is excluded -- confirm
        # this is deliberate.
        return (self.action, self.object, self.filters, frozendict(self.params), frozenset(self.fields))

    def __hash__(self):
        return hash(self.__key())

    #---------------------------------------------------------------------------
    # Conversion
    #---------------------------------------------------------------------------

    def to_dict(self):
        # Plain-dict form with JSON-friendly filters/fields.
        return {
            'action': self.action,
            'object': self.object,
            'timestamp': self.timestamp,
            'filters': self.filters.to_list(),
            'params': self.params,
            'fields': list(self.fields)
        }

    def to_json (self, analyzed_query=None):
        # Craft the javascript ManifoldQuery constructor call consumed by
        # the web frontend (see manifold.js).
        query_uuid=self.query_uuid
        a=self.action
        o=self.object
        t=self.timestamp
        f=json.dumps (self.filters.to_list())
        p=json.dumps (self.params)
        c=json.dumps (list(self.fields))
        # xxx unique can be removed, but for now we pad the js structure
        unique=0

        if not analyzed_query:
            aq = 'null'
        else:
            aq = analyzed_query.to_json()
        sq="{}"

        result= """ new ManifoldQuery('%(a)s', '%(o)s', '%(t)s', %(f)s, %(p)s, %(c)s, %(unique)s, '%(query_uuid)s', %(aq)s, %(sq)s)"""%locals()
        if debug: print 'ManifoldQuery.to_json:',result
        return result

    # this builds a ManifoldQuery object from a dict as received from javascript through its ajax request
    # we use a json-encoded string - see manifold.js for the sender part
    # e.g. here's what I captured from the server's output
    # manifoldproxy.proxy: request.POST <QueryDict: {u'json': [u'{"action":"get","object":"resource","timestamp":"latest","filters":[["slice_hrn","=","ple.inria.omftest"]],"params":[],"fields":["hrn","hostname"],"unique":0,"query_uuid":"436aae70a48141cc826f88e08fbd74b1","analyzed_query":null,"subqueries":{}}']}>
    def fill_from_POST (self, POST_dict):
        # Populate self from a POST dict whose 'json' entry is a
        # json-encoded query; on failure the error is logged and the
        # query keeps its default values. Always re-sanitizes.
        try:
            json_string=POST_dict['json']
            dict=json.loads(json_string)
            for (k,v) in dict.iteritems():
                setattr(self,k,v)
        except:
            print "Could not decode incoming ajax request as a Query, POST=",POST_dict
            if (debug):
                import traceback
                traceback.print_exc()
        self.sanitize()

    #---------------------------------------------------------------------------
    # Accessors
    #---------------------------------------------------------------------------

    @returns(StringTypes)
    def get_action(self):
        return self.action

    @returns(frozenset)
    def get_select(self):
        return frozenset(self.fields)

    @returns(StringTypes)
    def get_from(self):
        return self.object

    @returns(Filter)
    def get_where(self):
        return self.filters

    @returns(dict)
    def get_params(self):
        return self.params

    @returns(StringTypes)
    def get_timestamp(self):
        return self.timestamp

#DEPRECATED#
#DEPRECATED#    def make_filters(self, filters):
#DEPRECATED#        return Filter(filters)
#DEPRECATED#
#DEPRECATED#    def make_fields(self, fields):
#DEPRECATED#        if isinstance(fields, (list, tuple)):
#DEPRECATED#            return set(fields)
#DEPRECATED#        else:
#DEPRECATED#            raise Exception, "Invalid field specification"

    #---------------------------------------------------------------------------
    # LINQ-like syntax
    #---------------------------------------------------------------------------

    @classmethod
    #@returns(Query)
    def action(self, action, object):
        """
        (Internal usage). Craft a Query according to an action name
        See methods: get, update, delete, execute.
        Args:
            action: A String among {"get", "update", "delete", "execute"}
            object: The name of the queried object (String)
        Returns:
            The corresponding Query instance
        """
        # NOTE(review): this classmethod shadows the 'action' instance
        # attribute set by clear()/__init__ at the class level.
        query = Query()
        query.action = action
        query.object = object
        return query

    @classmethod
    #@returns(Query)
    def get(self, object):
        """
        Craft the Query which fetches the records related to a given object
        Args:
            object: The name of the queried object (String)
        Returns:
            The corresponding Query instance
        """
        return self.action("get", object)

    @classmethod
    #@returns(Query)
    def update(self, object):
        """
        Craft the Query which updates the records related to a given object
        Args:
            object: The name of the queried object (String)
        Returns:
            The corresponding Query instance
        """
        return self.action("update", object)

    @classmethod
    #@returns(Query)
    def create(self, object):
        """
        Craft the Query which create the records related to a given object
        Args:
            object: The name of the queried object (String)
        Returns:
            The corresponding Query instance
        """
        return self.action("create", object)

    @classmethod
    #@returns(Query)
    def delete(self, object):
        """
        Craft the Query which delete the records related to a given object
        Args:
            object: The name of the queried object (String)
        Returns:
            The corresponding Query instance
        """
        return self.action("delete", object)

    @classmethod
    #@returns(Query)
    def execute(self, object):
        """
        Craft the Query which execute a processing related to a given object
        Args:
            object: The name of the queried object (String)
        Returns:
            The corresponding Query instance
        """
        return self.action("execute", object)

    #@returns(Query)
    def at(self, timestamp):
        """
        Set the timestamp carried by the query
        Args:
            timestamp: The timestamp (it may be a python timestamp, a string
                respecting the "%Y-%m-%d %H:%M:%S" python format, or "now")
        Returns:
            The self Query instance
        """
        self.timestamp = timestamp
        return self

    def filter_by(self, *args):
        """
        Args:
            args: It may be:
                - the parts of a Predicate (key, op, value)
                - None
                - a Filter instance
                - a set/list/tuple of Predicate instances
        """
        if len(args) == 1:
            filters = args[0]
            if filters == None:
                # None resets the filters entirely.
                self.filters = Filter()
                return self
            if not isinstance(filters, (set, list, tuple, Filter)):
                filters = [filters]
            for predicate in filters:
                self.filters.add(predicate)
        elif len(args) == 3:
            predicate = Predicate(*args)
            self.filters.add(predicate)
        else:
            raise Exception, 'Invalid expression for filter'
        return self

    def select(self, *fields):
        # Add fields to the selection; an empty/None argument clears it.

        # Accept passing iterables
        if len(fields) == 1:
            tmp, = fields
            if not tmp:
                fields = None
            elif isinstance(tmp, (list, tuple, set, frozenset)):
                fields = tuple(tmp)

        if not fields:
            # Delete all fields
            self.fields = set()
            return self

        for field in fields:
            self.fields.add(field)
        return self

    def set(self, params):
        # Merge params into the query (UPDATE/INSERT payload).
        self.params.update(params)
        return self

    def __or__(self, query):
        # Union of two queries over the same action/object/timestamp.
        assert self.action == query.action
        assert self.object == query.object
        assert self.timestamp == query.timestamp # XXX
        filter = self.filters | query.filters
        # fast dict union
        # http://my.safaribooksonline.com/book/programming/python/0596007973/python-shortcuts/pythoncook2-chp-4-sect-17
        params = dict(self.params, **query.params)
        # NOTE(review): 'params' is computed but never applied to the
        # returned query -- confirm whether it should be .set(params).
        fields = self.fields | query.fields
        return Query.action(self.action, self.object).filter_by(filter).select(fields)

    def __and__(self, query):
        # Intersection of two queries over the same action/object/timestamp.
        assert self.action == query.action
        assert self.object == query.object
        assert self.timestamp == query.timestamp # XXX
        filter = self.filters & query.filters
        # fast dict intersection
        # http://my.safaribooksonline.com/book/programming/python/0596007973/python-shortcuts/pythoncook2-chp-4-sect-17
        params = dict.fromkeys([x for x in self.params if x in query.params])
        # NOTE(review): 'params' is computed but never applied to the
        # returned query -- confirm whether it should be .set(params).
        fields = self.fields & query.fields
        return Query.action(self.action, self.object).filter_by(filter).select(fields)

    def __le__(self, query):
        # self <= query iff self is subsumed by query.
        return ( self == self & query ) or ( query == self | query )
+
class AnalyzedQuery(Query):
    """
    A Query that splits dotted fields/filters/params (e.g. 'slice.hrn')
    into per-method subqueries, resolving object types through metadata.
    """

    # XXX we might need to propagate special parameters such as DEBUG, etc.

    def __init__(self, query=None, metadata=None):
        # When a query is given, reuse its uuid and decompose it;
        # otherwise start empty with a fresh uuid.
        self.clear()
        self.metadata = metadata
        if query:
            self.query_uuid = query.query_uuid
            self.analyze(query)
        else:
            self.query_uuid = uniqid()

    @returns(StringTypes)
    def __str__(self):
        # Human-readable form: main query followed by numbered subqueries.
        out = []
        fields = self.get_select()
        fields = ", ".join(fields) if fields else '*'
        out.append("SELECT %s FROM %s WHERE %s" % (
            fields,
            self.get_from(),
            self.get_where()
        ))
        cpt = 1
        for method, subquery in self.subqueries():
            out.append(' [SQ #%d : %s] %s' % (cpt, method, str(subquery)))
            cpt += 1

        return "\n".join(out)

    def clear(self):
        super(AnalyzedQuery, self).clear()
        # method name -> AnalyzedQuery
        self._subqueries = {}

    def subquery(self, method):
        # Allows for the construction of a subquery
        # (created on first access, cached afterwards).
        if not method in self._subqueries:
            analyzed_query = AnalyzedQuery(metadata=self.metadata)
            analyzed_query.action = self.action
            try:
                type = self.metadata.get_field_type(self.object, method)
            except ValueError ,e: # backwards 1..N
                type = method
            analyzed_query.object = type
            self._subqueries[method] = analyzed_query
        return self._subqueries[method]

    def get_subquery(self, method):
        # Cached subquery for 'method', or None if never built.
        return self._subqueries.get(method, None)

    def remove_subquery(self, method):
        del self._subqueries[method]

    def get_subquery_names(self):
        return set(self._subqueries.keys())

    def get_subqueries(self):
        return self._subqueries

    def subqueries(self):
        # Generator over (method, subquery) pairs.
        for method, subquery in self._subqueries.iteritems():
            yield (method, subquery)

    def filter_by(self, filters):
        # Route dotted predicates ('method.key') to the matching subquery,
        # plain ones to this query.
        if not isinstance(filters, (set, list, tuple, Filter)):
            filters = [filters]
        for predicate in filters:
            if predicate and '.' in predicate.key:
                method, subkey = predicate.key.split('.', 1)
                # Method contains the name of the subquery, we need the type
                # XXX type = self.metadata.get_field_type(self.object, method)
                sub_pred = Predicate(subkey, predicate.op, predicate.value)
                self.subquery(method).filter_by(sub_pred)
            else:
                super(AnalyzedQuery, self).filter_by(predicate)
        return self

    def select(self, *fields):
        # Route dotted fields ('method.field') to the matching subquery.

        # XXX passing None should reset fields in all subqueries

        # Accept passing iterables
        if len(fields) == 1:
            tmp, = fields
            if isinstance(tmp, (list, tuple, set, frozenset)):
                fields = tuple(tmp)

        for field in fields:
            if field and '.' in field:
                method, subfield = field.split('.', 1)
                # Method contains the name of the subquery, we need the type
                # XXX type = self.metadata.get_field_type(self.object, method)
                self.subquery(method).select(subfield)
            else:
                super(AnalyzedQuery, self).select(field)
        return self

    def set(self, params):
        # NOTE(review): iterates self.params instead of the 'params'
        # argument, so incoming params are never applied (self.params is
        # empty right after clear()) -- looks like a bug; confirm against
        # callers before changing.
        for param, value in self.params.items():
            if '.' in param:
                method, subparam = param.split('.', 1)
                # Method contains the name of the subquery, we need the type
                # XXX type = self.metadata.get_field_type(self.object, method)
                self.subquery(method).set({subparam: value})
            else:
                super(AnalyzedQuery, self).set({param: value})
        return self

    def analyze(self, query):
        # Decompose a flat query into this query plus its subqueries.
        self.clear()
        self.action = query.action
        self.object = query.object
        self.filter_by(query.filters)
        self.set(query.params)
        self.select(query.fields)

    def to_json (self):
        # Same javascript constructor call as Query.to_json, with the
        # subqueries serialized into the trailing dict.
        query_uuid=self.query_uuid
        a=self.action
        o=self.object
        t=self.timestamp
        f=json.dumps (self.filters.to_list())
        p=json.dumps (self.params)
        c=json.dumps (list(self.fields))
        # xxx unique can be removed, but for now we pad the js structure
        unique=0

        aq = 'null'
        sq=", ".join ( [ "'%s':%s" % (object, subquery.to_json())
                         for (object, subquery) in self._subqueries.iteritems()])
        sq="{%s}"%sq

        result= """ new ManifoldQuery('%(a)s', '%(o)s', '%(t)s', %(f)s, %(p)s, %(c)s, %(unique)s, '%(query_uuid)s', %(aq)s, %(sq)s)"""%locals()
        if debug: print 'ManifoldQuery.to_json:',result
        return result
--- /dev/null
+# Inspired from GENI error codes
+
+import time
+import pprint
+
class ResultValue(dict):
    """
    Dict-based result wrapper inspired from GENI error codes.

    A ResultValue carries either a success payload ('code', 'origin',
    'value' all present) or an error report ('code', 'type', 'origin',
    'description' all present), plus a 'ts' creation timestamp.
    """

    # type
    SUCCESS = 0
    WARNING = 1
    ERROR = 2

    # origin
    CORE = 0
    GATEWAY = 1

    # code (SUCCESS and ERROR intentionally repeat the 'type' values)
    SUCCESS = 0
    SERVERBUSY = 32001
    BADARGS = 1
    ERROR = 2
    FORBIDDEN = 3
    BADVERSION = 4
    SERVERERROR = 5
    TOOBIG = 6
    REFUSED = 7
    TIMEDOUT = 8
    DBERROR = 9
    RPCERROR = 10

    # description
    ERRSTR = {
        SUCCESS     : 'Success',
        SERVERBUSY  : 'Server is (temporarily) too busy; try again later',
        BADARGS     : 'Bad Arguments: malformed',
        ERROR       : 'Error (other)',
        FORBIDDEN   : 'Operation Forbidden: eg supplied credentials do not provide sufficient privileges (on the given slice)',
        BADVERSION  : 'Bad Version (eg of RSpec)',
        SERVERERROR : 'Server Error',
        TOOBIG      : 'Too Big (eg request RSpec)',
        REFUSED     : 'Operation Refused',
        TIMEDOUT    : 'Operation Timed Out',
        DBERROR     : 'Database Error',
        RPCERROR    : ''
    }

    # Complete set of keys a ResultValue may carry.
    ALLOWED_FIELDS = set(['origin', 'type', 'code', 'value', 'description', 'traceback', 'ts'])

    def __init__(self, **kwargs):
        """
        Build a ResultValue from keyword fields.

        Raises:
            AssertionError: if an unknown field is given, or if neither
                the success nor the error field set is complete.
        """
        # Checks: fields must be known, and one of the two constructor
        # shapes (success / error) must be fully provided.
        given = set(kwargs.keys())
        cstr_success = set(['code', 'origin', 'value']) <= given
        cstr_error = set(['code', 'type', 'origin', 'description']) <= given
        assert given <= self.ALLOWED_FIELDS, "Wrong fields in ResultValue constructor: %r" % (given - self.ALLOWED_FIELDS)
        assert cstr_success or cstr_error, 'Incomplete set of fields in ResultValue constructor: %r' % given

        dict.__init__(self, **kwargs)

        # Set missing fields to None
        for field in self.ALLOWED_FIELDS - given:
            self[field] = None
        # BUGFIX: the loop above always inserts 'ts' (possibly None), so
        # the previous "if not 'ts' in self" test never fired and the
        # timestamp stayed None; test for None instead.
        if self.get('ts') is None:
            self['ts'] = time.time()


    # Internal MySlice errors      : return ERROR
    # Internal MySlice warnings    : return RESULT WITH WARNINGS
    # Debug                        : add DEBUG INFORMATION
    # Gateway errors               : return RESULT WITH WARNING
    # all Gateways errors          : return ERROR

    @classmethod
    def get_result_value(self, results, result_value_array):
        """Wrap query-plan results: SUCCESS when no errors were collected,
        otherwise a WARNING carrying the error list as description."""
        # let's analyze the results of the query plan
        # XXX we should inspect all errors to determine whether to return a
        # result or not
        if not result_value_array:
            # No error
            return ResultValue(code=self.SUCCESS, origin=[self.CORE, 0], value=results)
        else:
            # Handle errors
            return ResultValue(code=self.WARNING, origin=[self.CORE, 0], description=result_value_array, value=results)

    @classmethod
    def get_error(self, error):
        """Build a ResultValue for a known error code, with its ERRSTR
        message as value."""
        return ResultValue(code=error, origin=[self.CORE, 0], value=self.ERRSTR[error])

    @classmethod
    def get_success(self, result):
        """Build a SUCCESS ResultValue wrapping the given result."""
        return ResultValue(code=self.SUCCESS, origin=[self.CORE, 0], value=result)

    def ok_value(self):
        """Return the wrapped value."""
        return self['value']

    def error(self):
        """Return the repr of the error description.

        BUGFIX: the computed string was previously dropped (no return
        statement), making this method always return None.
        """
        err = "%r" % self['description']
        return err

    @staticmethod
    def to_html (raw_dict):
        """Pretty-print a raw dict, replacing escaped newlines with
        HTML line breaks."""
        return pprint.pformat (raw_dict).replace("\\n","<br/>")
+
+# 67 <code>
+# 68 <value>9</value>
+# 69 <label>DBERROR</label>
+# 70 <description>Database Error</description>
+# 71 </code>
+# 72 <code>
+# 73 <value>10</value>
+# 74 <label>RPCERROR</label>
+# 75 <description>RPC Error</description>
+# 76 </code>
+# 77 <code>
+# 78 <value>11</value>
+# 79 <label>UNAVAILABLE</label>
+# 80 <description>Unavailable (eg server in lockdown)</description>
+# 81 </code>
+# 82 <code>
+# 83 <value>12</value>
+# 84 <label>SEARCHFAILED</label>
+# 85 <description>Search Failed (eg for slice)</description>
+# 86 </code>
+# 87 <code>
+# 88 <value>13</value>
+# 89 <label>UNSUPPORTED</label>
+# 90 <description>Operation Unsupported</description>
+# 91 </code>
+# 92 <code>
+# 93 <value>14</value>
+# 94 <label>BUSY</label>
+# 95 <description>Busy (resource, slice, or server); try again
+# later</description>
+# 96 </code>
+# 97 <code>
+# 98 <value>15</value>
+# 99 <label>EXPIRED</label>
+# 100 <description>Expired (eg slice)</description>
+# 101 </code>
+# 102 <code>
+# 103 <value>16</value>
+# 104 <label>INPROGRESS</label>
+# 105 <description>In Progress</description>
+# 106 </code>
+# 107 <code>
+# 108 <value>17</value>
+# 109 <label>ALREADYEXISTS</label>
+# 110 <description>Already Exists (eg slice)</description>
+# 111 </code>
+# 112 <!-- 18+ not in original ProtoGENI implementation or Error Code --
+# 113 -- proposal. -->
+# 114 <code>
+# 115 <value>18</value>
+# 116 <label>MISSINGARGS</label>
+# 117 <description>Required argument(s) missing</description>
+# 118 </code>
+# 119 <code>
+# 120 <value>19</value>
+# 121 <label>OUTOFRANGE</label>
+# 122 <description>Input Argument outside of legal range</description>
+# 123 </code>
+# 124 <code>
+# 125 <value>20</value>
+# 126 <label>CREDENTIAL_INVALID</label>
+# 127 <description>Not authorized: Supplied credential is
+# invalid</description>
+# 128 </code>
+# 129 <code>
+# 130 <value>21</value>
+# 131 <label>CREDENTIAL_EXPIRED</label>
+# 132 <description>Not authorized: Supplied credential expired</description>
+# 133 </code>
+# 134 <code>
+# 135 <value>22</value>
+# 136 <label>CREDENTIAL_MISMATCH</label>
+# 137 <description>Not authorized: Supplied credential does not match client
+# certificate or does not match the given slice URN</description>
+# 138 </code>
+# 139 <code>
+# 140 <value>23</value>
+# 141 <label>CREDENTIAL_SIGNER_UNTRUSTED</label>
+# 142 <description>Not authorized: Supplied credential not signed by a trusted
+# authority</description>
+# 143 </code>
+# 144 <code>
+# 145 <value>24</value>
+# 146 <label>VLAN_UNAVAILABLE</label>
+# 147 <description>VLAN tag(s) requested not available (likely stitching
+# failure)</description>
+# 148 </code>
+# 149 </geni-error-codes>
+# 150
+# <!--
+# || 0 || SUCCESS || "Success" ||
+# || 1 || BADARGS || "Bad Arguments: malformed arguments" ||
+# || 2 || ERROR || "Error (other)" ||
+# || 3 || FORBIDDEN || "Operation Forbidden: eg supplied credentials do # not provide sufficient privileges (on given slice)" ||
+# || 4 || BADVERSION || "Bad Version (eg of RSpec)" ||
+# || 5 || SERVERERROR || "Server Error" ||
+# || 6 || TOOBIG || "Too Big (eg request RSpec)" ||
+# || 7 || REFUSED || "Operation Refused" ||
+# || 8 || TIMEDOUT || "Operation Timed Out" ||
+# || 9 || DBERROR || "Database Error" ||
+# || 10 || RPCERROR || "RPC Error" ||
+# || 11 || UNAVAILABLE || "Unavailable (eg server in lockdown)" ||
+# || 12 || SEARCHFAILED || "Search Failed (eg for slice)" ||
+# || 13 || UNSUPPORTED || "Operation Unsupported" ||
+# || 14 || BUSY || "Busy (resource, slice, or server); try again # later" ||
+# || 15 || EXPIRED || "Expired (eg slice)" ||
+# || 16 || INPROGRESS || "In Progress" ||
+# || 17 || ALREADYEXISTS || "Already Exists (eg the slice}" ||
+# || 18 || MISSINGARGS || "Required argument(s) missing" ||
+# || 19 || OUTOFRANGE || "Requested expiration time or other argument not # valid" ||
+# || 20 || CREDENTIAL_INVALID || "Not authorized: Supplied credential is # invalid" ||
+# || 21 || CREDENTIAL_EXPIRED || "Not authorized: Supplied credential # expired" ||
+# || 22 || CREDENTIAL_MISMATCH || "Not authorized: Supplied credential # does not match the supplied client certificate or does not match the given slice # URN" ||
+# || 23 || CREDENTIAL_SIGNER_UNTRUSTED || "Not authorized: Supplied # credential not signed by trusted authority" ||
+# || 24 || VLAN_UNAVAILABLE || "VLAN tag(s) requested not available # (likely stitching failure)" ||
+#
+# 18+ not in original ProtoGENI implementation or Error Code proposal.
+#
# Mapping to SFA Faults:
+# SfaAuthenticationFailure: FORBIDDEN
+# SfaDBErrr: DBERROR
+# SfaFault: ERROR
+# SfaPermissionDenied: FORBIDDEN
+# SfaNotImplemented: UNSUPPORTED
+# SfaAPIError: SERVERERROR
+# MalformedHrnException: BADARGS
+# NonExistingRecord: SEARCHFAILED
+# ExistingRecord: ALREADYEXISTS
+# NonexistingCredType: SEARCHFAILED? FORBIDDEN? CREDENTIAL_INVALID?
+# NonexitingFile: SEARCHFAILED
+# InvalidRPCParams: RPCERROR
+# ConnectionKeyGIDMismatch: FORBIDDEN? CREDENTIAL_MISMATCH?
+# MissingCallerGID: SEARCHFAILED? CREDENTIAL_MISMATCH?
+# RecordNotFound: SEARCHFAILED
+# PlanetLAbRecordDoesNotExist: SEARCHFAILED
+# PermissionError: FORBIDDEN
+# InsufficientRights: FORBIDDEN
+# MissingDelegateBit: CREDENTIAL_INVALID? FORBIDDEN?
+# ChildRightsNotSubsetOfParent: CREDENTIAL_INVALID
+# CertMissingParent: FORBIDDEN? CREDENTIAL_INVALID?
+# CertNotSignedByParent: FORBIDDEN
+# GidParentHrn: FORBIDDEN? CREDENTIAL_INVALID?
+# GidInvalidParentHrn: FORBIDDEN? CREDENTIAL_INVALID?
+# SliverDoesNotExist: SEARCHFAILED
+# MissingTrustedRoots: SERVERERROR
+# MissingSfaInfo: SERVERERROR
+# InvalidRSpec: BADARGS
+# InvalidRSpecElement: BADARGS
+# AccountNotEnabled: REFUSED? FORBIDDEN?
+# CredentialNotVerifiable: CREDENTIAL_INVALID
+# CertExpired: EXPIRED? FORBIDDEN?
+# -->
--- /dev/null
+# Manifold API Python interface
+import copy, xmlrpclib
+
+from myslice.configengine import ConfigEngine
+
+from django.contrib import messages
+from manifoldresult import ManifoldResult, ManifoldCode, ManifoldException
+from manifold.core.result_value import ResultValue
+
+debug=False
+debug=True
+debug_deep=False
+#debug_deep=True
+
+########## ugly stuff for hopefully nicer debug messages
def mytruncate (obj, l):
    """Render *obj* as a string of at most *l* characters,
    ellipsizing with '..' when it is longer.
    """
    # we will add '..'
    limit = l - 2
    # FIX: the original named this local 'repr', shadowing the builtin
    text = "%s" % obj
    return (text[:limit] + '..') if len(text) > limit else text
+
+from time import time, gmtime, strftime
+from math import trunc
def mytime (start=None):
    """Return (now, label): 'now' is time.time() and 'label' is an
    'HH:MM:SS-mmm' UTC timestamp, with ' (<elapsed>s)' appended when a
    *start* time is supplied."""
    now = time()
    label = strftime("%H:%M:%S-", gmtime()) + "%03d" % ((now - trunc(now)) * 1000)
    if start is not None:
        label += " (%03fs)" % (now - start)
    return now, label
+##########
+
class ManifoldAPI:
    """Thin XMLRPC client for the Manifold backend.

    Any attribute access (e.g. api.forward) returns a function that
    forwards the call to the backend server, appending self.auth as an
    'annotations' dict, and wraps the answer in a ResultValue.
    Errors are mapped onto ManifoldException (see __getattr__).
    """

    def __init__ (self, auth=None, cainfo=None):
        # auth: authentication dict sent along with every backend call
        self.auth = auth
        # cainfo is stored but not used in this class -- presumably SSL
        # material for the transport; TODO confirm against callers
        self.cainfo = cainfo
        self.errors = []
        self.trace = []
        self.calls = {}
        self.multicall = False
        self.url = ConfigEngine().manifold_url()
        # allow_none lets None values travel over XMLRPC (<nil/> extension)
        self.server = xmlrpclib.Server(self.url, verbose=False, allow_none=True)

    def __repr__ (self): return "ManifoldAPI[%s]"%self.url

    def _print_value (self, value):
        """Debug helper: one-line, truncated rendering of a result 'value'."""
        print "+++",'value',
        if isinstance (value,list): print "[%d]"%len(value),
        elif isinstance (value,dict): print "{%d}"%len(value),
        print mytruncate (value,80)

    # a one-liner to give a hint of what the return value looks like
    def _print_result (self, result):
        """Debug helper: summarize a backend result on stdout."""
        if not result: print "[no/empty result]"
        elif isinstance (result,str): print "result is '%s'"%result
        elif isinstance (result,list): print "result is a %d-elts list"%len(result)
        elif isinstance (result,dict):
            print "result is a dict with %d keys : %s"%(len(result),result.keys())
            for (k,v) in result.iteritems():
                if v is None: continue
                if k=='value': self._print_value(v)
                else: print '+++',k,':',mytruncate (v,30)
        else: print "[dont know how to display result] %s"%result

    # how to display a call
    def _repr_query (self,methodName, query):
        """Return a short 'action(object)' label for logging a call."""
        try: action=query['action']
        except: action="???"
        try: subject=query['object']
        except: subject="???"
        # most of the time, we run 'forward'
        if methodName=='forward': return "forward(%s(%s))"%(action,subject)
        else: return "%s(%s)"%(action,subject)

    # xxx temporary code for scaffolding a ManifoldResult on top of an API that does not expose error info
    # as of march 2013 we work with an API that essentially either returns the value, or raises
    # an xmlrpclib.Fault exception with always the same 8002 code
    # since most of the time we're getting this kind of issues for expired sessions
    # (looks like sessions are rather short-lived), for now the choice is to map these errors on
    # a SESSION_EXPIRED code
    def __getattr__(self, methodName):
        """Return a proxy function that forwards *methodName* to the backend."""
        def func(*args, **kwds):
            # shorthand
            def repr(): return self._repr_query (methodName, args[0])
            try:
                if debug:
                    start,msg = mytime()
                    print "====>",msg,"ManifoldAPI.%s"%repr(),"url",self.url
                    # No password in the logs
                    logAuth = copy.copy(self.auth)
                    for obfuscate in ['Authring','session']:
                        if obfuscate in logAuth: logAuth[obfuscate]="XXX"
                    if debug_deep: print "=> auth",logAuth
                    if debug_deep: print "=> args",args,"kwds",kwds
                # every call carries the session auth as a trailing
                # 'annotations' argument
                annotations = {
                    'authentication': self.auth
                }
                args += (annotations,)
                result=getattr(self.server, methodName)(*args, **kwds)
                # NOTE(review): this print is unconditional (not under debug)
                print "%s%r" %(methodName, args)

                if debug:
                    print '<= result=',
                    self._print_result(result)
                    end,msg = mytime(start)
                    print "<====",msg,"backend call %s returned"%(repr())

                return ResultValue(**result)

            except Exception,error:
                print "** MANIFOLD API ERROR **"
                if debug:
                    print "===== xmlrpc catch-all exception:",error
                    import traceback
                    traceback.print_exc(limit=3)
                # NOTE(review): in python2 'in' on an exception iterates its
                # args rather than doing a substring test on str(error) --
                # TODO confirm this matches the intended check
                if "Connection refused" in error:
                    raise ManifoldException ( ManifoldResult (code=ManifoldCode.SERVER_UNREACHABLE,
                                                              output="%s answered %s"%(self.url,error)))
                # otherwise
                print "<==== ERROR On ManifoldAPI.%s"%repr()
                raise ManifoldException ( ManifoldResult (code=ManifoldCode.SERVER_UNREACHABLE, output="%s"%error) )

        return func
+
def _execute_query(request, query, manifold_api_session_auth):
    """Forward *query* to the Manifold backend authenticated with
    *manifold_api_session_auth*, and return the bare 'value' field of the
    backend result.

    Side effects: on code==2 the 'manifold' entry is dropped from the
    Django session and the session is flushed; warnings (code==1) are
    only printed.
    """
    manifold_api = ManifoldAPI(auth=manifold_api_session_auth)
    print "-"*80
    print query
    print query.to_dict()
    print "-"*80
    result = manifold_api.forward(query.to_dict())
    if result['code'] == 2:
        # this is gross; at the very least we need to logout()
        # but most importantly there is a need to refine that test, since
        # code==2 does not necessarily mean an expired session
        # XXX only if we know it is the issue
        del request.session['manifold']
        # Flush django session
        request.session.flush()
        #raise Exception, 'Error running query: %r' % result

    if result['code'] == 1:
        print "WARNING"
        print result['description']

    # XXX Handle errors
    #Error running query: {'origin': [0, 'XMLRPCAPI'], 'code': 2, 'description': 'No such session: No row was found for one()', 'traceback': 'Traceback (most recent call last):\n File "/usr/local/lib/python2.7/dist-packages/manifold/core/xmlrpc_api.py", line 68, in xmlrpc_forward\n user = Auth(auth).check()\n File "/usr/local/lib/python2.7/dist-packages/manifold/auth/__init__.py", line 245, in check\n return self.auth_method.check()\n File "/usr/local/lib/python2.7/dist-packages/manifold/auth/__init__.py", line 95, in check\n raise AuthenticationFailure, "No such session: %s" % e\nAuthenticationFailure: No such session: No row was found for one()\n', 'type': 2, 'ts': None, 'value': None}

    return result['value']
+
def execute_query(request, query):
    """Run *query* with the credentials of the user logged into this
    Django session; flush the session and raise if not authenticated."""
    has_auth = ('manifold' in request.session) and ('auth' in request.session['manifold'])
    if not has_auth:
        request.session.flush()
        raise Exception("User not authenticated")
    session_auth = request.session['manifold']['auth']
    return _execute_query(request, query, session_auth)
+
def execute_admin_query(request, query):
    """Run *query* with the platform admin account from the configuration."""
    username, password = ConfigEngine().manifold_admin_user_password()
    auth = {'AuthMethod': 'password', 'Username': username, 'AuthString': password}
    return _execute_query(request, query, auth)
--- /dev/null
+import json
+import os.path
+
+# this is for django objects only
+#from django.core import serializers
+from django.http import HttpResponse, HttpResponseForbidden
+
+#from manifold.manifoldquery import ManifoldQuery
+from manifold.core.query import Query
+from manifold.core.result_value import ResultValue
+from manifold.manifoldapi import ManifoldAPI
+from manifold.manifoldresult import ManifoldException
+from manifold.util.log import Log
+from myslice.configengine import ConfigEngine
+
+debug=False
+#debug=True
+
# pretend the server only returns - empty lists to 'get' requests - this is to mimic
# misconfigurations or expired credentials or similar corner case situations
+debug_empty=False
+#debug_empty=True
+
+# this view is what the javascript talks to when it sends a query
+# see also
+# myslice/urls.py
+# as well as
+# static/js/manifold.js
def proxy (request,format):
    """The view associated with /manifold/proxy/<format>/.

    Decodes a query from the POSTed form data, forwards it to the Manifold
    backend with either the session's credentials or (for a few whitelisted
    queries) the admin account, and returns the complete backend result as
    JSON so the javascript side can inspect code/description itself.
    Only POST and format == 'json' are supported.
    """

    # expecting a POST
    if request.method != 'POST':
        print "manifoldproxy.api: unexpected method %s -- exiting"%request.method
        return
    # we only support json for now
    # if needed in the future we should probably cater for
    # format_in : how is the query encoded in POST
    # format_out: how to serve the results
    if format != 'json':
        print "manifoldproxy.proxy: unexpected format %s -- exiting"%format
        return
    try:
        # translate incoming POST request into a query object
        if debug: print 'manifoldproxy.proxy: request.POST',request.POST
        manifold_query = Query()
        #manifold_query = ManifoldQuery()
        manifold_query.fill_from_POST(request.POST)
        # retrieve session for request

        # We allow some requests to use the ADMIN user account
        # (user creation, and listing the platforms)
        if (manifold_query.get_from() == 'local:user' and manifold_query.get_action() == 'create') \
                or (manifold_query.get_from() == 'local:platform' and manifold_query.get_action() == 'get'):
            admin_user, admin_password = ConfigEngine().manifold_admin_user_password()
            manifold_api_session_auth = {'AuthMethod': 'password', 'Username': admin_user, 'AuthString': admin_password}
        else:
            # NOTE(review): raises KeyError if the session has no 'manifold'
            # entry -- caught by the catch-all below
            print request.session['manifold']
            manifold_api_session_auth = request.session['manifold']['auth']

        # debug knob: serve a fake empty answer to 'get' requests
        if debug_empty and manifold_query.action.lower()=='get':
            json_answer=json.dumps({'code':0,'value':[]})
            print "By-passing : debug_empty & 'get' request : returning a fake empty list"
            return HttpResponse (json_answer, mimetype="application/json")

        # actually forward
        manifold_api= ManifoldAPI(auth=manifold_api_session_auth)
        if debug: print '===> manifoldproxy.proxy: sending to backend', manifold_query
        # for the benefit of the python code, manifoldAPI raises an exception if something is wrong
        # however in this case we want to propagate the complete manifold result to the js world

        result = manifold_api.forward(manifold_query.to_dict())

        # XXX TEMP HACK -- make multi-line descriptions displayable in HTML
        if 'description' in result and result['description'] \
                and isinstance(result['description'], (tuple, list, set, frozenset)):
            result [ 'description' ] = [ ResultValue.to_html (x) for x in result['description'] ]

        json_answer=json.dumps(result)

        return HttpResponse (json_answer, mimetype="application/json")

    except Exception,e:
        # NOTE(review): falls through and implicitly returns None (a 500 in django)
        print "** PROXY ERROR **",e
        import traceback
        traceback.print_exc()
+
+####################
+# see CSRF_FAILURE_VIEW in settings.py
+# the purpose of redefining this was to display the failure reason somehow
+# this however turns out disappointing/not very informative
+failure_answer=[ "csrf_failure" ]
+def csrf_failure(request, reason=""):
+ print "CSRF failure with reason '%s'"%reason
+ return HttpResponseForbidden (json.dumps (failure_answer), mimetype="application/json")
--- /dev/null
def enum(*sequential, **named):
    """Build a simple enumeration type: positional names get consecutive
    integers starting at 0, keyword arguments keep their given values."""
    members = dict((name, index) for index, name in enumerate(sequential))
    members.update(named)
    return type('Enum', (), members)
+
# status codes exchanged between the python backend and the javascript UI
ManifoldCode = enum (
    UNKNOWN_ERROR=-1,
    SUCCESS=0,
    SESSION_EXPIRED=1,
    NOT_IMPLEMENTED=2,
    SERVER_UNREACHABLE=3,
)

# human-readable labels for the codes above (used by ManifoldResult.__repr__)
_messages_ = { -1 : "Unknown", 0: "OK", 1: "Session Expired", 2: "Not Implemented", 3: "Backend server unreachable"}
+
# being a dict this can be used with json.dumps
class ManifoldResult (dict):
    """Result wrapper exchanged with the javascript side.

    Keys: 'code' (a ManifoldCode), 'value' (the payload), 'output'
    (error text), and 'description' (kept for the javascript code).
    """
    def __init__ (self, code=ManifoldCode.SUCCESS, value=None, output=""):
        self['code']=code
        self['value']=value
        self['output']=output
        self['description'] = '' # Jordan: needed by javascript code

    def from_json (self, json_string):
        """Populate this result from a JSON-encoded string.

        BUG FIX: the original called json.dumps (encode) on the input
        instead of json.loads (decode) -- and 'json' was never imported
        in this module, so the call could only fail.
        """
        import json  # local import: this module has no import section
        d=json.loads(json_string)
        for k in ['code','value','output']:
            self[k]=d[k]

    # raw accessors
    def code (self): return self['code']
    def output (self): return self['output']

    # this returns None if there's a problem, the value otherwise
    def ok_value (self):
        if self['code']==ManifoldCode.SUCCESS:
            return self['value']

    # both data in a single string
    def error (self):
        return "code=%s -- %s"%(self['code'],self['output'])


    def __repr__ (self):
        code=self['code']
        result="[MFresult %s (code=%s)"%(_messages_.get(code,"???"),code)
        if code==0:
            value=self['value']
            if isinstance(value,list): result += " [value=list with %d elts]"%len(value)
            elif isinstance(value,dict): result += " [value=dict with keys %s]"%value.keys()
            else: result += " [value=%s: %s]"%(type(value).__name__,value)
        else:
            result += " [output=%s]"%self['output']
        result += "]"
        return result
+
# probably simpler to use a single class and transport the whole result there
# instead of a clumsy set of derived classes
class ManifoldException (Exception):
    """Exception carrying a complete ManifoldResult, so handlers can
    inspect the backend's code/output instead of a bare message."""
    def __init__ (self, manifold_result):
        # keep the full result object around for the caller
        self.manifold_result = manifold_result
    def __repr__ (self):
        details = self.manifold_result.error()
        return "Manifold Exception %s" % details
--- /dev/null
+import json
+import os.path
+
+from manifold.manifoldresult import ManifoldResult
+from manifold.manifoldapi import ManifoldAPI
+
+from django.contrib import messages
+
+debug=False
+#debug=True
+
class MetaData:
    """Fetches and caches Manifold metadata: for each exposed object
    (table), the description of its columns, keys and capabilities,
    indexed by table name in self.hash_by_object."""

    def __init__ (self, auth):
        # auth: authentication dict handed to ManifoldAPI
        self.auth=auth
        # table name -> metadata row for that object
        self.hash_by_object={}

    def fetch (self, request):
        """Query 'local:object' on the backend and (re)build the cache.

        Note: the *request* parameter is unused; the original shadowed it
        with the local query dict, which has been renamed below.
        """
        manifold_api = ManifoldAPI(self.auth)
        fields = ['table', 'column.name', 'column.qualifier', 'column.type',
                  'column.is_array', 'column.description', 'column.default', 'key', 'capability']
        #fields = ['table', 'column.column',
        #          'column.description','column.header', 'column.title',
        #          'column.unit', 'column.info_type',
        #          'column.resource_type', 'column.value_type',
        #          'column.allowed_values', 'column.platforms.platform',
        #          'column.platforms.platform_url']
        # FIX: this local used to be named 'request', shadowing the parameter
        metadata_query = { 'action': 'get',
                           'object': 'local:object', # proposed to replace metadata:table
                           'fields': fields ,
                           }
        result = manifold_api.forward(metadata_query)

        # xxx need a way to export error messages to the UI
        if result['code'] == 1: # warning
            # messages.warning(request, result['description'])
            print ("METADATA WARNING -",metadata_query,result['description'])
        elif result['code'] == 2:
            # messages.error(request, result['description'])
            print ("METADATA ERROR -",metadata_query,result['description'])
            # XXX FAIL HERE XXX
            return

        rows = result.ok_value()
# API errors will be handled by the outer logic
#        if not rows:
#            print "Failed to retrieve metadata",rows_result.error()
#            rows=[]
        self.hash_by_object = dict ( [ (row['table'], row) for row in rows ] )

    def to_json(self):
        """Serialize the whole cache as a JSON string."""
        return json.dumps(self.hash_by_object)

    def details_by_object (self, object):
        """Return the raw metadata row for *object*."""
        return self.hash_by_object[object]

    def sorted_fields_by_object (self, object):
        """Return the columns of *object*, sorted.

        BUG FIX: the original returned list.sort(), which sorts in place
        and returns None; sorted() actually returns the sorted list (and
        no longer mutates the cached column list).
        """
        return sorted(self.hash_by_object[object]['column'])

    def get_field_type(self, object, field):
        """Return the type of *field* -- currently the identity stub."""
        if debug: print ("Temp fix for metadata::get_field_type() -> consider moving to manifold.core.metadata soon")
        return field
--- /dev/null
/* Template elements: kept in the DOM so plugins can clone them,
   but invisible and taken out of the page flow. */
.template {
    visibility: hidden;
    position: absolute;
}
--- /dev/null
/* Buffered DOM updates: collect elements and fire a single callback
   per burst, 'interval' ms after the first add(). */
var Buffer = Class.extend({

    // callback: invoked once per burst; callback_this: its 'this' binding
    init: function(callback, callback_this) {
        this._callback = callback;
        this._callback_this = callback_this;
        this._timerid = null;          // pending timer id, if armed
        this._num_elements = 0;        // count of buffered elements
        this._elements = Array();      // buffered elements, drained by get()

        this._interval = 1000;         // delay (ms) before the callback fires

        return this;
    },

    // Queue an element; arm the timer on the first element of a burst.
    add: function(element)
    {
        this._elements.push(element);
        if (this._num_elements == 0) {
            this._timerid = setInterval(
                (function(self) { //Self-executing func which takes 'this' as self
                    return function() { //Return a function in the context of 'self'
                        messages.debug("running callback");
                        // one-shot: cancel the interval before running the callback
                        clearInterval(self._timerid);
                        self._callback.apply(self._callback_this);
                    }
                })(this),
                this._interval);
        }
        this._num_elements++;
    },

    // Drain the buffer: return the accumulated elements and reset state.
    get: function() {
        var elements = this._elements;
        this._elements = Array();
        this._num_elements = 0;
        return elements;
    },

});
--- /dev/null
/* Simple JavaScript Inheritance
 * By John Resig http://ejohn.org/
 * MIT Licensed.
 */
// Inspired by base2 and Prototype
(function(){
  // fnTest checks whether function decompilation works in this engine;
  // if so, only methods that actually mention _super get wrapped below.
  var initializing = false, fnTest = /xyz/.test(function(){xyz;}) ? /\b_super\b/ : /.*/;

  // The base Class implementation (does nothing)
  this.Class = function(){};

  // Create a new Class that inherits from this class
  Class.extend = function(prop) {
    var _super = this.prototype;

    // Instantiate a base class (but only create the instance,
    // don't run the init constructor)
    initializing = true;
    var prototype = new this();
    initializing = false;

    // Copy the properties over onto the new prototype
    for (var name in prop) {
      // Check if we're overwriting an existing function
      prototype[name] = typeof prop[name] == "function" &&
        typeof _super[name] == "function" && fnTest.test(prop[name]) ?
        (function(name, fn){
          return function() {
            var tmp = this._super;

            // Add a new ._super() method that is the same method
            // but on the super-class
            this._super = _super[name];

            // The method only need to be bound temporarily, so we
            // remove it when we're done executing
            var ret = fn.apply(this, arguments);
            this._super = tmp;

            return ret;
          };
        })(name, prop[name]) :
        prop[name];
    }

    // The dummy class constructor
    function Class() {
      // All construction is actually done in the init method
      if ( !initializing && this.init )
        this.init.apply(this, arguments);
    }

    // Populate our constructed prototype object
    Class.prototype = prototype;

    // Enforce the constructor to be what we expect
    Class.prototype.constructor = Class;

    // And make this class extendable
    Class.extend = arguments.callee;

    return Class;
  };
})();
--- /dev/null
/* Client-side representation of a Manifold query (SQL-like: action on an
 * object with filters, params and fields), with helpers to clone, edit
 * filters/fields, diff against another query, and split dotted names into
 * per-object subqueries. */
function ManifoldQuery(action, object, timestamp, filters, params, fields, unique, query_uuid, aq, sq) {
    // NOTE: these 'var' lines re-declare the parameters and are no-ops;
    // kept as in-source documentation of each field.
    // get, update, delete, create
    var action;
    // slice, user, network...
    var object;
    // timestamp, now, latest(cache) : date of the results queried
    var timestamp;
    // key(field),op(=<>),value
    var filters;
    // todo
    var params;
    // hostname, ip,...
    var fields;
    // 0,1 : list of element of an object or single object
    var unique;
    // query_uuid : unique identifier of a query
    var query_uuid;
    // Query : root query (no sub-Query)
    var analyzed_query;
    // {} : Assoc Table of sub-queries ["resources"->subQ1, "users"->subQ2]
    var subqueries;

/*-------------------------------------------------------------
    Query properties are SQL like :
---------------------------------------------------------------
SELECT fields FROM object WHERE filter;
UPDATE object SET field=value WHERE filter; / returns SELECT
DELETE FROM object WHERE filter
INSERT INTO object VALUES(field=value)
-------------------------------------------------------------*/

    // one-line debug rendering of this query
    this.__repr = function () {
        // FIX: 'res' used to leak as an implicit global
        var res = "ManifoldQuery ";
//      res += " id=" + this.query_uuid;
        res += " a=" + this.action;
        res += " o=" + this.object;
        res += " ts=" + this.timestamp;
        res += " flts=" + this.filters;
        res += " flds=" + this.fields;
        res += " prms=" + this.params;
        return res;
    }

    // copy of the query: filters/fields/params duplicated, subqueries and
    // analyzed_query cloned recursively
    this.clone = function() {
        //
        var q = new ManifoldQuery();
        q.action = this.action;
        q.object = this.object;
        q.timestamp = this.timestamp;
        q.filters = this.filters.slice();
        q.fields = this.fields.slice();
        // BUG FIX: params and unique were not copied by the original clone()
        q.params = jQuery.extend({}, this.params);
        q.unique = this.unique;
        q.query_uuid = this.query_uuid;

        if (this.analyzed_query)
            q.analyzed_query = this.analyzed_query.clone();
        else
            q.analyzed_query = null;

        if (this.subqueries) {
            q.subqueries = {}
            for (var method in this.subqueries)
                q.subqueries[method] = this.subqueries[method].clone();
        }

        // deep extend not working for custom objects
        // $.extend(true, q, this);
        return q;
    }

    this.add_filter = function(key, op, value) {
        this.filters.push(new Array(key, op, value));
    }

    // replace filters on 'key' (or on 'key'+'op' when op is given) with the new one
    this.update_filter = function(key, op, value) {
        // Need to be improved...
        // remove all occurrences of key if operation is not defined
        if(!op){
            this.filters = jQuery.grep(this.filters, function(val, i) {
                return val[0] != key;
            });
        // Else remove the key+op filters
        }else{
            this.filters = jQuery.grep(this.filters, function(val, i) {return (val[0] != key || val[1] != op);});
        }
        this.filters.push(new Array(key, op, value));
    }

    this.remove_filter = function (key,op,value) {
        // if operator is null then remove all occurrences of this key
        if(!op){
            this.filters = jQuery.grep(this.filters, function(val, i) {
                return val[0] != key;
            });
        }else{
            this.filters = jQuery.grep(this.filters, function(val, i) {return (val[0] != key || val[1] != op);});
        }
    }

    // FIXME These functions computing diff's between queries are meant to be shared
    this.diff_fields = function(otherQuery) {
        var f1 = this.fields;
        var f2 = otherQuery.fields;

        /* added elements are the ones in f2 not in f1 */
        var added = jQuery.grep(f2, function (x) { return jQuery.inArray(x, f1) == -1 });
        /* removed elements are the ones in f1 not in f2 */
        var removed = jQuery.grep(f1, function (x) { return jQuery.inArray(x, f2) == -1 });

        return {'added':added, 'removed':removed};
    }

    // FIXME Modify filter to filters
    this.diff_filter = function(otherQuery) {
        var f1 = this.filters;
        var f2 = otherQuery.filters;

        /* added elements are the ones in f2 not in f1 */
        var added = jQuery.grep(f2, function (x) { return !arrayInArray(x, f1)});
        /* removed elements are the ones in f1 not in f2 */
        var removed = jQuery.grep(f1, function (x) { return !arrayInArray(x, f2)});

        return {'added':added, 'removed':removed};
    }

    // Callback receives 3 parameters: query, data, parent_query
    this.iter_subqueries = function(callback, data)
    {
        // FIX: 'rec' and 'query' used to leak as implicit globals
        var rec = function(query, callback, data, parent_query) {
            callback(query, data, parent_query);
            jQuery.each(query.subqueries, function(object, subquery) {
                rec(subquery, callback, data, query);
            });
        };

        // NOTE(review): analyzed_query defaults to null (not undefined), so
        // this branch is taken even before analyze_subqueries() has run --
        // callers are expected to have analyzed the query first
        var query;
        if (this.analyzed_query !== undefined)
            query = this.analyzed_query;
        else
            query = this;

        rec(query, callback, data, null);
    }

    this.select = function(field)
    {
        this.fields.push(field);
    }

    this.unselect = function(field)
    {
        this.fields = $.grep(this.fields, function(x) { return x != field; });
    }

// we send queries as a json string now
//    this.as_POST = function() {
//        return {'action': this.action, 'object': this.object, 'timestamp': this.timestamp,
//                'filters': this.filters, 'params': this.params, 'fields': this.fields};
//    }

    // split dotted filters/params/fields ("resource.hostname") into
    // per-object subqueries rooted at this.analyzed_query
    this.analyze_subqueries = function() {
        /* adapted from the PHP function in com_tophat/includes/query.php */
        var q = new ManifoldQuery();
        q.query_uuid = this.query_uuid;
        q.action = this.action;
        q.object = this.object;
        q.timestamp = this.timestamp;

        /* Filters */
        jQuery.each(this.filters, function(i, filter) {
            var k = filter[0];
            var op = filter[1];
            var v = filter[2];
            var pos = k.indexOf('.');
            if (pos != -1) {
                var object = k.substr(0, pos);
                var field = k.substr(pos+1);
                if (!q.subqueries[object]) {
                    q.subqueries[object] = new ManifoldQuery();
                    q.subqueries[object].action = q.action;
                    q.subqueries[object].object = object;
                    q.subqueries[object].timestamp = q.timestamp;
                }
                q.subqueries[object].filters.push(Array(field, op, v));
            } else {
                q.filters.push(filter);
            }
        });

        /* Params */
        jQuery.each(this.params, function(param, value) {
            var pos = param.indexOf('.');
            if (pos != -1) {
                var object = param.substr(0, pos);
                var field = param.substr(pos+1);
                if (!q.subqueries[object]) {
                    q.subqueries[object] = new ManifoldQuery();
                    q.subqueries[object].action = q.action;
                    q.subqueries[object].object = object;
                    q.subqueries[object].timestamp = q.timestamp;
                }
                q.subqueries[object].params[field] = value;
            } else {
                // BUG FIX: the original wrote q.params[field] here, but
                // 'field' is undefined in this branch; the key is the
                // whole (undotted) param name
                q.params[param] = value;
            }
        });

        /* Fields */
        jQuery.each(this.fields, function(i, v) {
            var pos = v.indexOf('.');
            if (pos != -1) {
                var object = v.substr(0, pos);
                var field = v.substr(pos+1);
                if (!q.subqueries[object]) {
                    q.subqueries[object] = new ManifoldQuery();
                    q.subqueries[object].action = q.action;
                    q.subqueries[object].object = object;
                    q.subqueries[object].timestamp = q.timestamp;
                }
                q.subqueries[object].fields.push(field);
            } else {
                q.fields.push(v);
            }
        });
        this.analyzed_query = q;
    }

    /* constructor: every argument is optional and gets a sane default */
    if (typeof action == "undefined")
        this.action = "get";
    else
        this.action = action;

    if (typeof object == "undefined")
        this.object = null;
    else
        this.object = object;

    if (typeof timestamp == "undefined")
        this.timestamp = "now";
    else
        this.timestamp = timestamp;

    if (typeof filters == "undefined")
        this.filters = [];
    else
        this.filters = filters;

    if (typeof params == "undefined")
        this.params = {};
    else
        this.params = params;

    if (typeof fields == "undefined")
        this.fields = [];
    else
        this.fields = fields;

    if (typeof unique == "undefined")
        this.unique = false;
    else
        this.unique = unique;

    this.query_uuid = query_uuid;

    if (typeof aq == "undefined")
        this.analyzed_query = null;
    else
        this.analyzed_query = aq;

    if (typeof sq == "undefined")
        this.subqueries = {};
    else
        this.subqueries = sq;
}
--- /dev/null
+// utilities
// Log the keys of object o under the given message tag.
function debug_dict_keys (msg, o) {
    var collected = [];
    for (var key in o) {
        collected.push(key);
    }
    messages.debug ("debug_dict_keys: " + msg + " keys= " + collected);
}
// Log every key/value pair of object o, one line per entry.
function debug_dict (msg, o) {
    for (var key in o) {
        messages.debug ("debug_dict: " + msg + " [" + key + "]=" + o[key]);
    }
}
// Log a single labelled value.
function debug_value (msg, value) {
    var line = "debug_value: " + msg + " " + value;
    messages.debug (line);
}
// Log a query, handling undefined/null and non-query objects gracefully.
function debug_query (msg, query) {
    if (query === undefined) {
        messages.debug ("debug_query: " + msg + " -> undefined");
    } else if (query == null) {
        messages.debug ("debug_query: " + msg + " -> null");
    } else if ('query_uuid' in query) {
        messages.debug ("debug_query: " + msg + query.__repr());
    } else {
        messages.debug ("debug_query: " + msg + " query= " + query);
    }
}
+
// http://javascriptweblog.wordpress.com/2011/08/08/fixing-the-javascript-typeof-operator/
// Reliable type detector: returns "array", "date", "regexp", "global", ...
// where the native typeof would just report "object".
Object.toType = (function toType(global) {
    return function(obj) {
        if (obj === global) {
            return "global";
        }
        // e.g. "[object Array]" -> "array"
        return ({}).toString.call(obj).match(/\s([a-z|A-Z]+)/)[1].toLowerCase();
    }
})(this);
+
/* ------------------------------------------------------------ */

// Constants that should be somehow moved to a plugin.js file

// events broadcast when a query's filters/fields/records change
var FILTER_ADDED = 1;
var FILTER_REMOVED = 2;
var CLEAR_FILTERS = 3;
var FIELD_ADDED = 4;
var FIELD_REMOVED = 5;
var CLEAR_FIELDS = 6;
var NEW_RECORD = 7;
var CLEAR_RECORDS = 8;
var FIELD_STATE_CHANGED = 9;

// coarse progress markers
var IN_PROGRESS = 101;
var DONE = 102;

/* Update requests related to subqueries */
var SET_ADD = 201;
var SET_REMOVED = 202;

// request
var FIELD_REQUEST_CHANGE = 301;
var FIELD_REQUEST_ADD = 302;
var FIELD_REQUEST_REMOVE = 303;
var FIELD_REQUEST_ADD_RESET = 304;
var FIELD_REQUEST_REMOVE_RESET = 305;
// status
var FIELD_REQUEST_PENDING = 401;
var FIELD_REQUEST_SUCCESS = 402;
var FIELD_REQUEST_FAILURE = 403;

/* Query status */
var STATUS_NONE = 500; // Query has not been started yet
var STATUS_GET_IN_PROGRESS = 501; // Query has been sent, no result has been received
var STATUS_GET_RECEIVED = 502; // Success
var STATUS_GET_ERROR = 503; // Error
var STATUS_UPDATE_PENDING = 504;
var STATUS_UPDATE_IN_PROGRESS = 505;
var STATUS_UPDATE_RECEIVED = 506;
var STATUS_UPDATE_ERROR = 507;

/* Requests for query cycle */
var RUN_UPDATE = 601;

/* MANIFOLD types */
var TYPE_VALUE = 1;
var TYPE_RECORD = 2;
var TYPE_LIST_OF_VALUES = 3;
var TYPE_LIST_OF_RECORDS = 4;
+
// A structure for storing queries

/* Wraps a ManifoldQuery with links to its parent/main query and to its
 * update query (plus the pristine copy of the latter, for resets).
 *
 * BUG FIXES versus the original:
 *  - 'update_query_orig_ext' was read in the body but never declared as a
 *    parameter, so QueryStore.insert's 5th argument silently landed in
 *    'disabled'; it is now a real (optional, backward-compatible) parameter.
 *  - the 'disabled' default-guard tested typeof update_query_ext instead
 *    of typeof disabled.
 */
function QueryExt(query, parent_query_ext, main_query_ext, update_query_ext, update_query_orig_ext, disabled) {

    /* Constructor */
    if (typeof query == "undefined")
        throw "Must pass a query in QueryExt constructor";
    this.query = query;
    this.parent_query_ext = (typeof parent_query_ext == "undefined") ? null : parent_query_ext;
    this.main_query_ext = (typeof main_query_ext == "undefined") ? null : main_query_ext;
    this.update_query_ext = (typeof update_query_ext == "undefined") ? null : update_query_ext;
    this.update_query_orig_ext = (typeof update_query_orig_ext == "undefined") ? null : update_query_orig_ext;
    this.disabled = (typeof disabled == "undefined") ? false : disabled;

    this.status = null;
    this.results = null;
    // update_query null unless we are a main_query (aka parent_query == null); only main_query_fields can be updated...
}
+
+function QueryStore() {
+
+    // uuid -> QueryExt for top-level (main) queries
+    this.main_queries = {};
+    // uuid -> QueryExt for every query/subquery of the analyzed trees
+    this.analyzed_queries = {};
+
+    /* Insertion */
+
+    // Register a main query: analyze it if needed, build its companion
+    // 'update' query (same uuid), then index every subquery of the
+    // analyzed tree in this.analyzed_queries.
+    this.insert = function(query) {
+        // We expect only main_queries are inserted
+
+        /* If the query has not been analyzed, then we analyze it */
+        if (query.analyzed_query == null) {
+            query.analyze_subqueries();
+        }
+
+        /* We prepare the update query corresponding to the main query and store both */
+        /* Note: they have the same UUID */
+
+        // NOTE(review): update_query, update_query_ext, update_query_orig_ext,
+        // query_ext, parent_query_ext and sq_ext below are all assigned without
+        // 'var' and therefore leak to the global scope; the QueryExt constructor
+        // even relies on the leaked update_query_orig_ext global -- confirm that
+        // dependency before adding 'var' here.
+
+        // XXX query.change_action() should become deprecated
+        update_query = query.clone();
+        update_query.action = 'update';
+        update_query.analyzed_query.action = 'update';
+        update_query.params = {};
+        update_query_ext = new QueryExt(update_query);
+
+        /* We remember the original query to be able to reset it */
+        update_query_orig_ext = new QueryExt(update_query.clone());
+
+
+        /* We store the main query */
+        query_ext = new QueryExt(query, null, null, update_query_ext, update_query_orig_ext, false);
+        manifold.query_store.main_queries[query.query_uuid] = query_ext;
+        /* Note: the update query does not have an entry! */
+
+
+        // The query is disabled; since it is incomplete until we know the content of the set of subqueries
+        // XXX unless we have no subqueries ???
+        // we will complete with params when records are received... this has to be done by the manager
+        // SET_ADD, SET_REMOVE will change the status of the elements of the set
+        // UPDATE will change also, etc.
+        // XXX We need a proper structure to store this information...
+
+        // We also need to insert all queries and subqueries from the analyzed_query
+        // XXX We need the root of all subqueries
+        query.iter_subqueries(function(sq, data, parent_query) {
+            if (parent_query)
+                parent_query_ext = manifold.query_store.find_analyzed_query_ext(parent_query.query_uuid);
+            else
+                parent_query_ext = null;
+            // XXX parent_query_ext == false
+            // XXX main.subqueries = {} # Normal, we need analyzed_query
+            sq_ext = new QueryExt(sq, parent_query_ext, query_ext)
+            manifold.query_store.analyzed_queries[sq.query_uuid] = sq_ext;
+        });
+
+        // XXX We have spurious update queries...
+    }
+
+    /* Searching */
+
+    // QueryExt lookup by uuid (main queries only)
+    this.find_query_ext = function(query_uuid) {
+        return this.main_queries[query_uuid];
+    }
+
+    // Raw query lookup by uuid (main queries only)
+    this.find_query = function(query_uuid) {
+        return this.find_query_ext(query_uuid).query;
+    }
+
+    // QueryExt lookup by uuid across the whole analyzed tree
+    this.find_analyzed_query_ext = function(query_uuid) {
+        return this.analyzed_queries[query_uuid];
+    }
+
+    // Raw query lookup by uuid across the whole analyzed tree
+    this.find_analyzed_query = function(query_uuid) {
+        return this.find_analyzed_query_ext(query_uuid).query;
+    }
+}
+
+/*!
+ * This namespace holds functions for globally managing query objects
+ * \Class Manifold
+ */
+var manifold = {
+
+    /**************************************************************************
+     * Helper functions
+     **************************************************************************/
+
+    separator: '__', // separator used when building composite names
+
+ get_type: function(variable) {
+ switch(Object.toType(variable)) {
+ case 'number':
+ case 'string':
+ return TYPE_VALUE;
+ case 'object':
+ return TYPE_RECORD;
+ case 'array':
+ if ((variable.length > 0) && (Object.toType(variable[0]) === 'object'))
+ return TYPE_LIST_OF_RECORDS;
+ else
+ return TYPE_LIST_OF_VALUES;
+ }
+ },
+
+ /**************************************************************************
+ * Metadata management
+ **************************************************************************/
+
+ metadata: {
+
+ get_table: function(method) {
+ var table = MANIFOLD_METADATA[method];
+ return (typeof table === 'undefined') ? null : table;
+ },
+
+ get_columns: function(method) {
+ var table = this.get_table(method);
+ if (!table) {
+ return null;
+ }
+
+ return (typeof table.column === 'undefined') ? null : table.column;
+ },
+
+ get_key: function(method) {
+ var table = this.get_table(method);
+ if (!table)
+ return null;
+
+ return (typeof table.key === 'undefined') ? null : table.key;
+ },
+
+
+ get_column: function(method, name) {
+ var columns = this.get_columns(method);
+ if (!columns)
+ return null;
+
+ $.each(columns, function(i, c) {
+ if (c.name == name)
+ return c
+ });
+ return null;
+ },
+
+ get_type: function(method, name) {
+ var table = this.get_table(method);
+ if (!table)
+ return null;
+
+ return (typeof table.type === 'undefined') ? null : table.type;
+ }
+
+ },
+
+    /**************************************************************************
+     * Query management
+     **************************************************************************/
+
+    // Central registry of queries and their companion update queries
+    query_store: new QueryStore(),
+
+    // XXX Remaining functions are deprecated since they are replaced by the query store
+
+    /*!
+     * Associative array storing the set of queries active on the page
+     * \memberof Manifold
+     */
+    all_queries: {},
+
+ /*!
+ * Insert a query in the global hash table associating uuids to queries.
+ * If the query has no been analyzed yet, let's do it.
+ * \fn insert_query(query)
+ * \memberof Manifold
+ * \param ManifoldQuery query Query to be added
+ */
+ insert_query : function (query) {
+ // NEW API
+ manifold.query_store.insert(query);
+
+ // FORMER API
+ if (query.analyzed_query == null) {
+ query.analyze_subqueries();
+ }
+ manifold.all_queries[query.query_uuid]=query;
+ },
+
+    /*!
+     * Returns the query associated to a UUID
+     * \fn find_query(query_uuid)
+     * \memberof Manifold
+     * \param string query_uuid The UUID of the query to be returned
+     */
+    find_query : function (query_uuid) {
+        // legacy flat map lookup; returns undefined for unknown uuids
+        return manifold.all_queries[query_uuid];
+    },
+
+    /**************************************************************************
+     * Query execution
+     **************************************************************************/
+
+    // backend endpoint used to trigger a query asynchronously
+    proxy_url : '/manifold/proxy/json/',
+
+    // reasonably low-noise, shows manifold requests coming in and out
+    asynchroneous_debug : true,
+    // print out more details on result publication and related callbacks
+    pubsub_debug : false,
+
+    /**
+     * \brief We use js function closure to be able to pass the query (array)
+     * to the callback function used when data is received
+     */
+    success_closure: function(query, publish_uuid, callback) {
+        // the returned function matches jQuery's ajax success signature
+        return function(data, textStatus) {
+            manifold.asynchroneous_success(data, query, publish_uuid, callback);
+        }
+    },
+
+ run_query: function(query, callback) {
+ // default value for callback = null
+ if (typeof callback === 'undefined')
+ callback = null;
+
+ var query_json = JSON.stringify(query);
+
+ /* Nothing related to pubsub here... for the moment at least. */
+ //query.iter_subqueries(function (sq) {
+ // manifold.raise_record_event(sq.query_uuid, IN_PROGRESS);
+ //});
+
+ $.post(manifold.proxy_url, {'json': query_json} , manifold.success_closure(query, null, callback));
+ },
+
+ // Executes all async. queries - intended for the javascript header to initialize queries
+ // input queries are specified as a list of {'query_uuid': <query_uuid> }
+ // each plugin is responsible for managing its spinner through on_query_in_progress
+ asynchroneous_exec : function (query_exec_tuples) {
+
+ // Loop through input array, and use publish_uuid to publish back results
+ $.each(query_exec_tuples, function(index, tuple) {
+ var query=manifold.find_query(tuple.query_uuid);
+ var query_json=JSON.stringify (query);
+ var publish_uuid=tuple.publish_uuid;
+ // by default we publish using the same uuid of course
+ if (publish_uuid==undefined) publish_uuid=query.query_uuid;
+ if (manifold.pubsub_debug) {
+ messages.debug("sending POST on " + manifold.proxy_url + query.__repr());
+ }
+
+ query.iter_subqueries(function (sq) {
+ manifold.raise_record_event(sq.query_uuid, IN_PROGRESS);
+ });
+
+ // not quite sure what happens if we send a string directly, as POST data is named..
+ // this gets reconstructed on the proxy side with ManifoldQuery.fill_from_POST
+ $.post(manifold.proxy_url, {'json':query_json},
+ manifold.success_closure(query, publish_uuid, tuple.callback));
+ })
+ },
+
+ /**
+ * \brief Forward a query to the manifold backend
+ * \param query (dict) the query to be executed asynchronously
+ * \param callback (function) the function to be called when the query terminates
+ */
+ forward: function(query, callback) {
+ var query_json = JSON.stringify(query);
+ $.post(manifold.proxy_url, {'json': query_json} ,
+ manifold.success_closure(query, query.query_uuid, callback));
+ },
+
+    /*!
+     * Returns whether a query expects a unique result.
+     * This is the case when the filters contain a key of the object
+     * \fn query_expects_unique_result(query)
+     * \memberof Manifold
+     * \param ManifoldQuery query Query for which we are testing whether it expects a unique result
+     */
+    query_expects_unique_result: function(query) {
+        /* XXX we need functions to query metadata */
+        //var keys = MANIFOLD_METADATA[query.object]['keys']; /* array of array of field names */
+        /* TODO requires keys in metadata */
+        // stub: currently always assumes a unique result
+        return true;
+    },
+
+    /*!
+     * Publish result
+     * \fn publish_result(query, results)
+     * \memberof Manifold
+     * \param ManifoldQuery query Query which has received results
+     * \param array results results corresponding to query
+     */
+    publish_result: function(query, result) {
+        // a missing result is treated as an empty record list
+        if (typeof result === 'undefined')
+            result = [];
+
+        // NEW PLUGIN API: clear, one event per record, then DONE
+        manifold.raise_record_event(query.query_uuid, CLEAR_RECORDS);
+        if (manifold.pubsub_debug)
+            messages.debug(".. publish_result (1) ");
+        var count=0;
+        $.each(result, function(i, record) {
+            manifold.raise_record_event(query.query_uuid, NEW_RECORD, record);
+            count += 1;
+        });
+        if (manifold.pubsub_debug)
+            messages.debug(".. publish_result (2) has used NEW API on " + count + " records");
+        manifold.raise_record_event(query.query_uuid, DONE);
+        if (manifold.pubsub_debug)
+            messages.debug(".. publish_result (3) has used NEW API to say DONE");
+
+        // OLD PLUGIN API BELOW
+        /* Publish an update announce */
+        var channel="/results/" + query.query_uuid + "/changed";
+        if (manifold.pubsub_debug)
+            messages.debug(".. publish_result (4) OLD API on channel" + channel);
+
+        $.publish(channel, [result, query]);
+
+        if (manifold.pubsub_debug)
+            messages.debug(".. publish_result (5) END q=" + query.__repr());
+    },
+
+ /*!
+ * Recursively publish result
+ * \fn publish_result_rec(query, result)
+ * \memberof Manifold
+ * \param ManifoldQuery query Query which has received result
+ * \param array result result corresponding to query
+ *
+ * Note: this function works on the analyzed query
+ */
+ publish_result_rec: function(query, result) {
+ /* If the result is not unique, only publish the top query;
+ * otherwise, publish the main object as well as subqueries
+ * XXX how much recursive are we ?
+ */
+ if (manifold.pubsub_debug)
+ messages.debug (">>>>> publish_result_rec " + query.object);
+ if (manifold.query_expects_unique_result(query)) {
+ /* Also publish subqueries */
+ $.each(query.subqueries, function(object, subquery) {
+ manifold.publish_result_rec(subquery, result[0][object]);
+ /* TODO remove object from result */
+ });
+ }
+ if (manifold.pubsub_debug)
+ messages.debug ("===== publish_result_rec " + query.object);
+
+ manifold.publish_result(query, result);
+
+ if (manifold.pubsub_debug)
+ messages.debug ("<<<<< publish_result_rec " + query.object);
+ },
+
+ setup_update_query: function(query, records) {
+ // We don't prepare an update query if the result has more than 1 entry
+ if (records.length != 1)
+ return;
+ var query_ext = manifold.query_store.find_query_ext(query.query_uuid);
+
+ var record = records[0];
+
+ var update_query_ext = query_ext.update_query_ext;
+ var update_query = update_query_ext.query;
+ var update_query_ext = query_ext.update_query_ext;
+ var update_query_orig = query_ext.update_query_orig_ext.query;
+
+ // Testing whether the result has subqueries (one level deep only)
+ // iif the query has subqueries
+ var count = 0;
+ var obj = query.analyzed_query.subqueries;
+ for (method in obj) {
+ if (obj.hasOwnProperty(method)) {
+ var key = manifold.metadata.get_key(method);
+ if (!key)
+ continue;
+ if (key.length > 1)
+ continue;
+ key = key[0];
+ var sq_keys = [];
+ var subrecords = record[method];
+ if (!subrecords)
+ continue
+ $.each(subrecords, function (i, subrecord) {
+ sq_keys.push(subrecord[key]);
+ });
+ update_query.params[method] = sq_keys;
+ update_query_orig.params[method] = sq_keys.slice();
+ count++;
+ }
+ }
+
+ if (count > 0) {
+ update_query_ext.disabled = false;
+ update_query_orig_ext.disabled = false;
+ }
+ },
+
+ process_get_query_records: function(query, records) {
+ this.setup_update_query(query, records);
+
+ /* Publish full results */
+ var tmp_query = manifold.find_query(query.query_uuid);
+ manifold.publish_result_rec(tmp_query.analyzed_query, records);
+ },
+
+    /**
+     *
+     * What we need to do when receiving results from an update query:
+     * - differences between what we had, what we requested, and what we obtained
+     * . what we had : update_query_orig (simple fields and set fields managed differently)
+     * . what we requested : update_query
+     * . what we received : records
+     * - raise appropriate events
+     *
+     * The normal process is that results similar to Get will be pushed in the
+     * pubsub mechanism, thus repopulating everything while we only need
+     * diff's. This means we need to move the publish functionalities in the
+     * previous 'process_get_query_records' function.
+     */
+    process_update_query_records: function(query, records) {
+        // First issue: we request everything, and not only what we modify, so we will have to ignore some fields
+        var query_uuid = query.query_uuid;
+        var query_ext = manifold.query_store.find_analyzed_query_ext(query_uuid);
+        var update_query = query_ext.main_query_ext.update_query_ext.query;
+        var update_query_orig = query_ext.main_query_ext.update_query_orig_ext.query;
+
+        // Since we update objects one at a time, we can get the first record
+        var record = records[0];
+
+        // Let's iterate over the object properties
+        for (var field in record) {
+            switch (this.get_type(record[field])) {
+                case TYPE_VALUE:
+                    // Did we ask for a change ?
+                    // NOTE(review): this reads update_query[field] while every
+                    // other branch reads update_query.params[field] -- looks
+                    // like it should be .params[field]; confirm before changing.
+                    var update_value = update_query[field];
+                    if (!update_value)
+                        // Not requested, if it has changed: OUT OF SYNC
+                        // How we can know ?
+                        // We assume it won't have changed
+                        continue;
+
+                    var result_value = record[field];
+                    if (!result_value)
+                        throw "Internal error";
+
+                    // success iff the backend echoed back the requested value
+                    data = {
+                        request: FIELD_REQUEST_CHANGE,
+                        key   : field,
+                        value : update_value,
+                        status: (update_value == result_value) ? FIELD_REQUEST_SUCCESS : FIELD_REQUEST_FAILURE,
+                    }
+                    manifold.raise_record_event(query_uuid, FIELD_STATE_CHANGED, data);
+
+                    break;
+                case TYPE_RECORD:
+                    throw "Not implemented";
+                    break;
+
+                case TYPE_LIST_OF_VALUES:
+                    // Same as list of records, but we don't have to extract keys
+                    var result_keys = record[field]
+
+                    // The rest is exactly the same (XXX factorize)
+                    // added   = requested but not originally present
+                    // removed = originally present but no longer requested
+                    var update_keys  = update_query_orig.params[field];
+                    var query_keys   = update_query.params[field];
+                    var added_keys   = $.grep(query_keys,  function (x) { return $.inArray(x, update_keys) == -1 });
+                    var removed_keys = $.grep(update_keys, function (x) { return $.inArray(x, query_keys)  == -1 });
+
+
+                    $.each(added_keys, function(i, key) {
+                        // an added key succeeded iff it shows up in the result
+                        if ($.inArray(key, result_keys) == -1) {
+                            data = {
+                                request: FIELD_REQUEST_ADD,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_FAILURE,
+                            }
+                        } else {
+                            data = {
+                                request: FIELD_REQUEST_ADD,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_SUCCESS,
+                            }
+                        }
+                        manifold.raise_record_event(query_uuid, FIELD_STATE_CHANGED, data);
+                    });
+                    $.each(removed_keys, function(i, key) {
+                        // a removed key succeeded iff it is absent from the result
+                        if ($.inArray(key, result_keys) == -1) {
+                            data = {
+                                request: FIELD_REQUEST_REMOVE,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_SUCCESS,
+                            }
+                        } else {
+                            data = {
+                                request: FIELD_REQUEST_REMOVE,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_FAILURE,
+                            }
+                        }
+                        manifold.raise_record_event(query_uuid, FIELD_STATE_CHANGED, data);
+                    });
+
+
+                    break;
+                case TYPE_LIST_OF_RECORDS:
+                    // example: slice.resource
+                    // - update_query_orig.params.resource = resources in slice before update
+                    // - update_query.params.resource = resource requested in slice
+                    // - keys from field = resources obtained
+                    var key = manifold.metadata.get_key(field);
+                    if (!key)
+                        continue;
+                    if (key.length > 1) {
+                        throw "Not implemented";
+                        continue;
+                    }
+                    key = key[0];
+
+                    /* XXX should be modified for multiple keys */
+                    // project each returned record onto its key value
+                    var result_keys = $.map(record[field], function(x) { return x[key]; });
+
+                    var update_keys  = update_query_orig.params[field];
+                    var query_keys   = update_query.params[field];
+                    var added_keys   = $.grep(query_keys,  function (x) { return $.inArray(x, update_keys) == -1 });
+                    var removed_keys = $.grep(update_keys, function (x) { return $.inArray(x, query_keys)  == -1 });
+
+
+                    $.each(added_keys, function(i, key) {
+                        if ($.inArray(key, result_keys) == -1) {
+                            data = {
+                                request: FIELD_REQUEST_ADD,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_FAILURE,
+                            }
+                        } else {
+                            data = {
+                                request: FIELD_REQUEST_ADD,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_SUCCESS,
+                            }
+                        }
+                        manifold.raise_record_event(query_uuid, FIELD_STATE_CHANGED, data);
+                    });
+                    $.each(removed_keys, function(i, key) {
+                        if ($.inArray(key, result_keys) == -1) {
+                            data = {
+                                request: FIELD_REQUEST_REMOVE,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_SUCCESS,
+                            }
+                        } else {
+                            data = {
+                                request: FIELD_REQUEST_REMOVE,
+                                key   : field,
+                                value : key,
+                                status: FIELD_REQUEST_FAILURE,
+                            }
+                        }
+                        manifold.raise_record_event(query_uuid, FIELD_STATE_CHANGED, data);
+                    });
+
+
+                    break;
+            }
+        }
+
+        // XXX Now we need to adapt 'update' and 'update_orig' queries as if we had done a get
+        this.setup_update_query(query, records);
+    },
+
+ process_query_records: function(query, records) {
+ if (query.action == 'get') {
+ this.process_get_query_records(query, records);
+ } else if (query.action == 'update') {
+ this.process_update_query_records(query, records);
+ }
+ },
+
+    // if set callback is provided it is called
+    // most of the time publish_uuid will be query.query_uuid
+    // however in some cases we wish to publish the result under a different uuid
+    // e.g. an updater wants to publish its result as if from the original (get) query
+    asynchroneous_success : function (data, query, publish_uuid, callback) {
+        // xxx should have a nicer declaration of that enum in sync with the python code somehow
+
+        var start = new Date();
+        if (manifold.asynchroneous_debug)
+            messages.debug(">>>>>>>>>> asynchroneous_success query.object=" + query.object);
+
+        // code 2: fatal error -- assumed to mean an expired session
+        if (data.code == 2) { // ERROR
+            // We need to make sense of error codes here
+            alert("Your session has expired, please log in again");
+            window.location="/logout/";
+            if (manifold.asynchroneous_debug) {
+                duration=new Date()-start;
+                messages.debug ("<<<<<<<<<< asynchroneous_success " + query.object + " -- error returned - logging out " + duration + " ms");
+            }
+            return;
+        }
+        // code 1: warning -- report it but fall through and process results anyway
+        if (data.code == 1) { // WARNING
+            messages.error("Some errors have been received from the manifold backend at " + MANIFOLD_URL + " [" + data.description + "]");
+            // publish error code and text message on a separate channel for whoever is interested
+            if (publish_uuid)
+                $.publish("/results/" + publish_uuid + "/failed", [data.code, data.description] );
+
+        }
+
+        // If a callback has been specified, we redirect results to it
+        // (and skip the default record processing entirely)
+        if (!!callback) {
+            callback(data);
+            if (manifold.asynchroneous_debug) {
+                duration=new Date()-start;
+                messages.debug ("<<<<<<<<<< asynchroneous_success " + query.object + " -- callback ended " + duration + " ms");
+            }
+            return;
+        }
+
+        if (manifold.asynchroneous_debug)
+            messages.debug ("========== asynchroneous_success " + query.object + " -- before process_query_records [" + query.query_uuid +"]");
+
+        // once everything is checked we can use the 'value' part of the manifoldresult
+        var result=data.value;
+        if (result) {
+            /* Eventually update the content of related queries (update, etc) */
+            // 'this' is manifold here since we are invoked via manifold.asynchroneous_success
+            this.process_query_records(query, result);
+
+            /* Publish results: disabled here, done in the previous call */
+            //tmp_query = manifold.find_query(query.query_uuid);
+            //manifold.publish_result_rec(tmp_query.analyzed_query, result);
+        }
+        if (manifold.asynchroneous_debug) {
+            duration=new Date()-start;
+            messages.debug ("<<<<<<<<<< asynchroneous_success " + query.object + " -- done " + duration + " ms");
+        }
+
+    },
+
+ /**************************************************************************
+ * Plugin API helpers
+ **************************************************************************/
+
+ raise_event_handler: function(type, query_uuid, event_type, value) {
+ if (manifold.pubsub_debug)
+ messages.debug("raise_event_handler, quuid="+query_uuid+" type="+type+" event_type="+event_type);
+ if ((type != 'query') && (type != 'record'))
+ throw 'Incorrect type for manifold.raise_event()';
+ // xxx we observe quite a lot of incoming calls with an undefined query_uuid
+ // this should be fixed upstream in manifold I expect
+ if (query_uuid === undefined) {
+ messages.warning("undefined query in raise_event_handler");
+ return;
+ }
+
+ // notify the change to objects that either listen to this channel specifically,
+ // or to the wildcard channel
+ var channels = [ manifold.get_channel(type, query_uuid), manifold.get_channel(type, '*') ];
+
+ $.each(channels, function(i, channel) {
+ if (value === undefined) {
+ if (manifold.pubsub_debug) messages.debug("triggering [no value] on channel="+channel+" and event_type="+event_type);
+ $('.pubsub').trigger(channel, [event_type]);
+ } else {
+ if (manifold.pubsub_debug) messages.debug("triggering [value="+value+"] on channel="+channel+" and event_type="+event_type);
+ $('.pubsub').trigger(channel, [event_type, value]);
+ }
+ });
+ },
+
+    // Convenience wrapper: raise an event on the 'query' channel
+    raise_query_event: function(query_uuid, event_type, value) {
+        manifold.raise_event_handler('query', query_uuid, event_type, value);
+    },
+
+    // Convenience wrapper: raise an event on the 'record' channel
+    raise_record_event: function(query_uuid, event_type, value) {
+        manifold.raise_event_handler('record', query_uuid, event_type, value);
+    },
+
+
+    // Central dispatcher for plugin-originated events on a (sub)query:
+    // updates the companion 'update' query and re-raises the event on the
+    // relevant query/record channels.
+    // NOTE(review): query_ext, query, update_query, update_query_orig,
+    // main_query, data and path below are assigned without 'var' and leak
+    // to the global scope -- confirm before tightening.
+    raise_event: function(query_uuid, event_type, value) {
+        // Query uuid has been updated with the key of a new element
+        query_ext = manifold.query_store.find_analyzed_query_ext(query_uuid);
+        query = query_ext.query;
+
+        switch(event_type) {
+            case FIELD_STATE_CHANGED:
+                // value is an object (request, key, value, status)
+                // update is only possible is the query is not pending, etc
+                // SET_ADD is on a subquery, FIELD_STATE_CHANGED on the query itself
+                // we should map SET_ADD on this...
+
+                // 1. Update internal query store about the change in status
+
+                // 2. Update the update query
+                update_query = query_ext.main_query_ext.update_query_ext.query;
+                update_query_orig = query_ext.main_query_ext.update_query_orig_ext.query;
+
+                switch(value.request) {
+                    case FIELD_REQUEST_CHANGE:
+                        if (update_query.params[value.key] === undefined)
+                            update_query.params[value.key] = Array();
+                        update_query.params[value.key] = value.value;
+                        break;
+                    case FIELD_REQUEST_ADD:
+                        // re-adding a key that was in the original set is a reset
+                        if ($.inArray(value.value, update_query_orig.params[value.key]) != -1)
+                            value.request = FIELD_REQUEST_ADD_RESET;
+                        if (update_query.params[value.key] === undefined)
+                            update_query.params[value.key] = Array();
+                        update_query.params[value.key].push(value.value);
+                        break;
+                    case FIELD_REQUEST_REMOVE:
+                        // removing a key that was not in the original set is a reset
+                        if ($.inArray(value.value, update_query_orig.params[value.key]) == -1)
+                            value.request = FIELD_REQUEST_REMOVE_RESET;
+
+                        var arr = update_query.params[value.key];
+                        arr = $.grep(arr, function(x) { return x != value.value; });
+                        if (update_query.params[value.key] === undefined)
+                            update_query.params[value.key] = Array();
+                        update_query.params[value.key] = arr;
+
+                        break;
+                    case FIELD_REQUEST_ADD_RESET:
+                    case FIELD_REQUEST_REMOVE_RESET:
+                        // XXX We would need to keep track of the original query
+                        throw "Not implemented";
+                        break;
+                }
+
+                // 3. Inform others about the change
+                // a) the main query...
+                manifold.raise_record_event(query_uuid, event_type, value);
+
+                // b) subqueries eventually (dot in the key)
+                // Let's unfold: walk down the subquery tree along the dotted
+                // path, shortening value.key as we go (value.key is mutated!)
+                var path_array = value.key.split('.');
+                var value_key = value.key.split('.');
+
+                var cur_query = query;
+                if (cur_query.analyzed_query)
+                    cur_query = cur_query.analyzed_query;
+                $.each(path_array, function(i, method) {
+                    cur_query = cur_query.subqueries[method];
+                    value_key.shift(); // XXX check that method is indeed shifted
+                });
+                value.key = value_key;
+
+                manifold.raise_record_event(cur_query.query_uuid, event_type, value);
+
+                // XXX make this DOT a global variable... could be '/'
+                break;
+
+            case SET_ADD:
+            case SET_REMOVED:
+
+                // update is only possible is the query is not pending, etc
+                // CHECK status !
+
+                // XXX we can only update subqueries of the main query. Check !
+                // assert query_ext.parent_query == query_ext.main_query
+                // old // update_query = query_ext.main_query_ext.update_query_ext.query;
+
+                // This SET_ADD is called on a subquery, so we have to
+                // reconstruct the path of the key in the main_query
+                // We then call FIELD_STATE_CHANGED which is the equivalent for the main query
+
+                var path = "";
+                var sq = query_ext;
+                while (sq.parent_query_ext) {
+                    if (path != "")
+                        path = '.' + path;
+                    path = sq.query.object + path;
+                    sq = sq.parent_query_ext;
+                }
+
+                main_query = query_ext.main_query_ext.query;
+                data = {
+                    request: (event_type == SET_ADD) ? FIELD_REQUEST_ADD : FIELD_REQUEST_REMOVE,
+                    key   : path,
+                    value : value,
+                    status: FIELD_REQUEST_PENDING,
+                };
+                this.raise_event(main_query.query_uuid, FIELD_STATE_CHANGED, data);
+
+                // old //update_query.params[path].push(value);
+                // old // console.log('Updated query params', update_query);
+                // NOTE: update might modify the fields in Get
+                // NOTE : we have to modify all child queries
+                // NOTE : parts of a query might not be started (eg slice.measurements, how to handle ?)
+
+                // if everything is done right, update_query should not be null.
+                // It is updated when we received results from the get query
+                // object = the same as get
+                // filter = key : update a single object for now
+                // fields = the same as get
+                manifold.raise_query_event(query_uuid, event_type, value);
+
+                break;
+
+            case RUN_UPDATE:
+                manifold.run_query(query_ext.main_query_ext.update_query_ext.query);
+                break;
+
+            case FILTER_ADDED:
+// Thierry - this is probably wrong but intended as a hotfix
+// http://trac.myslice.info/ticket/32
+//            manifold.raise_query_event(query_uuid, event_type, value);
+                break;
+            case FILTER_REMOVED:
+                manifold.raise_query_event(query_uuid, event_type, value);
+                break;
+            case FIELD_ADDED:
+                main_query = query_ext.main_query_ext.query;
+                // NOTE(review): QueryExt defines update_query_ext, not
+                // update_query -- main_update_query is presumably always
+                // undefined here; confirm (it is only read, never used below)
+                main_update_query = query_ext.main_query_ext.update_query;
+                query.select(value);
+
+                // Here we need the full path through all subqueries
+                path = ""
+                // XXX We might need the query name in the QueryExt structure
+                main_query.select(value);
+
+                // XXX When is an update query associated ?
+                // XXX main_update_query.select(value);
+
+                manifold.raise_query_event(query_uuid, event_type, value);
+                break;
+
+            case FIELD_REMOVED:
+                query = query_ext.query;
+                main_query = query_ext.main_query_ext.query;
+                // NOTE(review): same likely-undefined property as in FIELD_ADDED
+                main_update_query = query_ext.main_query_ext.update_query;
+                query.unselect(value);
+                main_query.unselect(value);
+
+                // We need to inform about changes in these queries to the respective plugins
+                // Note: query & main_query have the same UUID
+                manifold.raise_query_event(query_uuid, event_type, value);
+                break;
+        }
+        // We need to inform about changes in these queries to the respective plugins
+        // Note: query, main_query & update_query have the same UUID
+        // NOTE(review): most cases above already raised this same event --
+        // plugins therefore receive it twice; confirm whether intended
+        manifold.raise_query_event(query_uuid, event_type, value);
+        // We are targeting the same object with get and update
+        // The notion of query is bad, we should have a notion of destination, and issue queries on the destination
+        // NOTE: Editing a subquery == editing a local view on the destination
+
+        // XXX We might need to run the new query again and manage the plugins in the meantime with spinners...
+        // For the time being, we will collect all columns during the first query
+    },
+
+ /* Publish/subscribe channels for internal use */
+ get_channel: function(type, query_uuid) {
+ if ((type !== 'query') && (type != 'record'))
+ return null;
+ return '/' + type + '/' + query_uuid;
+ },
+
+}; // manifold object
+/* ------------------------------------------------------------ */
+
+(function($) {
+
+ // OLD PLUGIN API: extend jQuery/$ with pubsub capabilities
+ // https://gist.github.com/661855
+ var o = $({});
+ $.subscribe = function( channel, selector, data, fn) {
+ /* borrowed from jQuery */
+ if ( data == null && fn == null ) {
+ // ( channel, fn )
+ fn = selector;
+ data = selector = undefined;
+ } else if ( fn == null ) {
+ if ( typeof selector === "string" ) {
+ // ( channel, selector, fn )
+ fn = data;
+ data = undefined;
+ } else {
+ // ( channel, data, fn )
+ fn = data;
+ data = selector;
+ selector = undefined;
+ }
+ }
+ /* </ugly> */
+
+ /* We use an indirection function that will clone the object passed in
+ * parameter to the subscribe callback
+ *
+ * FIXME currently we only clone query objects which are the only ones
+ * supported and editable, we might have the same issue with results but
+ * the page load time will be severely affected...
+ */
+ o.on.apply(o, [channel, selector, data, function() {
+ for(i = 1; i < arguments.length; i++) {
+ if ( arguments[i].constructor.name == 'ManifoldQuery' )
+ arguments[i] = arguments[i].clone();
+ }
+ fn.apply(o, arguments);
+ }]);
+ };
+
+ $.unsubscribe = function() {
+ o.off.apply(o, arguments);
+ };
+
+ $.publish = function() {
+ o.trigger.apply(o, arguments);
+ };
+
+}(jQuery));
+
+/* ------------------------------------------------------------ */
+
+//http://stackoverflow.com/questions/5100539/django-csrf-check-failing-with-an-ajax-post-request
+//make sure to expose csrf in our outgoing ajax/post requests
+$.ajaxSetup({
+    beforeSend: function(xhr, settings) {
+        // Read a cookie value by name from document.cookie
+        function getCookie(name) {
+            var cookieValue = null;
+            if (document.cookie && document.cookie != '') {
+                var cookies = document.cookie.split(';');
+                for (var i = 0; i < cookies.length; i++) {
+                    var cookie = jQuery.trim(cookies[i]);
+                    // Does this cookie string begin with the name we want?
+                    if (cookie.substring(0, name.length + 1) == (name + '=')) {
+                        cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
+                        break;
+                    }
+                }
+            }
+            return cookieValue;
+        }
+        if (!(/^http:.*/.test(settings.url) || /^https:.*/.test(settings.url))) {
+            // Only send the token to relative URLs i.e. locally.
+            xhr.setRequestHeader("X-CSRFToken", getCookie('csrftoken'));
+        }
+    }
+});
--- /dev/null
+// MANIFOLD_METADATA was formerly known as all_headers
+// Read-only helpers over the global MANIFOLD_METADATA structure.
+// NOTE(review): columns are matched here through f['column'], whereas
+// manifold.metadata.get_column matches on c.name -- confirm which property
+// actually names a column in MANIFOLD_METADATA.
+var metadata = {
+    get : function () {
+        return MANIFOLD_METADATA;
+    },
+    // returns all fields of a given object
+    fields : function (object) {
+        var result=new Array();
+        jQuery.each(MANIFOLD_METADATA, function(s,obj){
+            if(s==object){
+                jQuery.each(obj['column'], function(i,f){
+                    result.push(f);
+                });
+                return false;
+            }
+        });
+        // sort_by is defined elsewhere in the project (case-insensitive sort)
+        result.sort(sort_by('column', false, function(a){return a.toUpperCase()}));
+        //result=jQuery(result).sort("column", "asc");
+        return result;
+    },
+    // returns all properties of a given field
+    // NOTE: returns undefined when object/field is unknown (result stays empty)
+    field : function (object, field) {
+        var result=new Array();
+        jQuery.each(MANIFOLD_METADATA, function(s,obj){
+            if(s==object){
+                jQuery.each(obj['column'], function(i,f){
+                    if(f['column']==field){
+                        result.push(f);
+                        return false;
+                    }
+                });
+                return false;
+            }
+        });
+        return result[0];
+    },
+    // returns the value of a property from a field within a object (type of object : resource,node,lease,slice...)
+    property : function (object, field, property) {
+        var result=null;
+        jQuery.each(MANIFOLD_METADATA, function(s,obj){
+            if(s==object){
+                jQuery.each(obj['column'], function(i,f){
+                    if(f['column']==field){
+                        result=f[property];
+                        return false;
+                    }
+                });
+                return false;
+            }
+        });
+        return result;
+    },
+} // metadata object
--- /dev/null
+// INHERITANCE
+// http://alexsexton.com/blog/2010/02/using-inheritance-patterns-to-organize-large-jquery-applications/
+// We will use John Resig's proposal
+
+// http://pastie.org/517177
+
+// NOTE: missing a destroy function
+
+// Register a jQuery bridge for a Class-based plugin:
+// $(sel).name(options)          instantiates (instance stored via $.data under 'name')
+// $(sel).name('method', args..) invokes 'method' on the existing instance
+$.plugin = function(name, object) {
+    $.fn[name] = function(options) {
+        // arguments after the first are forwarded to the method call
+        var args = Array.prototype.slice.call(arguments, 1);
+        return this.each(function() {
+            var instance = $.data(this, name);
+            if (instance) {
+                // already instantiated: 'options' is treated as a method name
+                instance[options].apply(instance, args);
+            } else {
+                instance = $.data(this, name, new object(options, this));
+            }
+        });
+    };
+};
+
+// set to either
+// * false or undefined or none : no debug
+// * true : trace all event calls
+// * [ 'in_progress', 'query_done' ] : would only trace to these events
+var plugin_debug=false;
+// NOTE(review): the line below overrides the 'false' above, so tracing of
+// 'in_progress' and 'query_done' is currently always enabled -- looks like a
+// debugging leftover; confirm before shipping
+plugin_debug = [ 'in_progress', 'query_done' ];
+
+// Base class for all manifold UI plugins: wraps a DOM element, subscribes to
+// per-query 'query' and 'record' channels, and dispatches incoming events to
+// optional on_<prefix><event> methods defined by subclasses.
+var Plugin = Class.extend({
+
+    init: function(options, element) {
+        // Mix in the passed in options with the default options
+        this.options = $.extend({}, this.default_options, options);
+
+        // Save the element reference, both as a jQuery
+        // reference and a normal reference
+        this.element = element;
+        this.$element = $(element);
+        // programmatically add specific class for publishing events
+        // used in manifold.js for triggering API events
+        if ( ! this.$element.hasClass('pubsub')) this.$element.addClass('pubsub');
+
+        // return this so we can chain/use the bridge with less code.
+        return this;
+    },
+
+    // true when the subclass implements at least the filter_added handler
+    has_query_handler: function() {
+        return (typeof this.on_filter_added === 'function');
+    },
+
+    // do we need to log API calls ?
+    _is_in : function (obj, arr) {
+        for(var i=0; i<arr.length; i++) {
+            if (arr[i] == obj) return true;
+        }
+        // falls through: returns undefined (falsy) when not found
+    },
+    _deserves_logging: function (event) {
+        if ( ! plugin_debug ) return false;
+        else if ( plugin_debug === true) return true;
+        else if (this._is_in (event, plugin_debug)) return true;
+        return false;
+    },
+
+    // Map a query-channel event code to an on_<prefix><event> method and call
+    // it with 'data' if the subclass defines it.
+    _query_handler: function(prefix, event_type, data) {
+        // We suppose this.query_handler_prefix has been defined if this
+        // callback is triggered
+        var event, fn;
+        switch(event_type) {
+            case FILTER_ADDED:
+                event = 'filter_added';
+                break;
+            case FILTER_REMOVED:
+                event = 'filter_removed';
+                break;
+            case CLEAR_FILTERS:
+                event = 'filter_clear';
+                break;
+            case FIELD_ADDED:
+                event = 'field_added';
+                break;
+            case FIELD_REMOVED:
+                event = 'field_removed';
+                break;
+            case CLEAR_FIELDS:
+                event = 'field_clear';
+                break;
+            default:
+                // unknown event codes are silently ignored
+                return;
+        } // switch
+
+        fn = 'on_' + prefix + event;
+        if (typeof this[fn] === 'function') {
+            if (this._deserves_logging (event)) {
+                var classname=this.classname;
+                messages.debug("Plugin._query_handler: calling "+fn+" on "+classname);
+            }
+            // call with data as parameter
+            // XXX implement anti loop
+            this[fn](data);
+        }
+    },
+
+    // Same dispatch as _query_handler, for record-channel events.
+    _record_handler: function(prefix, event_type, record) {
+        // We suppose this.query_handler_prefix has been defined if this
+        // callback is triggered
+        var event, fn;
+        switch(event_type) {
+            case NEW_RECORD:
+                event = 'new_record';
+                break;
+            case CLEAR_RECORDS:
+                event = 'clear_records';
+                break;
+            case IN_PROGRESS:
+                event = 'query_in_progress';
+                break;
+            case DONE:
+                event = 'query_done';
+                break;
+            case FIELD_STATE_CHANGED:
+                event = 'field_state_changed';
+                break;
+            default:
+                return;
+        } // switch
+
+        fn = 'on_' + prefix + event;
+        if (typeof this[fn] === 'function') {
+            if (this._deserves_logging (event)) {
+                var classname=this.classname;
+                messages.debug("Plugin._record_handler: calling "+fn+" on "+classname);
+            }
+            // call with data as parameter
+            // XXX implement anti loop
+            this[fn](record);
+        }
+    },
+
+    // Build a handler bound to this plugin ($.proxy keeps 'this' correct
+    // inside the jQuery event callback).
+    get_handler_function: function(type, prefix) {
+
+        return $.proxy(function(e, event_type, record) {
+            return this['_' + type + '_handler'](prefix, event_type, record);
+        }, this);
+    },
+
+    // Subscribe this plugin's element to both channels of a query.
+    listen_query: function(query_uuid, prefix) {
+        // default: prefix = ''
+        prefix = (typeof prefix === 'undefined') ? '' : (prefix + '_');
+
+        this.$element.on(manifold.get_channel('query', query_uuid), this.get_handler_function('query', prefix));
+        this.$element.on(manifold.get_channel('record', query_uuid), this.get_handler_function('record', prefix));
+    },
+
+    default_options: {},
+
+    /* Helper functions for naming HTML elements (ID, classes), with support for filters and fields */
+
+    // plugin_uuid + separator-joined arguments
+    id: function() {
+        var ret = this.options.plugin_uuid;
+        for (var i = 0; i < arguments.length; i++) {
+            ret = ret + manifold.separator + arguments[i];
+        }
+        return ret;
+    },
+
+    elmt: function() {
+        if (arguments.length == 0) {
+            return $('#' + this.id());
+        } else {
+            // We make sure to search _inside_ the dom tag of the plugin
+            return $('#' + this.id.apply(this, arguments), this.elmt());
+        }
+    },
+
+    // all elements of class 'cls' inside this plugin's DOM subtree
+    elts: function(cls) {
+        return $('.' + cls, this.elmt());
+    },
+
+    id_from_filter: function(filter, use_value) {
+        use_value = typeof use_value !== 'undefined' ? use_value : true;
+
+        var key = filter[0];
+        var op = filter[1];
+        var value = filter[2];
+        // NOTE(review): getOperatorLabel is not defined in this class --
+        // presumably supplied by subclasses; confirm
+        var op_str = this.getOperatorLabel(op);
+        var s = manifold.separator;
+
+        if (use_value) {
+            return 'filter' + s + key + s + op_str + s + value;
+        } else {
+            return 'filter' + s + key + s + op_str;
+        }
+    },
+
+    // human-readable "key op value"
+    str_from_filter: function(filter) {
+        return filter[0] + ' ' + filter[1] + ' ' + filter[2];
+    },
+
+    array_from_id: function(id) {
+        var ret = id.split(manifold.separator);
+        ret.shift(); // remove plugin_uuid at the beginning
+        return ret;
+    },
+
+    id_from_field: function(field) {
+        return 'field' + manifold.separator + field;
+    },
+
+    field_from_id: function(id) {
+        var array;
+        if (typeof id === 'string') {
+            array = id.split(manifold.separator);
+        } else { // We suppose we have an array ('object')
+            array = id;
+        }
+        // array = ['field', FIELD_NAME]
+        return array[1];
+    },
+
+    id_from_key: function(key_field, value) {
+
+        return key_field + manifold.separator + this.escape_id(value).replace(/\\/g, '');
+    },
+
+    // NOTE
+    // at some point in time we used to have a helper function named 'flat_id' here
+    // the goals was to sort of normalize id's but it turned out we can get rid of that
+    // in a nutshell, we would have an id (can be urn, hrn, whatever) and
+    // we want to be able to retrieve a DOM element based on that (e.g. a checkbox)
+    // so we did something like <tag id="some-id-that-comes-from-the-db">
+    // and then $("#some-id-that-comes-from-the-db")
+    // however the syntax for that selector prevents from using some characters in id
+    // and so for some of our ids this won't work
+    // instead of 'flattening' we now do this instead
+    // <tag some_id="then!we:can+use.what$we!want">
+    // and to retrieve it
+    // $("[some_id='then!we:can+use.what$we!want']")
+    // which thanks to the quotes, works; and you can use this with id as well in fact
+    // of course if now we have quotes in the id it's going to squeak, but well..
+
+    // escape (read: backslashes) some meta-chars in input
+    escape_id: function(id) {
+        if( id !== undefined){
+            return id.replace( /(:|\.|\[|\])/g, "\\$1" );
+        }else{
+            return "undefined-id";
+        }
+    },
+
+    // Build a DOM id from a record, using the object's (single-field) key.
+    id_from_record: function(method, record) {
+        var keys = manifold.metadata.get_key(method);
+        if (!keys)
+            return;
+        // composite keys are not supported
+        if (keys.length > 1)
+            return;
+
+        var key = keys[0];
+        switch (Object.toType(key)) {
+            case 'string':
+                if (!(key in record))
+                    return null;
+                return this.id_from_key(key, record[key]);
+
+            default:
+                throw 'Not implemented';
+        }
+    },
+
+    key_from_id: function(id) {
+        // NOTE this works only for simple keys
+
+        var array;
+        if (typeof id === 'string') {
+            array = id.split(manifold.separator);
+        } else { // We suppose we have an array ('object')
+            array = id;
+        }
+
+        // arguments has the initial id but lacks the key field name (see id_from_key), so we are even
+        // we finally add +1 for the plugin_uuid at the beginning
+        // NOTE(review): indexing by arguments.length assumes callers pass
+        // extra leading components as arguments -- confirm call sites
+        return array[arguments.length + 1];
+    },
+
+    // TOGGLE
+    // plugin-helper.js is about managing toggled state
+    // it would be beneficial to merge it in here
+    toggle_on: function () { return this.toggle("true"); },
+    toggle_off: function () { return this.toggle("false"); },
+    toggle: function (status) {
+        plugin_helper.set_toggle_status (this.options.plugin_uuid,status);
+    },
+
+    /* SPIN */
+    // use spin() to get our default spin settings (called presets)
+    // use spin(true) to get spin's builtin defaults
+    // you can also call spin_presets() yourself and tweak what you need to, like topmenuvalidation does
+    spin: function (presets) {
+        // NOTE(review): 'var presets' re-declares the parameter (harmless in JS)
+        var presets = ( presets === undefined ) ? spin_presets() : presets;
+        try { this.$element.spin(presets); }
+        catch (err) { messages.debug("Cannot turn on spin " + err); }
+    },
+
+    unspin: function() {
+        try { this.$element.spin(false); }
+        catch (err) { messages.debug("Cannot turn off spin " + err); }
+    },
+
+    /* TEMPLATE */
+
+    // Render the Mustache template stored in child element 'name' with ctx.
+    load_template: function(name, ctx) {
+        return Mustache.render(this.elmt(name).html(), ctx);
+    },
+
+});
--- /dev/null
+/* Buffered DOM updates */
+var RecordGenerator = Class.extend({
+
+ init: function(query, generators, number)
+ {
+ this._query = query;
+ this._generators = generators;
+ this._number = number;
+ },
+
+ random_int: function(options)
+ {
+ var default_options = {
+ max: 1000
+ }
+
+ if (typeof options == 'object')
+ options = $.extend(default_options, options);
+ else
+ options = default_options;
+
+ return Math.floor(Math.random()*(options.max+1));
+ },
+
+ random_string: function()
+ {
+ var default_options = {
+ possible: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
+ len: this.random_int({max: 15})
+ }
+
+ if (typeof options == 'object')
+ options = $.extend(default_options, options);
+ else
+ options = default_options;
+
+ var text = "";
+
+ for( var i=0; i < options.len; i++ )
+ text += options.possible.charAt(Math.floor(Math.random() * options.possible.length));
+
+ return text;
+
+ },
+
+ generate_record: function()
+ {
+ var self = this;
+ var record = {};
+
+ $.each(this._query.fields, function(i, field) {
+ record[field] = self[self._generators[field]]();
+ });
+
+ // Publish records
+ manifold.raise_record_event(self._query.query_uuid, NEW_RECORD, record);
+
+ },
+
+ run: function()
+ {
+ var record;
+ manifold.raise_record_event(this._query.query_uuid, CLEAR_RECORDS);
+ for (var i = 0; i < this._number; i++) {
+ record = this.generate_record();
+ /* XXX publish record */
+ }
+ manifold.raise_record_event(this._query.query_uuid, DONE);
+
+ }
+});
--- /dev/null
+# Written by Brendan O'Connor, brenocon@gmail.com, www.anyall.org
+# * Originally written Aug. 2005
+# * Posted to gist.github.com/16173 on Oct. 2008
+
+# Copyright (c) 2003-2006 Open Source Applications Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re, sys, types
+
+"""
+Have all your function & method calls automatically logged, in indented outline
+form - unlike the stack snapshots in an interactive debugger, it tracks call
+structure & stack depths across time!
+
+It hooks into all function calls that you specify, and logs each time they're
+called. I find it especially useful when I don't know what's getting called
+when, or need to continuously test for state changes. (by hacking this file)
+
+Originally inspired from the python cookbook:
+http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/198078
+
+Currently you can
+ - tag functions or individual methods to be autologged
+ - tag an entire class's methods to be autologged
+ - tag an entire module's classes and functions to be autologged
+
+TODO:
+ - allow tagging of ALL modules in the program on startup?
+
+CAVEATS:
+ - certain classes barf when you logclass() them -- most notably,
+ SWIG-generated wrappers, and perhaps others.
+
+USAGE: see examples on the bottom of this file.
+
+
+Viewing tips
+============
+
+If your terminal can't keep up, try xterm or putty, they seem to be highest
+performance. xterm is available for all platforms through X11...
+
+Also try: (RunChandler > log &); tail -f log
+
+Also, you can "less -R log" afterward and get the colors correct.
+
+If you have long lines, less -RS kills wrapping, enhancing readability. Also
+can chop at formatAllArgs().
+
+If you want long lines to be chopped realtime, try piping through less::
+
+ RunChandler | less -RS
+
+but then you have to hit 'space' lots to prevent chandler from freezing.
+less's 'F' command is supposed to do this correctly but doesn't work for me.
+"""
+
+
+#@@@ should use the standard python logging system?
+log = sys.stdout
+
+# Globally incremented across function calls, so tracks stack depth
+indent = 0
+indStr = ' '
+
+
+# ANSI escape codes for terminals.
+# X11 xterm: always works, all platforms
+# cygwin dosbox: run through |cat and then colors work
+# linux: works on console & gnome-terminal
+# mac: untested
+
+
+BLACK = "\033[0;30m"
+BLUE = "\033[0;34m"
+GREEN = "\033[0;32m"
+CYAN = "\033[0;36m"
+RED = "\033[0;31m"
+PURPLE = "\033[0;35m"
+BROWN = "\033[0;33m"
+GRAY = "\033[0;37m"
+BOLDGRAY = "\033[1;30m"
+BOLDBLUE = "\033[1;34m"
+BOLDGREEN = "\033[1;32m"
+BOLDCYAN = "\033[1;36m"
+BOLDRED = "\033[1;31m"
+BOLDPURPLE = "\033[1;35m"
+BOLDYELLOW = "\033[1;33m"
+WHITE = "\033[1;37m"
+
+NORMAL = "\033[0m"
+
+
+def indentlog(message):
+    """Write message to the module log, indented indStr*indent deep, and flush."""
+    global log, indStr, indent
+    print >>log, "%s%s" %(indStr*indent, message)
+    log.flush()
+
+def shortstr(obj):
+    """
+    Where to put gritty heuristics to make an object appear in most useful
+    form. defaults to __str__.
+    """
+    # special-case wxPython objects, whose default __str__ is unhelpful
+    if "wx." in str(obj.__class__) or obj.__class__.__name__.startswith("wx"):
+        shortclassname = obj.__class__.__name__
+        ##shortclassname = str(obj.__class__).split('.')[-1]
+        if hasattr(obj, "blockItem") and hasattr(obj.blockItem, "blockName"):
+            moreInfo = "block:'%s'" %obj.blockItem.blockName
+        else:
+            moreInfo = "at %d" %id(obj)
+        return "<%s %s>" % (shortclassname, moreInfo)
+    else:
+        return str(obj)
+
+def formatAllArgs(args, kwds):
+    """
+    makes a nice string representation of all the arguments
+    """
+    allargs = []
+    for item in args:
+        allargs.append('%s' % shortstr(item))
+    for key,item in kwds.items():
+        allargs.append('%s=%s' % (key,shortstr(item)))
+    formattedArgs = ', '.join(allargs)
+    if len(formattedArgs) > 150:
+        # chop over-long argument strings so log lines stay readable
+        return formattedArgs[:146] + " ..."
+    return formattedArgs
+
+
+def logmodules(listOfModules):
+ for m in listOfModules:
+ bindmodule(m)
+
+def logmodule(module, logMatch=".*", logNotMatch="nomatchasfdasdf"):
+    """
+    WARNING: this seems to break if you import SWIG wrapper classes
+    directly into the module namespace ... logclass() creates weirdness when
+    used on them, for some reason.
+
+    @param module: could be either an actual module object, or the string
+                   you can import (which seems to be the same thing as its
+                   __name__). So you can say logmodule(__name__) at the end
+                   of a module definition, to log all of it.
+    """
+
+    # a name is instrumented iff it matches logMatch and not logNotMatch
+    allow = lambda s: re.match(logMatch, s) and not re.match(logNotMatch, s)
+
+    if isinstance(module, str):
+        # import by name (Python 2 exec statement) then fetch the module object
+        d = {}
+        exec "import %s" % module in d
+        import sys
+        module = sys.modules[module]
+
+    names = module.__dict__.keys()
+    for name in names:
+        if not allow(name): continue
+
+        value = getattr(module, name)
+        if isinstance(value, type):
+            # wrap every method of the class
+            setattr(module, name, logclass(value))
+            print>>log,"autolog.logmodule(): bound %s" %name
+        elif isinstance(value, types.FunctionType):
+            setattr(module, name, logfunction(value))
+            print>>log,"autolog.logmodule(): bound %s" %name
+
+def logfunction(theFunction, displayName=None):
+ """a decorator."""
+ if not displayName: displayName = theFunction.__name__
+
+ def _wrapper(*args, **kwds):
+ global indent
+ argstr = formatAllArgs(args, kwds)
+
+ # Log the entry into the function
+ indentlog("%s%s%s (%s) " % (BOLDRED,displayName,NORMAL, argstr))
+ log.flush()
+
+ indent += 1
+ returnval = theFunction(*args,**kwds)
+ indent -= 1
+
+ # Log return
+ ##indentlog("return: %s"% shortstr(returnval)
+ return returnval
+ return _wrapper
+
+def logmethod(theMethod, displayName=None):
+ """use this for class or instance methods, it formats with the object out front."""
+ if not displayName: displayName = theMethod.__name__
+ def _methodWrapper(self, *args, **kwds):
+ "Use this one for instance or class methods"
+ global indent
+
+ argstr = formatAllArgs(args, kwds)
+ selfstr = shortstr(self)
+
+ #print >> log,"%s%s. %s (%s) " % (indStr*indent,selfstr,methodname,argstr)
+ indentlog("%s.%s%s%s (%s) " % (selfstr, BOLDRED,theMethod.__name__,NORMAL, argstr))
+ log.flush()
+
+ indent += 1
+
+ if theMethod.__name__ == 'OnSize':
+ indentlog("position, size = %s%s %s%s" %(BOLDBLUE, self.GetPosition(), self.GetSize(), NORMAL))
+
+ returnval = theMethod(self, *args,**kwds)
+
+ indent -= 1
+
+ return returnval
+ return _methodWrapper
+
+
+def logclass(cls, methodsAsFunctions=False,
+             logMatch=".*", logNotMatch="asdfnomatch"):
+    """
+    A class "decorator". But python doesn't support decorator syntax for
+    classes, so do it manually::
+
+        class C(object):
+           ...
+        C = logclass(C)
+
+    @param methodsAsFunctions: set to True if you always want methodname first
+    in the display.  Probably breaks if you're using class/staticmethods?
+    """
+
+    allow = lambda s: re.match(logMatch, s) and not re.match(logNotMatch, s) and \
+            s not in ('__str__','__repr__')
+
+    namesToCheck = cls.__dict__.keys()
+
+    for name in namesToCheck:
+        if not allow(name): continue
+        # unbound methods show up as mere functions in the values of
+        # cls.__dict__,so we have to go through getattr
+        value = getattr(cls, name)
+
+        if methodsAsFunctions and callable(value):
+            setattr(cls, name, logfunction(value))
+        elif isinstance(value, types.MethodType):
+            # Python 2 only: im_self distinguishes instance vs class methods
+            #a normal instance method
+            if value.im_self == None:
+                setattr(cls, name, logmethod(value))
+
+            #class & static method are more complex.
+            #a class method
+            elif value.im_self == cls:
+                w = logmethod(value.im_func,
+                              displayName="%s.%s" %(cls.__name__, value.__name__))
+                setattr(cls, name, classmethod(w))
+            else: assert False
+
+        #a static method
+        elif isinstance(value, types.FunctionType):
+            w = logfunction(value,
+                            displayName="%s.%s" %(cls.__name__, value.__name__))
+            setattr(cls, name, staticmethod(w))
+    return cls
+
+class LogMetaClass(type):
+    """
+    Alternative to logclass(), you set this as a class's __metaclass__.
+
+    It will not work if the metaclass has already been overridden (e.g.
+    schema.Item or zope.interface (used in Twisted)
+
+    Also, it should fail for class/staticmethods, that hasnt been added here
+    yet.
+    """
+
+    def __new__(cls,classname,bases,classdict):
+        # per-class overrides read from class-body attributes
+        logmatch = re.compile(classdict.get('logMatch','.*'))
+        lognotmatch = re.compile(classdict.get('logNotMatch', 'nevermatchthisstringasdfasdf'))
+
+        # mutating classdict while iterating is safe in Python 2, where
+        # items() returns a list copy
+        for attr,item in classdict.items():
+            if callable(item) and logmatch.match(attr) and not lognotmatch.match(attr):
+                classdict['_H_%s'%attr] = item # rebind the method
+                classdict[attr] = logmethod(item) # replace method by wrapper
+
+        return type.__new__(cls,classname,bases,classdict)
+
+
+
+# ---------------------------- Tests and examples ----------------------------
+
+if __name__=='__main__':
+    # Demonstrations of each tagging style; run this file under Python 2 and
+    # watch the indented call outline on stdout.
+    print; print "------------------- single function logging ---------------"
+    @logfunction
+    def test():
+        return 42
+
+    test()
+
+    print; print "------------------- single method logging -----------------"
+    class Test1(object):
+        def __init__(self):
+            self.a = 10
+
+        @logmethod
+        def add(self,a,b): return a+b
+
+        @logmethod
+        def fac(self,val):
+            if val == 1:
+                return 1
+            else:
+                return val * self.fac(val-1)
+
+        @logfunction
+        def fac2(self, val):
+            if val == 1:
+                return 1
+            else:
+                return val * self.fac2(val-1)
+
+    t = Test1()
+    t.add(5,6)
+    t.fac(4)
+    print "--- tagged as @logfunction, doesn't understand 'self' is special:"
+    t.fac2(4)
+
+
+    print; print """-------------------- class "decorator" usage ------------------"""
+    class Test2(object):
+        #will be ignored
+        def __init__(self):
+            self.a = 10
+        def ignoreThis(self): pass
+
+
+        def add(self,a,b):return a+b
+        def fac(self,val):
+            if val == 1:
+                return 1
+            else:
+                return val * self.fac(val-1)
+
+    Test2 = logclass(Test2, logMatch='fac|add')
+
+    t2 = Test2()
+    t2.add(5,6)
+    t2.fac(4)
+    t2.ignoreThis()
+
+
+    print; print "-------------------- metaclass usage ------------------"
+    class Test3(object):
+        __metaclass__ = LogMetaClass
+        logNotMatch = 'ignoreThis'
+
+        def __init__(self): pass
+
+        def fac(self,val):
+            if val == 1:
+                return 1
+            else:
+                return val * self.fac(val-1)
+        def ignoreThis(self): pass
+    t3 = Test3()
+    t3.fac(4)
+    t3.ignoreThis()
+
+    print; print "-------------- testing static & classmethods --------------"
+    class Test4(object):
+        @classmethod
+        def cm(cls, a, b):
+            print cls
+            return a+b
+
+        def im(self, a, b):
+            print self
+            return a+b
+
+        @staticmethod
+        def sm(a,b): return a+b
+
+    Test4 = logclass(Test4)
+
+    Test4.cm(4,3)
+    Test4.sm(4,3)
+
+    t4 = Test4()
+    t4.im(4,3)
+    t4.sm(4,3)
+    t4.cm(4,3)
+
+    #print; print "-------------- static & classmethods: where to put decorators? --------------"
+    #class Test5(object):
+        #@classmethod
+        #@logmethod
+        #def cm(cls, a, b):
+            #print cls
+            #return a+b
+        #@logmethod
+        #def im(self, a, b):
+            #print self
+            #return a+b
+
+        #@staticmethod
+        #@logfunction
+        #def sm(a,b): return a+b
+
+
+    #Test5.cm(4,3)
+    #Test5.sm(4,3)
+
+    #t5 = Test5()
+    #t5.im(4,3)
+    #t5.sm(4,3)
+    #t5.cm(4,3)
--- /dev/null
+from manifold.operators import LAST_RECORD
+import threading
+
+#------------------------------------------------------------------
+# Class callback
+#------------------------------------------------------------------
+
+class Callback:
+ def __init__(self, deferred=None, router=None, cache_id=None):
+ #def __init__(self, deferred=None, event=None, router=None, cache_id=None):
+ self.results = []
+ self._deferred = deferred
+
+ #if not self.event:
+ self.event = threading.Event()
+ #else:
+ # self.event = event
+
+ # Used for caching...
+ self.router = router
+ self.cache_id = cache_id
+
+ def __call__(self, value):
+ # End of the list of records sent by Gateway
+ if value == LAST_RECORD:
+ if self.cache_id:
+ # Add query results to cache (expires in 30min)
+ #print "Result added to cached under id", self.cache_id
+ self.router.cache[self.cache_id] = (self.results, time.time() + CACHE_LIFETIME)
+
+ if self._deferred:
+ # Send results back using deferred object
+ self._deferred.callback(self.results)
+ else:
+ # Not using deferred, trigger the event to return results
+ self.event.set()
+ return self.event
+
+ # Not LAST_RECORD add the value to the results
+ self.results.append(value)
+
+ def wait(self):
+ self.event.wait()
+ self.event.clear()
+
+ def get_results(self):
+ self.wait()
+ return self.results
+
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Implements a clause
+# - a "tree" (more precisely a predecessor map, typically computed thanks to a DFS)
+# - a set of needed fields (those queried by the user)
+#
+# Copyright (C) UPMC Paris Universitas
+# Authors:
+# Jordan Augé <jordan.auge@lip6.fr>
+# Marc-Olivier Buob <marc-olivier.buob@lip6.fr>
+
+import pyparsing as pp
+import operator, re
+
+from manifold.util.predicate import Predicate
+from types import StringTypes
+
+# XXX When to use Keyword vs. Regex vs. CaselessLiteral
+# XXX capitalization ?
+
+# Instead of CaselessLiteral, try using CaselessKeyword. Keywords are better
+# choice for grammar keywords, since they inherently avoid mistaking the leading
+# 'in' of 'inside' as the keyword 'in' in your grammar.
+
+
+class Clause(object):
+    """
+    A boolean clause over Predicates: either unary (op, operand) or binary
+    (operand, op, operand), with operators resolved via Predicate.operators.
+    """
+
+    def __new__(cls, *args, **kwargs):
+        # Clause("...") parses the string instead of building a node directly.
+        # NOTE(review): this returns the parser's output (a pyparsing
+        # ParseResults), not necessarily a Clause instance -- confirm callers
+        # expect that.
+        if len(args) == 1 and isinstance(args[0], StringTypes):
+            return ClauseStringParser().parse(args[0])
+        # NOTE(review): forwarding *args to object.__new__ is deprecated in
+        # Python 2.6+ and a TypeError in Python 3
+        return super(Clause, cls).__new__(cls, *args, **kwargs)
+
+    def __init__(self, *args, **kwargs):
+        if len(args) == 2:
+            # unary
+            self.operator = Predicate.operators[args[0]]
+            self.operands = [args[1]]
+        elif len(args) == 3:
+            # binary: (left operand, op, right operand)
+            self.operator = Predicate.operators[args[1]]
+            self.operands = [args[0], args[2]]
+        else:
+            raise Exception, "Clause can only be unary or binary"
+
+    def opstr(self, operator):
+        """Reverse lookup: return the string symbol for an operator value."""
+        ops = [string for string, op in Predicate.operators.items() if op == operator]
+        return ops[0] if ops else ''
+
+    def __repr__(self):
+        if len(self.operands) == 1:
+            return "%s(%s)" % (self.operator, self.operands[0])
+        else:
+            return "(%s %s %s)" % (self.operands[0], self.opstr(self.operator), self.operands[1])
+
+class ClauseStringParser(object):
+    """pyparsing grammar turning strings like 'a = "x" && b > "y"' into clauses."""
+
+    def __init__(self):
+        """
+        BNF HERE
+        """
+
+        #integer = pp.Word(nums)
+        #floatNumber = pp.Regex(r'\d+(\.\d*)?([eE]\d+)?')
+        point = pp.Literal( "." )
+        e = pp.CaselessLiteral( "E" )
+
+        # Regex string representing the set of possible operators
+        # Example : ">=|<=|!=|>|<|="
+        # NOTE(review): only '|' is escaped; any other regex metacharacter in
+        # an operator symbol would corrupt the pattern. Alternation is also
+        # first-match, so '>' listed before '>=' would shadow it, and the
+        # order here depends on dict iteration order -- verify the keys of
+        # Predicate.operators.
+        OPERATOR_RX = '|'.join([re.sub('\|', '\|', o) for o in Predicate.operators.keys()])
+
+        # predicate
+        field = pp.Word(pp.alphanums + '_')
+        operator = pp.Regex(OPERATOR_RX).setName("operator")
+        value = pp.QuotedString('"') #| pp.Combine( pp.Word( "+-"+ pp.nums, pp.nums) + pp.Optional( point + pp.Optional( pp.Word( pp.nums ) ) ) + pp.Optional( e + pp.Word( "+-"+pp.nums, pp.nums ) ) )
+
+        predicate = (field + operator + value).setParseAction(self.handlePredicate)
+
+        # clause of predicates
+        and_op = pp.CaselessLiteral("and") | pp.Keyword("&&")
+        or_op = pp.CaselessLiteral("or") | pp.Keyword("||")
+        not_op = pp.Keyword("!")
+
+        # precedence: '!' binds tightest, then 'and', then 'or'
+        predicate_precedence_list = [
+            (not_op, 1, pp.opAssoc.RIGHT, lambda x: self.handleClause(*x)),
+            (and_op, 2, pp.opAssoc.LEFT, lambda x: self.handleClause(*x)),
+            (or_op, 2, pp.opAssoc.LEFT, lambda x: self.handleClause(*x))
+        ]
+        clause = pp.operatorPrecedence(predicate, predicate_precedence_list)
+
+        self.bnf = clause
+
+    def handlePredicate(self, args):
+        # parse action: (field, op, value) tokens -> Predicate
+        return Predicate(*args)
+
+    def handleClause(self, args):
+        # parse action: operator-precedence group -> Clause
+        return Clause(*args)
+
+    def parse(self, string):
+        """Parse the whole input; raises pyparsing.ParseException on error."""
+        return self.bnf.parseString(string,parseAll=True)
+
+if __name__ == "__main__":
+    # Smoke test of both entry points (explicit parser and Clause.__new__ hook).
+    print ClauseStringParser().parse('country == "Europe" || ts > "01-01-2007" && country == "France"')
+    print Clause('country == "Europe" || ts > "01-01-2007" && country == "France"')
--- /dev/null
+# ANSI escape codes for terminals.
+# X11 xterm: always works, all platforms
+# cygwin dosbox: run through |cat and then colors work
+# linux: works on console & gnome-terminal
+# mac: untested
+
+BLACK = "\033[0;30m"
+BLUE = "\033[0;34m"
+GREEN = "\033[0;32m"
+CYAN = "\033[0;36m"
+RED = "\033[0;31m"
+PURPLE = "\033[0;35m"
+BROWN = "\033[0;33m"
+GRAY = "\033[0;37m"
+BOLDGRAY = "\033[1;30m"
+BOLDBLUE = "\033[1;34m"
+BOLDGREEN = "\033[1;32m"
+BOLDCYAN = "\033[1;36m"
+BOLDRED = "\033[1;31m"
+BOLDPURPLE = "\033[1;35m"
+BOLDYELLOW = "\033[1;33m"
+WHITE = "\033[1;37m"
+
+MYGREEN = '\033[92m'
+MYBLUE = '\033[94m'
+MYWARNING = '\033[93m'
+MYRED = '\033[91m'
+MYHEADER = '\033[95m'
+MYEND = '\033[0m'
+
+NORMAL = "\033[0m"
+
+if __name__ == '__main__':
+    # Display color names in their color
+    # (safe in Python 2: locals().items() returns a snapshot list)
+    for name, color in locals().items():
+        # skip dunders such as __name__/__doc__
+        if name.startswith('__'): continue
+        print color, name, MYEND
+
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Daemon: superclass used to implement a daemon easily
+#
+# Copyright (C)2009-2012, UPMC Paris Universitas
+# Authors:
+# Marc-Olivier Buob <marc-olivier.buob@lip6.fr>
+
+# see also: http://www.jejik.com/files/examples/daemon3x.py
+
+# This is used to import the daemon package instead of the local module which is
+# named identically...
+from __future__ import absolute_import
+
+from manifold.util.singleton import Singleton
+from manifold.util.log import Log
+from manifold.util.options import Options
+
+import atexit, os, signal, lockfile, logging, sys
+
+class Daemon(object):
+ __metaclass__ = Singleton
+
+ DEFAULTS = {
+ # Running
+ "uid" : os.getuid(),
+ "gid" : os.getgid(),
+ "working_directory" : "/",
+ "debugmode" : False,
+ "no_daemon" : False,
+ "pid_filename" : "/var/run/%s.pid" % Options().get_name()
+ }
+
+ #-------------------------------------------------------------------------
+ # Checks
+ #-------------------------------------------------------------------------
+
+    def check_python_daemon(self):
+        """
+        \brief Check whether python-daemon is properly installed
+        \return True if everything is fine, False otherwise
+        """
+        # http://www.python.org/dev/peps/pep-3143/
+        ret = False
+        try:
+            import daemon
+            # the conflicting 'daemon' package lacks DaemonContext; the
+            # AttributeError below is what detects it
+            getattr(daemon, "DaemonContext")
+            ret = True
+        except AttributeError, e:
+            print e
+            # daemon and python-daemon conflict with each other
+            Log.critical("Please install python-daemon instead of daemon. Remove daemon first.")
+        except ImportError:
+            Log.critical("Please install python-daemon - easy_install python-daemon.")
+        return ret
+
+ #------------------------------------------------------------------------
+ # Initialization
+ #------------------------------------------------------------------------
+
+    def make_handler_rsyslog(self, rsyslog_host, rsyslog_port, log_level):
+        """
+        \brief (Internal usage) Prepare logging via rsyslog
+        \param rsyslog_host The hostname of the rsyslog server
+        \param rsyslog_port The port of the rsyslog server
+        \param log_level Log level
+        """
+        # Prepare the handler
+        # NOTE(review): 'handlers' is never imported in this module (only
+        # 'logging' is), so this line raises NameError as written -- it
+        # presumably needs 'import logging.handlers' and 'logging.handlers.'
+        # prefixes; confirm before use
+        shandler = handlers.SysLogHandler(
+            (rsyslog_host, rsyslog_port),
+            facility = handlers.SysLogHandler.LOG_DAEMON
+        )
+
+        # The log file must remain open while daemonizing
+        self.files_to_keep.append(shandler.socket)
+        self.prepare_handler(shandler, log_level)
+        return shandler
+
+    def make_handler_locallog(self, log_filename, log_level):
+        """
+        \brief (Internal usage) Prepare local logging
+        \param log_filename The file in which we write the logs
+        \param log_level Log level
+        """
+        # Create directory in which we store the log file
+        log_dir = os.path.dirname(log_filename)
+        if not os.path.exists(log_dir):
+            try:
+                os.makedirs(log_dir)
+            except OSError, why:
+                # NOTE(review): log_error is not defined in this module --
+                # probably meant Log.error; this path raises NameError
+                log_error("OS error: %s" % why)
+
+        # Prepare the handler
+        # NOTE(review): logging.handlers is only available if the submodule
+        # has been imported somewhere; 'import logging' alone does not
+        # guarantee it -- confirm with an explicit 'import logging.handlers'
+        shandler = logging.handlers.RotatingFileHandler(
+            log_filename,
+            backupCount = 0
+        )
+
+        # The log file must remain open while daemonizing
+        self.files_to_keep.append(shandler.stream)
+        self.prepare_handler(shandler, log_level)
+        return shandler
+
+    def prepare_handler(self, shandler, log_level):
+        """
+        \brief (Internal usage) Attach a formatter and level to a handler and
+        register it on self.log
+        \param shandler Handler used to log information
+        \param log_level Log level
+        """
+        shandler.setLevel(log_level)
+        formatter = logging.Formatter("%(asctime)s: %(name)s: %(levelname)s %(message)s")
+        shandler.setFormatter(formatter)
+        self.log.addHandler(shandler)
+        # NOTE(review): setLevel above treats log_level as a numeric level,
+        # while getattr(logging, log_level, ...) treats it as a level *name*
+        # (string); the two uses are inconsistent -- confirm the expected type
+        self.log.setLevel(getattr(logging, log_level, logging.INFO))
+
+    def __init__(
+        self,
+        #daemon_name,
+        terminate_callback = None
+        #uid = os.getuid(),
+        #gid = os.getgid(),
+        #working_directory = "/",
+        #pid_filename = None,
+        #no_daemon = False,
+        #debug = False,
+        #log = None, # logging.getLogger("plop")
+        #rsyslog_host = "localhost", # Pass None if no rsyslog server
+        #rsyslog_port = 514,
+        #log_file = None,
+        #log_level = logging.INFO
+    ):
+        """
+        \brief Constructor
+        \param daemon_name The name of the daemon
+        \param uid UID used to run the daemon
+        \param gid GID used to run the daemon
+        \param working_directory Working directory used to run the daemon.
+            Example: /var/lib/foo/
+        \param pid_filename Absolute path of the PID file
+            Example: /var/run/foo.pid
+            (ignored if no_daemon == True)
+        \param no_daemon Do not detach the daemon from the terminal
+        \param debug Run daemon in debug mode
+        \param log The logger, pass None if unused
+            Example: logging.getLogger('foo'))
+        \param rsyslog_host Rsyslog hostname, pass None if unused.
+            If rsyslog_host is set to None, log are stored locally
+        \param rsyslog_port Rsyslog port
+        \param log_file Absolute path of the local log file.
+            Example: /var/log/foo.log)
+        \param log_level Log level
+            Example: logging.INFO
+        """
+
+        # NOTE(review): most former constructor parameters (commented out
+        # above and below) have migrated to the Options singleton; only
+        # terminate_callback is still taken here.
+
+        # Daemon parameters
+        #self.daemon_name = daemon_name
+        self.terminate_callback = terminate_callback
+        #Options().uid = uid
+        #Options().gid = gid
+        #Options().working_directory = working_directory
+        #self.pid_filename = None if no_daemon else pid_filename
+        #Options().no_daemon = no_daemon
+        #Options().lock_file = None
+        #Options().debug = debug
+        #self.log = log
+        #self.rsyslog_host = rsyslog_host
+        #self.rsyslog_port = rsyslog_port
+        #self.log_file = log_file
+        #self.log_level = log_level
+
+        # Reference which file descriptors must remain opened while
+        # daemonizing (for instance the file descriptor related to
+        # the logger)
+        self.files_to_keep = []
+
+        # Initialize self.log (require self.files_to_keep)
+        #if self.log: # for debugging by using stdout, log may be equal to None
+        #    if rsyslog_host:
+        #        shandler = self.make_handler_rsyslog(
+        #            rsyslog_host,
+        #            rsyslog_port,
+        #            log_level
+        #        )
+        #    elif log_file:
+        #        shandler = self.make_handler_locallog(
+        #            log_file,
+        #            log_level
+        #        )
+
@classmethod
def init_options(cls):
    """
    \brief Register the command-line options shared by every daemon:
        uid, gid, working directory, debug mode, daemonization and
        pid file location. Defaults come from cls.DEFAULTS.
    """
    opt = Options()

    opt.add_option(
        "--uid", dest = "uid",
        help = "UID used to run the dispatcher.",
        default = cls.DEFAULTS['uid']
    )
    opt.add_option(
        "--gid", dest = "gid",
        help = "GID used to run the dispatcher.",
        default = cls.DEFAULTS['gid']
    )
    opt.add_option(
        "-w", "--working-directory", dest = "working_directory",
        help = "Working directory.",
        default = cls.DEFAULTS['working_directory']
    )
    # NOTE(review): action="store_false" means passing -D *clears*
    # debugmode — confirm this inversion is intended.
    opt.add_option(
        "-D", "--debugmode", action = "store_false", dest = "debugmode",
        help = "Daemon debug mode (useful for developers).",
        default = cls.DEFAULTS['debugmode']
    )
    opt.add_option(
        "-n", "--no-daemon", action = "store_true", dest = "no_daemon",
        help = "Run as daemon (detach from terminal).",
        default = cls.DEFAULTS["no_daemon"]
    )
    opt.add_option(
        "-i", "--pid-file", dest = "pid_filename",
        help = "Absolute path to the pid-file to use when running as daemon.",
        default = cls.DEFAULTS['pid_filename']
    )
+
+
+
+ #------------------------------------------------------------------------
+ # Daemon stuff
+ #------------------------------------------------------------------------
+
def remove_pid_file(self):
    """
    \brief Remove the pid file (internal usage)
    """
    # The lock file is implicitly released while removing the pid file
    Log.debug("Removing %s" % Options().pid_filename)
    # Truthiness test instead of "== True" (idiom fix).
    if os.path.exists(Options().pid_filename):
        os.remove(Options().pid_filename)
+
def make_pid_file(self):
    """
    \brief Create a pid file in which we store the PID of the daemon if needed
    """
    if Options().pid_filename and not Options().no_daemon:
        atexit.register(self.remove_pid_file)
        # Context manager so the descriptor is closed deterministically
        # (the original file() handle was never closed).
        with open(Options().pid_filename, "w+") as f_pid:
            f_pid.write("%s\n" % str(os.getpid()))
+
def get_pid_from_pid_file(self):
    """
    \brief Retrieve the PID of the daemon thanks to the pid file.
    \return None if the pid file is not readable, does not exist,
        or does not contain a valid integer.
    """
    pid = None
    if Options().pid_filename:
        try:
            # Context manager closes the file even if int() raises
            # (the original leaked the handle on a parse error).
            with open(Options().pid_filename, "r") as f_pid:
                pid = int(f_pid.read().strip())
        except (IOError, ValueError):
            # Missing/unreadable file or corrupt content: report no PID.
            pid = None
    return pid
+
def make_lock_file(self):
    """
    \brief Prepare the lock file required to manage the pid file.
        Initialize Options().lock_file (None when running without
        a pid file or in foreground mode).
    """
    if Options().pid_filename and not Options().no_daemon:
        Log.debug("Daemonizing using pid file '%s'" % Options().pid_filename)
        Options().lock_file = lockfile.FileLock(Options().pid_filename)
        if Options().lock_file.is_locked():
            # Use Log.error for consistency: log_error is not defined
            # in this module (start() already logs via Log.error).
            Log.error("'%s' is already running ('%s' is locked)." % (Options().get_name(), Options().pid_filename))
            self.terminate()
        Options().lock_file.acquire()
    else:
        Options().lock_file = None
+
def start(self):
    """
    \brief Start the daemon: acquire the pid-file lock, build the
        daemon context, install signal handlers, then run self.main()
        (in the foreground when debugmode is set).
    """
    # Check whether daemon module is properly installed
    if self.check_python_daemon() == False:
        self.terminate()
    # Imported here, after the availability check above.
    import daemon

    # Prepare Options().lock_file
    self.make_lock_file()

    # Prepare the daemon context
    dcontext = daemon.DaemonContext(
        detach_process = (not Options().no_daemon),
        working_directory = Options().working_directory,
        pidfile = Options().lock_file if not Options().no_daemon else None,
        stdin = sys.stdin,
        stdout = sys.stdout,
        stderr = sys.stderr,
        uid = Options().uid,
        gid = Options().gid,
        # NOTE(review): reads Log().files_to_keep, not
        # self.files_to_keep — confirm which instance owns the list.
        files_preserve = Log().files_to_keep
    )

    # Prepare signal handling to stop properly if the daemon is killed
    # Note that signal.SIGKILL can't be handled:
    # http://crunchtools.com/unixlinux-signals-101/
    dcontext.signal_map = {
        signal.SIGTERM : self.signal_handler,
        signal.SIGQUIT : self.signal_handler,
        signal.SIGINT : self.signal_handler
    }

    # Debug mode runs in the foreground; otherwise detach inside the
    # daemon context and write the pid file from the child process.
    if Options().debugmode == True:
        self.main()
    else:
        with dcontext:
            self.make_pid_file()
            try:
                self.main()
            except Exception, why:
                Log.error("Unhandled exception in start: %s" % why)
+
def signal_handler(self, signal_id, frame):
    """
    \brief Stop the daemon (signal handler)
        The lockfile is implicitly released by the daemon package
    \param signal_id The integer identifying the signal
        (see also "man 7 signal")
        Example: 15 if the received signal is signal.SIGTERM
    \param frame Current stack frame when the signal was received
        (required by the signal-handler protocol, unused here)
    """
    self.terminate()
+
def stop(self):
    # NOTE(review): self.daemon_name is not set by this class's
    # __init__ (the assignment is commented out) — confirm that
    # subclasses define it before stop() is called.
    Log.debug("Stopping '%s'" % self.daemon_name)
+
def terminate(self):
    """Shut the daemon down: run the user-supplied callback when one
    was given, otherwise exit the process cleanly."""
    callback = self.terminate_callback
    if not callback:
        sys.exit(0)
    else:
        callback()
+
+Daemon.init_options()
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Depth first search algorithm
+# Based on http://www.boost.org/doc/libs/1_52_0/libs/graph/doc/depth_first_search.html
+#
+# Copyright (C) UPMC Paris Universitas
+# Authors:
+# Marc-Olivier Buob <marc-olivier.buob@lip6.fr>
+# Jordan Augé <jordan.auge@lip6.fr>
+
class dfs_color:
    """Vertex colors used by the DFS traversal (see dfs / dfs_visit)."""
    WHITE = 1 # not yet visited
    GRAY = 2 # currently visited
    BLACK = 3 # visited
+
+#DFS(G)
+# for each vertex u in V
+# color[u] := WHITE
+# p[u] = u
+# end for
+# time := 0
+# if there is a starting vertex s
+# call DFS-VISIT(G, s)
+# for each vertex u in V
+# if color[u] = WHITE
+# call DFS-VISIT(G, u)
+# end for
+# return (p,d_time,f_time)
+
def dfs(graph, root, exclude_uv=None):
    """
    \brief Run the DFS algorithm
    \param graph The graph we explore (must provide nodes() and successors(u))
    \param root The starting vertex
    \param exclude_uv Optional predicate f(u, v) -> bool; edges for which
        it returns True are ignored during the traversal
    \return A dictionary which maps each vertex of the tree
        to its predecessor, None otherwise.
        Only the root node has a predecessor equal to None.
        Nodes not referenced in this dictionary do not
        belong to the tree.
    """
    # Initialization: every vertex starts unvisited, with no predecessor.
    map_vertex_color = {}
    map_vertex_pred = {}
    for u in graph.nodes():
        map_vertex_color[u] = dfs_color.WHITE
        map_vertex_pred[u] = None

    # Recursive exploration from the root.
    if not exclude_uv:
        exclude_uv = lambda u, v: False
    dfs_visit(graph, root, map_vertex_color, map_vertex_pred, exclude_uv)

    # Drop unreachable vertices (no predecessor), keeping the root.
    # Iterate over a snapshot: deleting entries while iterating the
    # live dict view breaks under Python 3.
    for v, u in list(map_vertex_pred.items()):
        if u is None and v != root:
            del map_vertex_pred[v]

    return map_vertex_pred
+
+#DFS-VISIT(G, u)
+# color[u] := GRAY
+# d_time[u] := time := time + 1
+# for each v in Adj[u]
+# if (color[v] = WHITE)
+# p[v] = u
+# call DFS-VISIT(G, v)
+# else if (color[v] = GRAY)
+# ...
+# else if (color[v] = BLACK)
+# ...
+# end for
+# color[u] := BLACK
+# f_time[u] := time := time + 1
+
def dfs_visit(graph, u, map_vertex_color, map_vertex_pred, exclude_uv):
    """
    \brief Internal usage (DFS implementation)
    \param graph The graph we explore
    \param u The current node
    \param map_vertex_color: maps each vertex to a color
        - dfs_color.WHITE: iif the vertex is not reachable from the root node
        - dfs_color.BLACK: otherwise
    \param map_vertex_pred: maps each vertex to its predecessor (if any)
        visited during the DFS exploration, None otherwise
    \param exclude_uv Predicate f(u, v) -> bool; True means skip edge (u, v)
    """
    # Mark u as "being visited" so cycles are not re-entered.
    map_vertex_color[u] = dfs_color.GRAY
    for v in graph.successors(u):
        # Only descend into unvisited, non-excluded neighbors.
        if map_vertex_color[v] != dfs_color.WHITE:
            continue
        if exclude_uv(u, v):
            continue
        map_vertex_pred[v] = u
        dfs_visit(graph, v, map_vertex_color, map_vertex_pred, exclude_uv)
    map_vertex_color[u] = dfs_color.BLACK
+
--- /dev/null
class Enum(object):
    """Maps each given key name to a sequential integer (0, 1, ...),
    exposed as attributes, and keeps the inverse mapping in self.invmap
    for value-to-name lookups."""

    def __init__(self, *keys):
        for index, key in enumerate(keys):
            setattr(self, key, index)
        # Built before invmap itself lands in __dict__, so it only
        # contains the key/value pairs above.
        self.invmap = dict((v, k) for k, v in self.__dict__.items())

    def get_str(self, value):
        """Return the key name associated with an integer value."""
        return self.invmap[value]
--- /dev/null
+import copy
+
class frozendict(dict):
    """
    An immutable, hashable dict.

    Construction freezes dict values into frozendicts and list values
    into tuples (dict elements inside those lists are frozen too), so
    the result is hashable whenever its leaves are. All mutating dict
    methods raise AttributeError.
    """
    def _blocked_attribute(obj):
        # Stand-in bound to every mutating dict method below.
        # raise-with-call syntax (the original used the Python-2-only
        # "raise E, msg" form).
        raise AttributeError("A frozendict cannot be modified.")
    _blocked_attribute = property(_blocked_attribute)

    # Every mutator resolves to the blocking property above.
    __delitem__ = __setitem__ = clear = _blocked_attribute
    pop = popitem = setdefault = update = _blocked_attribute

    def __new__(cls, *args, **kw):
        new = dict.__new__(cls)

        args_ = []
        for arg in args:
            if isinstance(arg, dict):
                # Shallow copy first: never mutate the caller's dict.
                arg = copy.copy(arg)
                for k, v in arg.items():
                    if isinstance(v, dict):
                        arg[k] = frozendict(v)
                    elif isinstance(v, list):
                        v_ = list()
                        for elm in v:
                            if isinstance(elm, dict):
                                v_.append( frozendict(elm) )
                            else:
                                v_.append( elm )
                        arg[k] = tuple(v_)
                args_.append( arg )
            else:
                args_.append( arg )

        # NOTE(review): keyword values are stored as-is (not frozen) —
        # confirm callers only pass hashable keyword values.
        dict.__init__(new, *args_, **kw)
        return new

    def __init__(self, *args, **kw):
        # All initialization happens in __new__; nothing to do here.
        pass

    def __hash__(self):
        try:
            return self._cached_hash
        except AttributeError:
            # Compute once and memoize (plain attribute assignment is
            # still allowed; only the dict mutators are blocked).
            h = self._cached_hash = hash(tuple(sorted(self.items())))
            return h

    def __repr__(self):
        return "frozendict(%s)" % dict.__repr__(self)
--- /dev/null
+"""Borrowed from Django."""
+
+from threading import Lock
+
class LazyObject(object):
    """
    A wrapper for another class that can be used to delay instantiation of the
    wrapped class.

    By subclassing, you have the opportunity to intercept and alter the
    instantiation. If you don't need to do that, use SimpleLazyObject.
    """
    def __init__(self):
        # None means "not initialized yet"; _setup() must replace it.
        self._wrapped = None
        self._lock = Lock()

    def __getattr__(self, name):
        # Initialize under the lock, but release it even when _setup()
        # raises: the original acquire()/release() pair leaked the lock
        # on failure, deadlocking every later attribute access.
        with self._lock:
            if self._wrapped is None:
                self._setup()
        return getattr(self._wrapped, name)

    def __setattr__(self, name, value):
        if name in ["_wrapped", "_lock"]:
            # Assign to __dict__ to avoid infinite __setattr__ loops.
            self.__dict__[name] = value
        else:
            # NOTE(review): unlike __getattr__, this path is not
            # synchronized — confirm mutation is single-threaded.
            if self._wrapped is None:
                self._setup()
            setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can't delete _wrapped.")
        if self._wrapped is None:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        """
        Must be implemented by subclasses to initialise the wrapped object.
        """
        raise NotImplementedError

    # introspection support:
    __members__ = property(lambda self: self.__dir__())

    def __dir__(self):
        if self._wrapped is None:
            self._setup()
        return dir(self._wrapped)
+
--- /dev/null
+#!/usr/bin/python
+#
+# Copyright 2007 Google Inc.
+# Licensed to PSF under a Contributor Agreement.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
+
+This library is used to create/poke/manipulate IPv4 and IPv6 addresses
+and networks.
+
+"""
+
+__version__ = '2.1.10'
+
+import struct
+
+IPV4LENGTH = 32
+IPV6LENGTH = 128
+
+
+class AddressValueError(ValueError):
+ """A Value Error related to the address."""
+
+
+class NetmaskValueError(ValueError):
+ """A Value Error related to the netmask."""
+
+
def IPAddress(address, version=None):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address. Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.
        version: An Integer, 4 or 6. If set, don't try to automatically
          determine what the IP address type is. important for things
          like IPAddress(1), which could be IPv4, '0.0.0.1', or IPv6,
          '::1'.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    """
    # An explicit version short-circuits auto-detection.
    if version == 4:
        return IPv4Address(address)
    elif version == 6:
        return IPv6Address(address)

    # Otherwise probe v4 first, then v6.
    for address_class in (IPv4Address, IPv6Address):
        try:
            return address_class(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
                     address)
+
+
def IPNetwork(address, version=None, strict=False):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address. Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.
        version: An Integer, if set, don't try to automatically
          determine what the IP address type is. important for things
          like IPNetwork(1), which could be IPv4, '0.0.0.1/32', or IPv6,
          '::1/128'.
        strict: A boolean, forwarded to the network constructor; when
          True, reject addresses with host bits set.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address. Or if a strict network was requested and a strict
          network wasn't given.

    """
    # An explicit version short-circuits auto-detection.
    if version == 4:
        return IPv4Network(address, strict)
    elif version == 6:
        return IPv6Network(address, strict)

    # Otherwise probe v4 first, then v6.
    for network_class in (IPv4Network, IPv6Network):
        try:
            return network_class(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
                     address)
+
+
def v4_int_to_packed(address):
    """The binary representation of this address.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The binary representation of this address.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.
    """
    # Reject negatives explicitly: struct.pack('!I', ...) would raise a
    # confusing struct.error instead of a ValueError.
    if address < 0:
        raise ValueError('Address is negative')
    if address > _BaseV4._ALL_ONES:
        raise ValueError('Address too large for IPv4')
    return Bytes(struct.pack('!I', address))
+
+
def v6_int_to_packed(address):
    """The binary representation of this address.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The binary representation of this address.
    """
    # Split the 128-bit value into two big-endian 64-bit words.
    return Bytes(struct.pack('!QQ', address >> 64, address & (2**64 - 1)))
+
+
+def _find_address_range(addresses):
+ """Find a sequence of addresses.
+
+ Args:
+ addresses: a list of IPv4 or IPv6 addresses.
+
+ Returns:
+ A tuple containing the first and last IP addresses in the sequence.
+
+ """
+ first = last = addresses[0]
+ for ip in addresses[1:]:
+ if ip._ip == last._ip + 1:
+ last = ip
+ else:
+ break
+ return (first, last)
+
+def _get_prefix_length(number1, number2, bits):
+ """Get the number of leading bits that are same for two numbers.
+
+ Args:
+ number1: an integer.
+ number2: another integer.
+ bits: the maximum number of bits to compare.
+
+ Returns:
+ The number of leading bits that are the same for two numbers.
+
+ """
+ for i in range(bits):
+ if number1 >> i == number2 >> i:
+ return bits - i
+ return 0
+
+def _count_righthand_zero_bits(number, bits):
+ """Count the number of zero bits on the right hand side.
+
+ Args:
+ number: an integer.
+ bits: maximum number of bits to count.
+
+ Returns:
+ The number of zero bits on the right hand side of the number.
+
+ """
+ if number == 0:
+ return bits
+ for i in range(bits):
+ if (number >> i) % 2:
+ return i
+
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.

    Example:
        >>> summarize_address_range(IPv4Address('1.1.1.0'),
            IPv4Address('1.1.1.130'))
        [IPv4Network('1.1.1.0/25'), IPv4Network('1.1.1.128/31'),
        IPv4Network('1.1.1.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        The address range collapsed to a list of IPv4Network's or
        IPv6Network's.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version is not 4 or 6.

    """
    if not (isinstance(first, _BaseIP) and isinstance(last, _BaseIP)):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
            str(first), str(last)))
    if first > last:
        raise ValueError('last IP address must be greater than first')

    networks = []

    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')

    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # Largest power-of-two block aligned at first_int...
        nbits = _count_righthand_zero_bits(first_int, ip_bits)
        current = None
        while nbits >= 0:
            addend = 2**nbits - 1
            current = first_int + addend
            nbits -= 1
            # ...shrunk until it no longer overshoots the range end.
            if current <= last_int:
                break
        prefix = _get_prefix_length(first_int, current, ip_bits)
        net = ip('%s/%d' % (str(first), prefix))
        networks.append(net)
        # Stop before wrapping past the address space's last address.
        if current == ip._ALL_ONES:
            break
        first_int = current + 1
        first = IPAddress(first_int, version=first._version)
    return networks
+
def _collapse_address_list_recursive(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('1.1.0.0/24')
        ip2 = IPv4Network('1.1.1.0/24')
        ip3 = IPv4Network('1.1.2.0/24')
        ip4 = IPv4Network('1.1.3.0/24')
        ip5 = IPv4Network('1.1.4.0/24')
        ip6 = IPv4Network('1.1.0.1/22')

        _collapse_address_list_recursive([ip1, ip2, ip3, ip4, ip5, ip6]) ->
        [IPv4Network('1.1.0.0/22'), IPv4Network('1.1.4.0/24')]

    This shouldn't be called directly; it is called via
    collapse_address_list([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's
          (assumed sorted — see collapse_address_list).

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    """
    ret_array = []
    optimized = False

    for cur_addr in addresses:
        if not ret_array:
            ret_array.append(cur_addr)
            continue
        # Fully contained in the previous block: drop it.
        if cur_addr in ret_array[-1]:
            optimized = True
        # Second half of the previous block's supernet: merge the two.
        elif cur_addr == ret_array[-1].supernet().subnet()[1]:
            ret_array.append(ret_array.pop().supernet())
            optimized = True
        else:
            ret_array.append(cur_addr)

    # Merging may have created new adjacencies; recurse until stable.
    if optimized:
        return _collapse_address_list_recursive(ret_array)

    return ret_array
+
+
def collapse_address_list(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_address_list([IPv4('1.1.0.0/24'), IPv4('1.1.1.0/24')]) ->
        [IPv4('1.1.0.0/23')]

    Args:
        addresses: A list of IPv4Network or IPv6Network objects.

    Returns:
        A list of IPv4Network or IPv6Network objects depending on what we
        were passed.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    i = 0
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseIP):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    str(ip), str(ips[-1])))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A /32 (or /128) network is treated as a single address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    str(ip), str(ips[-1])))
            ips.append(ip.ip)
        else:
            if nets and nets[-1]._version != ip._version:
                # Fixed: the error message previously reported ips[-1]
                # instead of the conflicting nets[-1] (copy-paste bug).
                raise TypeError("%s and %s are not of the same version" % (
                    str(ip), str(nets[-1])))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))
    nets = sorted(set(nets))

    # Summarize each maximal run of consecutive addresses into networks.
    while i < len(ips):
        (first, last) = _find_address_range(ips[i:])
        i = ips.index(last) + 1
        addrs.extend(summarize_address_range(first, last))

    return _collapse_address_list_recursive(sorted(
        addrs + nets, key=_BaseNet._get_networks_key))
+
# Backwards-compatible alias for callers using the old CamelCase name.
CollapseAddrList = collapse_address_list
+
+# We need to distinguish between the string and packed-bytes representations
+# of an IP address. For example, b'0::1' is the IPv4 address 48.58.58.49,
+# while '0::1' is an IPv6 address.
+#
+# In Python 3, the native 'bytes' type already provides this functionality,
+# so we use it directly. For earlier implementations where bytes is not a
+# distinct type, we create a subclass of str to serve as a tag.
+#
+# Usage example (Python 2):
+# ip = ipaddr.IPAddress(ipaddr.Bytes('xxxx'))
+#
+# Usage example (Python 3):
+# ip = ipaddr.IPAddress(b'xxxx')
try:
    if bytes is str:
        # Python 2: bytes is just an alias of str, so fall through to
        # the tagging subclass defined below.
        raise TypeError("bytes is not a distinct type")
    Bytes = bytes
except (NameError, TypeError):
    # Python 2 (or no bytes builtin): tag packed data with a str subclass.
    class Bytes(str):
        def __repr__(self):
            return 'Bytes(%s)' % str.__repr__(self)
+
def get_mixed_type_key(obj):
    """Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different, so

        IPv4Address('1.1.1.1') <= IPv4Network('1.1.1.1/24')

    doesn't make any sense. There are some times however, where you may
    wish to have ipaddr sort these for you anyway. If you need to do
    this, you can use this function as the key= argument to sorted().

    Args:
        obj: either a Network or Address object.
    Returns:
        appropriate key, or NotImplemented for any other type.

    """
    if isinstance(obj, _BaseNet):
        return obj._get_networks_key()
    if isinstance(obj, _BaseIP):
        return obj._get_address_key()
    return NotImplemented
+
class _IPAddrBase(object):

    """The mother class.

    Subclasses are expected to provide self._ip (the address as an
    integer) and _explode_shorthand_ip_string().
    """

    def __index__(self):
        # Lets instances be used directly where an integer index is
        # expected (hex(), slicing, ...).
        return self._ip

    def __int__(self):
        return self._ip

    def __hex__(self):
        # Python 2 hex() protocol (Python 3 uses __index__ instead).
        return hex(self._ip)

    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()

    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return str(self)
+
+
class _BaseIP(_IPAddrBase):

    """A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    """

    def __eq__(self, other):
        try:
            return (self._ip == other._ip
                    and self._version == other._version)
        except AttributeError:
            # Not an address-like object: let the other side try.
            return NotImplemented

    def __ne__(self, other):
        eq = self.__eq__(other)
        if eq is NotImplemented:
            return NotImplemented
        return not eq

    def __le__(self, other):
        # Derived from __gt__ so the ordering stays consistent.
        gt = self.__gt__(other)
        if gt is NotImplemented:
            return NotImplemented
        return not gt

    def __ge__(self, other):
        lt = self.__lt__(other)
        if lt is NotImplemented:
            return NotImplemented
        return not lt

    def __lt__(self, other):
        # NOTE(review): other._version is read before the isinstance
        # check, so comparing against a non-IP object raises
        # AttributeError rather than TypeError — confirm callers
        # depend on neither.
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                str(self), str(other)))
        if not isinstance(other, _BaseIP):
            raise TypeError('%s and %s are not of the same type' % (
                str(self), str(other)))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False

    def __gt__(self, other):
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                str(self), str(other)))
        if not isinstance(other, _BaseIP):
            raise TypeError('%s and %s are not of the same type' % (
                str(self), str(other)))
        if self._ip != other._ip:
            return self._ip > other._ip
        return False

    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, int):
            return NotImplemented
        return IPAddress(int(self) + other, version=self._version)

    def __sub__(self, other):
        if not isinstance(other, int):
            return NotImplemented
        return IPAddress(int(self) - other, version=self._version)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, str(self))

    def __str__(self):
        return '%s' % self._string_from_ip_int(self._ip)

    def __hash__(self):
        # long() is Python-2-only; hashes the canonical hex form so v4
        # and v6 addresses with equal integers still hash alike.
        return hash(hex(long(self._ip)))

    def _get_address_key(self):
        # Version first so mixed-version sorts group v4 before v6.
        return (self._version, self)

    @property
    def version(self):
        raise NotImplementedError('BaseIP has no version')
+
+
+class _BaseNet(_IPAddrBase):
+
+ """A generic IP object.
+
+ This IP class contains the version independent methods which are
+ used by networks.
+
+ """
+
def __init__(self, address):
    # Per-instance memo for the derived network/broadcast/hostmask
    # values computed by the cached @property accessors.
    self._cache = {}
+
def __repr__(self):
    # e.g. IPv4Network('1.2.3.0/24')
    return '%s(%r)' % (self.__class__.__name__, str(self))
+
def iterhosts(self):
    """Generate Iterator over usable hosts in a network.

    This is like __iter__ except it doesn't return the network
    or broadcast addresses.

    """
    current = int(self.network) + 1
    last = int(self.broadcast) - 1
    while current <= last:
        yield IPAddress(current, version=self._version)
        current += 1
+
def __iter__(self):
    """Iterate over every address in the network, including the
    network and broadcast addresses."""
    current = int(self.network)
    last = int(self.broadcast)
    while current <= last:
        yield IPAddress(current, version=self._version)
        current += 1
+
def __getitem__(self, n):
    """Return the n-th address of the network; negative n indexes
    backwards from the broadcast address."""
    network = int(self.network)
    broadcast = int(self.broadcast)
    if n >= 0:
        if network + n > broadcast:
            raise IndexError
        return IPAddress(network + n, version=self._version)
    else:
        # Shift so that n == -1 maps to the broadcast address.
        n += 1
        if broadcast + n < network:
            raise IndexError
        return IPAddress(broadcast + n, version=self._version)
+
def __lt__(self, other):
    # NOTE(review): other._version is read before the isinstance
    # check, so comparing against a non-network object raises
    # AttributeError rather than TypeError — confirm callers depend
    # on neither.
    if self._version != other._version:
        raise TypeError('%s and %s are not of the same version' % (
            str(self), str(other)))
    if not isinstance(other, _BaseNet):
        raise TypeError('%s and %s are not of the same type' % (
            str(self), str(other)))
    # Order by network address first, then by netmask.
    if self.network != other.network:
        return self.network < other.network
    if self.netmask != other.netmask:
        return self.netmask < other.netmask
    return False
+
def __gt__(self, other):
    # Mirror of __lt__ — same version/type preconditions.
    if self._version != other._version:
        raise TypeError('%s and %s are not of the same version' % (
            str(self), str(other)))
    if not isinstance(other, _BaseNet):
        raise TypeError('%s and %s are not of the same type' % (
            str(self), str(other)))
    if self.network != other.network:
        return self.network > other.network
    if self.netmask != other.netmask:
        return self.netmask > other.netmask
    return False
+
def __le__(self, other):
    # Derived from __gt__ so the ordering stays consistent.
    gt = self.__gt__(other)
    if gt is NotImplemented:
        return NotImplemented
    return not gt
+
def __ge__(self, other):
    # Derived from __lt__ so the ordering stays consistent.
    lt = self.__lt__(other)
    if lt is NotImplemented:
        return NotImplemented
    return not lt
+
def __eq__(self, other):
    try:
        return (self._version == other._version
                and self.network == other.network
                and int(self.netmask) == int(other.netmask))
    except AttributeError:
        # Comparing against a bare address: equal when it denotes
        # the same single IP.
        if isinstance(other, _BaseIP):
            return (self._version == other._version
                    and self._ip == other._ip)
        # NOTE(review): falls through returning None (falsy) rather
        # than NotImplemented for unrelated types, which makes __ne__
        # return True — confirm this is acceptable to callers.
+
def __ne__(self, other):
    # Negation of __eq__, preserving NotImplemented.
    eq = self.__eq__(other)
    if eq is NotImplemented:
        return NotImplemented
    return not eq
+
def __str__(self):
    # "ip/prefixlen" form, e.g. "1.2.3.4/24".
    return '%s/%s' % (str(self.ip),
                      str(self._prefixlen))
+
def __hash__(self):
    # Hash ignores host bits, consistent with __eq__ (network+netmask).
    return hash(int(self.network) ^ int(self.netmask))
+
def __contains__(self, other):
    """Tell whether `other` (network or single address) lies entirely
    inside this network."""
    # always false if one is v4 and the other is v6.
    if self._version != other._version:
        return False
    # dealing with another network.
    if isinstance(other, _BaseNet):
        return (self.network <= other.network and
                self.broadcast >= other.broadcast)
    # dealing with another address
    else:
        return (int(self.network) <= int(other._ip) <=
                int(self.broadcast))
+
def overlaps(self, other):
    """Tell if self is partly contained in other."""
    # Two ranges overlap iff one contains an endpoint of the other.
    if self.network in other or self.broadcast in other:
        return True
    return other.network in self or other.broadcast in self
+
@property
def network(self):
    """Network address (host bits cleared), computed once then cached."""
    x = self._cache.get('network')
    if x is None:
        x = IPAddress(self._ip & int(self.netmask), version=self._version)
        self._cache['network'] = x
    return x
+
@property
def broadcast(self):
    """Broadcast address (host bits set), computed once then cached."""
    x = self._cache.get('broadcast')
    if x is None:
        x = IPAddress(self._ip | int(self.hostmask), version=self._version)
        self._cache['broadcast'] = x
    return x
+
@property
def hostmask(self):
    """Inverse of the netmask, computed once then cached."""
    x = self._cache.get('hostmask')
    if x is None:
        x = IPAddress(int(self.netmask) ^ self._ALL_ONES,
                      version=self._version)
        self._cache['hostmask'] = x
    return x
+
@property
def with_prefixlen(self):
    # "ip/prefixlen" form, e.g. "1.2.3.0/24".
    return '%s/%d' % (str(self.ip), self._prefixlen)
+
@property
def with_netmask(self):
    # "ip/netmask" form, e.g. "1.2.3.0/255.255.255.0".
    return '%s/%s' % (str(self.ip), str(self.netmask))
+
@property
def with_hostmask(self):
    # "ip/hostmask" (Cisco wildcard) form, e.g. "1.2.3.0/0.0.0.255".
    return '%s/%s' % (str(self.ip), str(self.hostmask))
+
@property
def numhosts(self):
    """Number of hosts in the current subnet."""
    # Both endpoints are inclusive, hence the +1.
    return int(self.broadcast) - int(self.network) + 1
+
@property
def version(self):
    # Abstract: IPv4Network / IPv6Network supply the concrete value.
    raise NotImplementedError('BaseNet has no version')
+
@property
def prefixlen(self):
    # Public read-only view of the internal prefix length.
    return self._prefixlen
+
def address_exclude(self, other):
    """Remove an address from a larger block.

    For example:

        addr1 = IPNetwork('10.1.1.0/24')
        addr2 = IPNetwork('10.1.1.0/26')
        addr1.address_exclude(addr2) =
            [IPNetwork('10.1.1.64/26'), IPNetwork('10.1.1.128/25')]

    or IPv6:

        addr1 = IPNetwork('::1/32')
        addr2 = IPNetwork('::1/128')
        addr1.address_exclude(addr2) = [IPNetwork('::0/128'),
            IPNetwork('::2/127'),
            IPNetwork('::4/126'),
            IPNetwork('::8/125'),
            ...
            IPNetwork('0:0:8000::/33')]

    Args:
        other: An IPvXNetwork object of the same type.

    Returns:
        A sorted list of IPvXNetwork objects addresses which is self
        minus other.

    Raises:
        TypeError: If self and other are of differing address
          versions, or if other is not a network object.
        ValueError: If other is not completely contained by self.
        AssertionError: If the halving walk below reaches an
          impossible state (indicates an internal bug).

    """
    if not self._version == other._version:
        raise TypeError("%s and %s are not of the same version" % (
            str(self), str(other)))

    if not isinstance(other, _BaseNet):
        raise TypeError("%s is not a network object" % str(other))

    if other not in self:
        raise ValueError('%s not contained in %s' % (str(other),
                                                     str(self)))
    if other == self:
        return []

    ret_addrs = []

    # Make sure we're comparing the network of other.
    other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)),
                      version=other._version)

    # Repeatedly halve self: keep the half not containing `other`,
    # recurse into the half that does, until a half equals `other`.
    s1, s2 = self.subnet()
    while s1 != other and s2 != other:
        if other in s1:
            ret_addrs.append(s2)
            s1, s2 = s1.subnet()
        elif other in s2:
            ret_addrs.append(s1)
            s1, s2 = s2.subnet()
        else:
            # Explicit raise instead of `assert True == False`: the
            # assert disappeared under python -O, silently looping.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (str(s1), str(s2), str(other)))
    if s1 == other:
        ret_addrs.append(s2)
    elif s2 == other:
        ret_addrs.append(s1)
    else:
        # If we got here, there's a bug somewhere.
        raise AssertionError('Error performing exclusion: '
                             's1: %s s2: %s other: %s' %
                             (str(s1), str(s2), str(other)))

    return sorted(ret_addrs, key=_BaseNet._get_networks_key)
+
def compare_networks(self, other):
    """Compare two IP objects.

    This is only concerned about the comparison of the integer
    representation of the network addresses. This means that the
    host bits aren't considered at all in this method. If you want
    to compare host bits, you can easily enough do a
    'HostA._ip < HostB._ip'

    Args:
        other: An IP object.

    Returns:
        If the IP versions of self and other are the same, returns:

        -1 if self < other:
            eg: IPv4('1.1.1.0/24') < IPv4('1.1.2.0/24')
            IPv6('1080::200C:417A') < IPv6('1080::200B:417B')
        0 if self == other
            eg: IPv4('1.1.1.1/24') == IPv4('1.1.1.2/24')
            IPv6('1080::200C:417A/96') == IPv6('1080::200C:417B/96')
        1 if self > other
            eg: IPv4('1.1.1.0/24') > IPv4('1.1.0.0/24')
            IPv6('1080::1:200C:417A/112') >
            IPv6('1080::0:200C:417A/112')

        If the IP versions of self and other are different, returns:

        -1 if self._version < other._version
            eg: IPv4('10.0.0.1/24') < IPv6('::1/128')
        1 if self._version > other._version
            eg: IPv6('::1/128') > IPv4('255.255.255.0/24')

    """
    # Version is the most significant criterion (v4 sorts before v6).
    if self._version < other._version:
        return -1
    if self._version > other._version:
        return 1
    # self._version == other._version below here:
    if self.network < other.network:
        return -1
    if self.network > other.network:
        return 1
    # self.network == other.network below here:
    if self.netmask < other.netmask:
        return -1
    if self.netmask > other.netmask:
        return 1
    # self.network == other.network and self.netmask == other.netmask
    return 0
+
+ def _get_networks_key(self):
+ """Network-only key function.
+
+ Returns an object that identifies this address' network and
+ netmask. This function is a suitable "key" argument for sorted()
+ and list.sort().
+
+ """
+ return (self._version, self.network, self.netmask)
+
+ def _ip_int_from_prefix(self, prefixlen=None):
+ """Turn the prefix length netmask into a int for comparison.
+
+ Args:
+ prefixlen: An integer, the prefix length.
+
+ Returns:
+ An integer.
+
+ """
+ if not prefixlen and prefixlen != 0:
+ prefixlen = self._prefixlen
+ return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen)
+
+ def _prefix_from_ip_int(self, ip_int, mask=32):
+ """Return prefix length from the decimal netmask.
+
+ Args:
+ ip_int: An integer, the IP address.
+ mask: The netmask. Defaults to 32.
+
+ Returns:
+ An integer, the prefix length.
+
+ """
+ while mask:
+ if ip_int & 1 == 1:
+ break
+ ip_int >>= 1
+ mask -= 1
+
+ return mask
+
+ def _ip_string_from_prefix(self, prefixlen=None):
+ """Turn a prefix length into a dotted decimal string.
+
+ Args:
+ prefixlen: An integer, the netmask prefix length.
+
+ Returns:
+ A string, the dotted decimal netmask string.
+
+ """
+ if not prefixlen:
+ prefixlen = self._prefixlen
+ return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen))
+
    def iter_subnets(self, prefixlen_diff=1, new_prefix=None):
        """The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), return a list with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)

        """
        # A single-host network has no smaller subnets; yield it as-is.
        if self._prefixlen == self._max_prefixlen:
            yield self
            return

        if new_prefix is not None:
            if new_prefix < self._prefixlen:
                raise ValueError('new prefix must be longer')
            # prefixlen_diff still at its default of 1 means the caller
            # did not set both parameters at once.
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = new_prefix - self._prefixlen

        # NOTE(review): the message says "> 0" but the check permits a
        # diff of exactly 0 (which yields self's own network once) —
        # confirm whether 0 should be rejected before tightening.
        if prefixlen_diff < 0:
            raise ValueError('prefix length diff must be > 0')
        new_prefixlen = self._prefixlen + prefixlen_diff

        if not self._is_valid_netmask(str(new_prefixlen)):
            raise ValueError(
                'prefix length diff %d is invalid for netblock %s' % (
                    new_prefixlen, str(self)))

        # First subnet starts at our own network address.
        first = IPNetwork('%s/%s' % (str(self.network),
                                     str(self._prefixlen + prefixlen_diff)),
                          version=self._version)

        yield first
        current = first
        # Walk forward one subnet at a time until we reach our own
        # broadcast address.
        while True:
            broadcast = current.broadcast
            if broadcast == self.broadcast:
                return
            new_addr = IPAddress(int(broadcast) + 1, version=self._version)
            current = IPNetwork('%s/%s' % (str(new_addr), str(new_prefixlen)),
                                version=self._version)

            yield current
+
+ def masked(self):
+ """Return the network object with the host bits masked out."""
+ return IPNetwork('%s/%d' % (self.network, self._prefixlen),
+ version=self._version)
+
+ def subnet(self, prefixlen_diff=1, new_prefix=None):
+ """Return a list of subnets, rather than an iterator."""
+ return list(self.iter_subnets(prefixlen_diff, new_prefix))
+
+ def supernet(self, prefixlen_diff=1, new_prefix=None):
+ """The supernet containing the current network.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length of
+ the network should be decreased by. For example, given a
+ /24 network and a prefixlen_diff of 3, a supernet with a
+ /21 netmask is returned.
+
+ Returns:
+ An IPv4 network object.
+
+ Raises:
+ ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have a
+ negative prefix length.
+ OR
+ If prefixlen_diff and new_prefix are both set or new_prefix is a
+ larger number than the current prefix (larger number means a
+ smaller network)
+
+ """
+ if self._prefixlen == 0:
+ return self
+
+ if new_prefix is not None:
+ if new_prefix > self._prefixlen:
+ raise ValueError('new prefix must be shorter')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = self._prefixlen - new_prefix
+
+
+ if self.prefixlen - prefixlen_diff < 0:
+ raise ValueError(
+ 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
+ (self.prefixlen, prefixlen_diff))
+ return IPNetwork('%s/%s' % (str(self.network),
+ str(self.prefixlen - prefixlen_diff)),
+ version=self._version)
+
    # Backwards-compatible CamelCase aliases for the pre-PEP8 API.
    Subnet = subnet
    Supernet = supernet
    AddressExclude = address_exclude
    CompareNetworks = compare_networks
    Contains = __contains__
+
+
class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    """

    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2**IPV4LENGTH) - 1
    # The only characters allowed in a dotted-decimal octet.
    _DECIMAL_DIGITS = frozenset('0123456789')

    def __init__(self, address):
        # `address` is accepted for signature symmetry with the other
        # base classes but is not used here.
        self._version = 4
        self._max_prefixlen = IPV4LENGTH

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand notation; the "exploded" form is just
        # the normal string form.
        return str(self)

    def _ip_int_from_string(self, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        """
        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError(ip_str)

        packed_ip = 0
        for oc in octets:
            try:
                # Fold each octet into the low 8 bits.
                packed_ip = (packed_ip << 8) | self._parse_octet(oc)
            except ValueError:
                raise AddressValueError(ip_str)
        return packed_ip

    def _parse_octet(self, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        """
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not self._DECIMAL_DIGITS.issuperset(octet_str):
            raise ValueError
        octet_int = int(octet_str, 10)
        # Disallow leading zeroes, because no clear standard exists on
        # whether these should be interpreted as decimal or octal.
        if octet_int > 255 or (octet_str[0] == '0' and len(octet_str) > 1):
            raise ValueError
        return octet_int

    def _string_from_ip_int(self, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        """
        octets = []
        # Peel off the low byte four times, building right-to-left.
        for _ in xrange(4):
            octets.insert(0, str(ip_int & 0xFF))
            ip_int >>= 8
        return '.'.join(octets)

    @property
    def max_prefixlen(self):
        # 32 for IPv4.
        return self._max_prefixlen

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def version(self):
        # Always 4 for this class hierarchy.
        return self._version

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within the
            reserved IPv4 Network range.

        """
        return self in IPv4Network('240.0.0.0/4')

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per RFC 1918.

        """
        return (self in IPv4Network('10.0.0.0/8') or
                self in IPv4Network('172.16.0.0/12') or
                self in IPv4Network('192.168.0.0/16'))

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.

        """
        return self in IPv4Network('224.0.0.0/4')

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.

        """
        # '0.0.0.0' with no mask is a /32 single-host network.
        return self in IPv4Network('0.0.0.0')

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.

        """
        return self in IPv4Network('127.0.0.0/8')

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.

        """
        return self in IPv4Network('169.254.0.0/16')
+
+
class IPv4Address(_BaseV4, _BaseIP):

    """Represent and manipulate single IPv4 Addresses."""

    def __init__(self, address):

        """
        Args:
            address: A string or integer representing the IP
              '192.168.1.1'

              Additionally, an integer can be passed, so
              IPv4Address('192.168.1.1') == IPv4Address(3232235777).
              or, more generally
              IPv4Address(int(IPv4Address('192.168.1.1'))) ==
                IPv4Address('192.168.1.1')

        Raises:
            AddressValueError: If ipaddr isn't a valid IPv4 address.

        """
        _BaseV4.__init__(self, address)

        # Fast path: the address is already an integer.
        if isinstance(address, (int, long)):
            self._ip = address
            if not 0 <= address <= self._ALL_ONES:
                raise AddressValueError(address)
            return

        # Packed 4-byte network-order input.
        if isinstance(address, Bytes):
            try:
                self._ip, = struct.unpack('!I', address)
            except struct.error:
                # Wrong length for a packed IPv4 address.
                raise AddressValueError(address)
            return

        # Anything else: treat as (something convertible to) a
        # dotted-decimal string and parse it.
        self._ip = self._ip_int_from_string(str(address))
+
+
class IPv4Network(_BaseV4, _BaseNet):

    """This class represents and manipulates 32-bit IPv4 networks.

    Attributes: [examples for IPv4Network('1.2.3.4/27')]
        ._ip: 16909060
        .ip: IPv4Address('1.2.3.4')
        .network: IPv4Address('1.2.3.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast: IPv4Address('1.2.3.31')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27

    """

    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0))

    def __init__(self, address, strict=False):
        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.168.1.1/24'
              '192.168.1.1/255.255.255.0'
              '192.168.1.1/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.168.1.1'
              '192.168.1.1/255.255.255.255'
              '192.168.1.1/32'
              are also functionaly equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.168.1.1') == IPv4Network(3232235777).
              or, more generally
              IPv4Network(int(IPv4Network('192.168.1.1'))) ==
                IPv4Network('192.168.1.1')

            strict: A boolean. If true, ensure that we have been passed
              A true network address, eg, 192.168.1.0/24 and not an
              IP address on a network, eg, 192.168.1.1/24.

        Raises:
            AddressValueError: If ipaddr isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict was True and a network address was not
              supplied.

        """
        _BaseNet.__init__(self, address)
        _BaseV4.__init__(self, address)

        # Constructing from an integer or packed bytes: a single-host
        # /32 network.
        if isinstance(address, (int, long, Bytes)):
            self.ip = IPv4Address(address)
            self._ip = self.ip._ip
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv4Address(self._ALL_ONES)
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = str(address).split('/')

        if len(addr) > 2:
            raise AddressValueError(address)

        self._ip = self._ip_int_from_string(addr[0])
        self.ip = IPv4Address(self._ip)

        if len(addr) == 2:
            mask = addr[1].split('.')
            if len(mask) == 4:
                # We have dotted decimal netmask.
                if self._is_valid_netmask(addr[1]):
                    self.netmask = IPv4Address(self._ip_int_from_string(
                            addr[1]))
                elif self._is_hostmask(addr[1]):
                    # Hostmask (Cisco wildcard) form: invert to get the
                    # netmask.
                    self.netmask = IPv4Address(
                        self._ip_int_from_string(addr[1]) ^ self._ALL_ONES)
                else:
                    raise NetmaskValueError('%s is not a valid netmask'
                                                     % addr[1])

                self._prefixlen = self._prefix_from_ip_int(int(self.netmask))
            else:
                # We have a netmask in prefix length form.
                if not self._is_valid_netmask(addr[1]):
                    raise NetmaskValueError(addr[1])
                self._prefixlen = int(addr[1])
                self.netmask = IPv4Address(self._ip_int_from_prefix(
                    self._prefixlen))
        else:
            # No mask supplied: default to a /32 single-host network.
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv4Address(self._ip_int_from_prefix(
                self._prefixlen))
        if strict:
            if self.ip != self.network:
                raise ValueError('%s has host bits set' %
                                 self.ip)
        # For a /31 there is no network/broadcast pair; iterate both
        # addresses as hosts (RFC 3021 point-to-point links).
        if self._prefixlen == (self._max_prefixlen - 1):
            self.iterhosts = self.__iter__

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        """
        bits = ip_str.split('.')
        try:
            # Keep only octets that are valid mask values; a length
            # mismatch below means some octet was invalid.
            parts = [int(x) for x in bits if int(x) in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            return False
        # A hostmask ascends (e.g. 0.0.0.255); a netmask descends.
        if parts[0] < parts[-1]:
            return True
        return False

    def _is_valid_netmask(self, netmask):
        """Verify that the netmask is valid.

        Args:
            netmask: A string, either a prefix or dotted decimal
              netmask.

        Returns:
            A boolean, True if the prefix represents a valid IPv4
            netmask.

        """
        mask = netmask.split('.')
        if len(mask) == 4:
            # NOTE(review): int(x) here can raise an uncaught ValueError
            # for non-numeric octets, and the `y > mask[idx - 1]` below
            # compares *strings*; it happens to order the valid mask
            # octet strings consistently, but this is fragile — confirm
            # before relying on it for new inputs.
            if [x for x in mask if int(x) not in self._valid_mask_octets]:
                return False
            if [y for idx, y in enumerate(mask) if idx > 0 and
                y > mask[idx - 1]]:
                return False
            return True
        try:
            netmask = int(netmask)
        except ValueError:
            return False
        return 0 <= netmask <= self._max_prefixlen

    # backwards compatibility (pre-PEP8 CamelCase API)
    IsRFC1918 = lambda self: self.is_private
    IsMulticast = lambda self: self.is_multicast
    IsLoopback = lambda self: self.is_loopback
    IsLinkLocal = lambda self: self.is_link_local
+
+
class _BaseV6(object):

    """Base IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.

    """

    # 128 bits of 1's.
    _ALL_ONES = (2**IPV6LENGTH) - 1
    # An IPv6 address has 8 16-bit groups ("hextets").
    _HEXTET_COUNT = 8
    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')

    def __init__(self, address):
        # `address` is accepted for signature symmetry with the other
        # base classes but is not used here.
        self._version = 6
        self._max_prefixlen = IPV6LENGTH

    def _ip_int_from_string(self, ip_str):
        """Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            A long, the IPv6 ip_str.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.

        """
        parts = ip_str.split(':')

        # An IPv6 address needs at least 2 colons (3 parts).
        if len(parts) < 3:
            raise AddressValueError(ip_str)

        # If the address has an IPv4-style suffix, convert it to hexadecimal.
        if '.' in parts[-1]:
            ipv4_int = IPv4Address(parts.pop())._ip
            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
            parts.append('%x' % (ipv4_int & 0xFFFF))

        # An IPv6 address can't have more than 8 colons (9 parts).
        if len(parts) > self._HEXTET_COUNT + 1:
            raise AddressValueError(ip_str)

        # Disregarding the endpoints, find '::' with nothing in between.
        # This indicates that a run of zeroes has been skipped.
        try:
            # Unpacking raises ValueError when there is more than one
            # empty interior part (i.e. more than one '::').
            skip_index, = (
                [i for i in xrange(1, len(parts) - 1) if not parts[i]] or
                [None])
        except ValueError:
            # Can't have more than one '::'
            raise AddressValueError(ip_str)

        # parts_hi is the number of parts to copy from above/before the '::'
        # parts_lo is the number of parts to copy from below/after the '::'
        if skip_index is not None:
            # If we found a '::', then check if it also covers the endpoints.
            parts_hi = skip_index
            parts_lo = len(parts) - skip_index - 1
            if not parts[0]:
                parts_hi -= 1
                if parts_hi:
                    raise AddressValueError(ip_str)  # ^: requires ^::
            if not parts[-1]:
                parts_lo -= 1
                if parts_lo:
                    raise AddressValueError(ip_str)  # :$ requires ::$
            parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo)
            if parts_skipped < 1:
                raise AddressValueError(ip_str)
        else:
            # Otherwise, allocate the entire address to parts_hi. The endpoints
            # could still be empty, but _parse_hextet() will check for that.
            if len(parts) != self._HEXTET_COUNT:
                raise AddressValueError(ip_str)
            parts_hi = len(parts)
            parts_lo = 0
            parts_skipped = 0

        try:
            # Now, parse the hextets into a 128-bit integer.
            ip_int = 0L
            for i in xrange(parts_hi):
                ip_int <<= 16
                ip_int |= self._parse_hextet(parts[i])
            # Account for the zero run the '::' stood for.
            ip_int <<= 16 * parts_skipped
            for i in xrange(-parts_lo, 0):
                ip_int <<= 16
                ip_int |= self._parse_hextet(parts[i])
            return ip_int
        except ValueError:
            raise AddressValueError(ip_str)

    def _parse_hextet(self, hextet_str):
        """Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from [0..FFFF].

        """
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not self._HEX_DIGITS.issuperset(hextet_str):
            raise ValueError
        hextet_int = int(hextet_str, 16)
        if hextet_int > 0xFFFF:
            raise ValueError
        return hextet_int

    def _compress_hextets(self, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        """
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        # Single pass tracking the current and best run of '0' hextets.
        for index in range(len(hextets)):
            if hextets[index] == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1

        # Only compress runs of two or more zero hextets.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets

        return hextets

    def _string_from_ip_int(self, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address; defaults to this
                object's own integer value.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        """
        if not ip_int and ip_int != 0:
            ip_int = int(self._ip)

        if ip_int > self._ALL_ONES:
            raise ValueError('IPv6 address is too large')

        # 32 hex digits, split into eight 4-digit hextets.
        hex_str = '%032x' % ip_int
        hextets = []
        for x in range(0, 32, 4):
            hextets.append('%x' % int(hex_str[x:x+4], 16))

        hextets = self._compress_hextets(hextets)
        return ':'.join(hextets)

    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address to its full 8-hextet form.

        Returns:
            A string, the expanded IPv6 address (with '/prefixlen'
            appended when called on a network object).

        """
        if isinstance(self, _BaseNet):
            ip_str = str(self.ip)
        else:
            ip_str = str(self)

        ip_int = self._ip_int_from_string(ip_str)
        parts = []
        # Emit each hextet zero-padded to 4 digits.
        for i in xrange(self._HEXTET_COUNT):
            parts.append('%04x' % (ip_int & 0xFFFF))
            ip_int >>= 16
        parts.reverse()
        if isinstance(self, _BaseNet):
            return '%s/%d' % (':'.join(parts), self.prefixlen)
        return ':'.join(parts)

    @property
    def max_prefixlen(self):
        # 128 for IPv6.
        return self._max_prefixlen

    @property
    def packed(self):
        """The binary representation of this address."""
        return v6_int_to_packed(self._ip)

    @property
    def version(self):
        # Always 6 for this class hierarchy.
        return self._version

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        return self in IPv6Network('ff00::/8')

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        return (self in IPv6Network('::/8') or
                self in IPv6Network('100::/8') or
                self in IPv6Network('200::/7') or
                self in IPv6Network('400::/6') or
                self in IPv6Network('800::/5') or
                self in IPv6Network('1000::/4') or
                self in IPv6Network('4000::/3') or
                self in IPv6Network('6000::/3') or
                self in IPv6Network('8000::/3') or
                self in IPv6Network('A000::/3') or
                self in IPv6Network('C000::/3') or
                self in IPv6Network('E000::/4') or
                self in IPv6Network('F000::/5') or
                self in IPv6Network('F800::/6') or
                self in IPv6Network('FE00::/9'))

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        # Addresses have no _prefixlen; default it to 128 so a plain
        # '::' address also qualifies.
        return self._ip == 0 and getattr(self, '_prefixlen', 128) == 128

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        return self._ip == 1 and getattr(self, '_prefixlen', 128) == 128

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        return self in IPv6Network('fe80::/10')

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return self in IPv6Network('fec0::/10')

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per RFC 4193.

        """
        return self in IPv6Network('fc00::/7')

    @property
    def ipv4_mapped(self):
        """Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.

        """
        # v4-mapped addresses are ::ffff:a.b.c.d, i.e. bits 32..47 all set.
        if (self._ip >> 32) != 0xFFFF:
            return None
        return IPv4Address(self._ip & 0xFFFFFFFF)

    @property
    def teredo(self):
        """Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)

        """
        if (self._ip >> 96) != 0x20010000:
            return None
        # The client address is stored bit-inverted in the low 32 bits.
        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
                IPv4Address(~self._ip & 0xFFFFFFFF))

    @property
    def sixtofour(self):
        """Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.

        """
        if (self._ip >> 112) != 0x2002:
            return None
        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
+
+
class IPv6Address(_BaseV6, _BaseIP):

    """Represent and manipulate single IPv6 Addresses.
    """

    def __init__(self, address):
        """Instantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:4860::') ==
                IPv6Address(42541956101370907050197289607612071936L).
              or, more generally
              IPv6Address(IPv6Address('2001:4860::')._ip) ==
                IPv6Address('2001:4860::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.

        """
        _BaseV6.__init__(self, address)

        # Fast path: the address is already an integer.
        if isinstance(address, (int, long)):
            self._ip = address
            if not 0 <= address <= self._ALL_ONES:
                raise AddressValueError(address)
            return

        # Packed 16-byte network-order input: two 64-bit halves.
        if isinstance(address, Bytes):
            try:
                hi, lo = struct.unpack('!QQ', address)
            except struct.error:
                # Wrong length for a packed IPv6 address.
                raise AddressValueError(address)
            self._ip = (hi << 64) | lo
            return

        # Anything else: treat as (something convertible to) a
        # colon-hex string and parse it.
        addr_str = str(address)
        if not addr_str:
            raise AddressValueError('')

        self._ip = self._ip_int_from_string(addr_str)
+
+
class IPv6Network(_BaseV6, _BaseNet):

    """This class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')]
        .ip: IPv6Address('2001:658:22a:cafe:200::1')
        .network: IPv6Address('2001:658:22a:cafe::')
        .hostmask: IPv6Address('::ffff:ffff:ffff:ffff')
        .broadcast: IPv6Address('2001:658:22a:cafe:ffff:ffff:ffff:ffff')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff::')
        .prefixlen: 64

    """


    def __init__(self, address, strict=False):
        """Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the IP
              and prefix/netmask.
              '2001:4860::/128'
              '2001:4860:0000:0000:0000:0000:0000:0000/128'
              '2001:4860::'
              are all functionally the same in IPv6.  That is to say,
              failing to provide a subnetmask will create an object with
              a mask of /128.

              Additionally, an integer can be passed, so
              IPv6Network('2001:4860::') ==
                IPv6Network(42541956101370907050197289607612071936L).
              or, more generally
              IPv6Network(IPv6Network('2001:4860::')._ip) ==
                IPv6Network('2001:4860::')

            strict: A boolean. If true, ensure that we have been passed
              A true network address, eg, 192.168.1.0/24 and not an
              IP address on a network, eg, 192.168.1.1/24.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.

        """
        _BaseNet.__init__(self, address)
        _BaseV6.__init__(self, address)

        # Constructing from an integer or packed bytes: a single-host
        # /128 network.
        if isinstance(address, (int, long, Bytes)):
            self.ip = IPv6Address(address)
            self._ip = self.ip._ip
            self._prefixlen = self._max_prefixlen
            self.netmask = IPv6Address(self._ALL_ONES)
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = str(address).split('/')

        if len(addr) > 2:
            raise AddressValueError(address)

        self._ip = self._ip_int_from_string(addr[0])
        self.ip = IPv6Address(self._ip)

        if len(addr) == 2:
            # IPv6 masks are only accepted in prefix-length form.
            if self._is_valid_netmask(addr[1]):
                self._prefixlen = int(addr[1])
            else:
                raise NetmaskValueError(addr[1])
        else:
            self._prefixlen = self._max_prefixlen

        self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen))

        if strict:
            if self.ip != self.network:
                raise ValueError('%s has host bits set' %
                                 self.ip)
        # For a /127 there is no network/broadcast pair; iterate both
        # addresses as hosts.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.iterhosts = self.__iter__

    def _is_valid_netmask(self, prefixlen):
        """Verify that the netmask/prefixlen is valid.

        Args:
            prefixlen: A string, the netmask in prefix length format.

        Returns:
            A boolean, True if the prefix represents a valid IPv6
            netmask.

        """
        try:
            prefixlen = int(prefixlen)
        except ValueError:
            return False
        return 0 <= prefixlen <= self._max_prefixlen

    @property
    def with_netmask(self):
        # IPv6 has no dotted netmask form; reuse the prefix notation.
        return self.with_prefixlen
--- /dev/null
+import sys, logging, traceback, inspect, os.path
+from logging import handlers
+from manifold.util.singleton import Singleton
+from manifold.util.options import Options
+from manifold.util.misc import caller_name, make_list
+from manifold.util import colors
+
+# TODO Log should take separately message strings and arguments to be able to
+# remember which messages are seen several times, and also to allow for
+# translation
+# TODO How to log to stdout without putting None in self.log
+
class Log(object):
    """Process-wide logging facade; Singleton metaclass ensures one shared instance."""
    __metaclass__ = Singleton
+
    # Default values for the logging options registered in init_options().
    DEFAULTS = {
        # Logging
        "rsyslog_enable"      : False,
        "rsyslog_host"        : None, #"log.top-hat.info",
        "rsyslog_port"        : None, #28514,
        "log_file"            : "/var/log/manifold.log",
        "log_level"           : "DEBUG",
        "debug"               : "default",
        "log_duplicates"      : False
    }
+
    # COLORS: ANSI escape sequence used for each message level/tag
    # (looked up by Log.color()).
    color_ansi = {
        'DEBUG'  : colors.MYGREEN,
        'INFO'   : colors.MYBLUE,
        'WARNING': colors.MYWARNING,
        'ERROR'  : colors.MYRED,
        'HEADER' : colors.MYHEADER,
        'END'    : colors.MYEND,
        'RECORD' : colors.MYBLUE,
        'TMP'    : colors.MYRED,
    }
+
+ @classmethod
+ def color(cls, color):
+ return cls.color_ansi[color] if color else ''
+
    # To remove duplicate messages: presumably keyed by message so
    # repeats can be suppressed when log_duplicates is off — TODO confirm
    # against the (unseen) message-emitting code.
    seen = {}
+
+ def __init__(self, name='(default)'):
+ self.log = None # logging.getLogger(name)
+ self.files_to_keep = []
+ self.init_log()
+ self.color = True
+
+
+ @classmethod
+ def init_options(self):
+ opt = Options()
+
+ opt.add_option(
+ "--rsyslog-enable", action = "store_false", dest = "rsyslog_enable",
+ help = "Specify if log have to be written to a rsyslog server.",
+ default = self.DEFAULTS["rsyslog_enable"]
+ )
+ opt.add_option(
+ "--rsyslog-host", dest = "rsyslog_host",
+ help = "Rsyslog hostname.",
+ default = self.DEFAULTS["rsyslog_host"]
+ )
+ opt.add_option(
+ "--rsyslog-port", type = "int", dest = "rsyslog_port",
+ help = "Rsyslog port.",
+ default = self.DEFAULTS["rsyslog_port"]
+ )
+ opt.add_option(
+ "-o", "--log-file", dest = "log_file",
+ help = "Log filename.",
+ default = self.DEFAULTS["log_file"]
+ )
+ opt.add_option(
+ "-L", "--log-level", dest = "log_level",
+ choices = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
+ help = "Log level",
+ default = self.DEFAULTS["log_level"]
+ )
+ opt.add_option(
+ "-d", "--debug", dest = "debug",
+ help = "Debug paths (a list of coma-separated python path: path.to.module.function).",
+ default = self.DEFAULTS["debug"]
+ )
+ opt.add_option(
+ "", "--log_duplicates", action = "store_true", dest = "log_duplicates",
+ help = "Remove duplicate messages in logs",
+ default = self.DEFAULTS["log_duplicates"]
+ )
+
    def init_log(self, options=object()):
        """Install a log handler on self.log according to global Options().

        Preference order: rsyslog (when rsyslog_host is set), then a
        local rotating file (when log_file is set).

        NOTE(review): the `options` parameter is never read (Options()
        is queried directly) and its default is a single shared object()
        instance — confirm whether it can be dropped.
        NOTE(review): `shandler` looks unused here, but the
        make_handler_*() helpers attach the handler to self.log via
        prepare_handler(), so the local is only a leftover.
        """
        # Initialize self.log (require self.files_to_keep)
        if self.log: # for debugging by using stdout, log may be equal to None
            if Options().rsyslog_host:
                shandler = self.make_handler_rsyslog(
                    Options().rsyslog_host,
                    Options().rsyslog_port,
                    Options().log_level
                )
            elif Options().log_file:
                shandler = self.make_handler_locallog(
                    Options().log_file,
                    Options().log_level
                )
+
+ #------------------------------------------------------------------------
+ # Log
+ #------------------------------------------------------------------------
+
+    def make_handler_rsyslog(self, rsyslog_host, rsyslog_port, log_level):
+        """
+        \brief (Internal usage) Prepare logging via rsyslog
+        \param rsyslog_host The hostname of the rsyslog server
+        \param rsyslog_port The port of the rsyslog server
+        \param log_level Log level
+        \return The SysLogHandler (already attached to self.log by prepare_handler)
+        """
+        # Prepare the handler
+        # NOTE(review): bare `handlers` here vs `logging.handlers` in
+        # make_handler_locallog -- both presumably imported at the top of the
+        # file (not visible in this hunk); confirm.
+        shandler = handlers.SysLogHandler(
+            (rsyslog_host, rsyslog_port),
+            facility = handlers.SysLogHandler.LOG_DAEMON
+        )
+
+        # The log file must remain open while daemonizing
+        self.prepare_handler(shandler, log_level)
+        return shandler
+
+    def make_handler_locallog(self, log_filename, log_level):
+        """
+        \brief (Internal usage) Prepare local logging
+        \param log_filename The file in which we write the logs
+        \param log_level Log level
+        \return The RotatingFileHandler (already attached to self.log)
+        """
+        # Create directory in which we store the log file
+        log_dir = os.path.dirname(log_filename)
+        if log_dir and not os.path.exists(log_dir):
+            try:
+                os.makedirs(log_dir)
+            except OSError, why:
+                # XXX here we don't log since log is not initialized yet
+                print "OS error: %s" % why
+
+        # Prepare the handler
+        # NOTE(review): with backupCount=0 and no maxBytes, a
+        # RotatingFileHandler never actually rolls over -- it behaves like a
+        # plain FileHandler.
+        shandler = logging.handlers.RotatingFileHandler(
+            log_filename,
+            backupCount = 0
+        )
+
+        # The log file must remain open while daemonizing
+        self.files_to_keep.append(shandler.stream)
+        self.prepare_handler(shandler, log_level)
+        return shandler
+
+    def prepare_handler(self, shandler, log_level):
+        """
+        \brief (Internal usage) Set level/formatter on a handler and attach it
+               to self.log; also aligns the logger's own level.
+        \param shandler Handler used to log information
+        \param log_level Log level (a name such as "DEBUG"; setLevel accepts
+               level names as well as numeric levels)
+        """
+        shandler.setLevel(log_level)
+        formatter = logging.Formatter("%(asctime)s: %(name)s: %(levelname)s %(message)s")
+        shandler.setFormatter(formatter)
+        self.log.addHandler(shandler)
+        # Unknown level names fall back to INFO for the logger itself
+        self.log.setLevel(getattr(logging, log_level, logging.INFO))
+
+ def get_logger(self):
+ return self.log
+
+    @classmethod
+    def print_msg(cls, msg, level=None, caller=None):
+        # Fallback console output (used when no logger is configured):
+        # "<LEVEL> [        caller] msg", colorized per level.
+        # cls.color is defined elsewhere in this class (not visible in this hunk).
+        sys.stdout.write(cls.color(level))
+        if level:
+            print "%s" % level,
+        if caller:
+            print "[%30s]" % caller,
+        print msg,
+        print cls.color('END')
+
+ #---------------------------------------------------------------------
+ # Log: logger abstraction
+ #---------------------------------------------------------------------
+
+ @classmethod
+ def build_message_string(cls, msg, ctx):
+ if ctx:
+ msg = [m % ctx for m in msg]
+ if isinstance(msg, (tuple, list)):
+ msg = map(lambda s : "%s" % s, msg)
+ msg = " ".join(msg)
+ else:
+ msg = "%s" % msg
+ return msg
+
+    @classmethod
+    def log_message(cls, level, msg, ctx):
+        """
+        \brief Logs an message
+        \param level (string) Log level
+        \param msg (string / list of strings) Message string, or List of message strings
+        \param ctx (dict) Context for the message strings
+        """
+        caller = None
+
+        # Deduplicate repeated messages unless --log_duplicates is set.
+        # cls.seen is assumed to be a class-level dict cache defined outside
+        # this hunk -- TODO confirm.
+        if not Options().log_duplicates:
+            try:
+                count = cls.seen.get(msg, 0)
+                cls.seen[msg] = count + 1
+            except TypeError, e:
+                # Unhashable types in msg
+                count = 0
+
+            if count == 1:
+                msg += (" -- REPEATED -- Future similar messages will be silently ignored. Please use the --log_duplicates option to allow for duplicates",)
+            elif count > 1:
+                return
+
+        # DEBUG messages are emitted only for callers whose dotted path
+        # matches one of the --debug prefixes.
+        if level == 'DEBUG':
+            caller = caller_name(skip=3)
+            # Strip whitespace (and quoting artifacts) from the configured paths
+            try:
+                paths = tuple(s.strip(' \t\n\r') for s in Options().debug.split(','))
+            except:
+                # --debug unset or not a string: suppress all DEBUG output
+                paths = None
+            if not paths or not caller.startswith(paths):
+                return
+
+        logger = Log().get_logger()
+        msg_str = cls.build_message_string(msg, ctx)
+
+        if logger:
+            # Prefix with the calling function's name (2 frames up the stack)
+            logger_fct = getattr(logger, level.lower())
+            logger_fct("%s(): %s" % (inspect.stack()[2][3], msg_str))
+        else:
+            cls.print_msg(msg_str, level, caller)
+
+
+ @classmethod
+ def critical(cls, *msg, **ctx):
+ if not Options().log_level in ['CRITICAL']:
+ return
+ cls.log_message('CRITICAL', msg, ctx)
+ sys.exit(0)
+
+ @classmethod
+ def error(cls, *msg, **ctx):
+ if not Options().log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
+ return
+ cls.log_message('ERROR', msg, ctx)
+ logger = Log().get_logger()
+ if not Log().get_logger():
+ traceback.print_exc()
+ sys.exit(0)
+
+ @classmethod
+ def warning(cls, *msg, **ctx):
+ if not Options().log_level in ['DEBUG', 'INFO', 'WARNING']:
+ return
+ cls.log_message('WARNING', msg, ctx)
+
+ @classmethod
+ def info(cls, *msg, **ctx):
+ if not Options().log_level in ['DEBUG', 'INFO']:
+ return
+ cls.log_message('INFO', msg, ctx)
+
+ @classmethod
+ def debug(cls, *msg, **ctx):
+ if not Options().log_level in ['DEBUG']:
+ return
+ cls.log_message('DEBUG', msg, ctx)
+
+    @classmethod
+    def tmp(cls, *msg):
+        # Temporary developer trace: prints every argument (repr) tagged 'TMP'
+        # together with the caller's dotted name, bypassing level filtering.
+        cls.print_msg(' '.join(map(lambda x: "%r"%x, make_list(msg))), 'TMP', caller_name())
+
+    @classmethod
+    def record(cls, *msg):
+        # Record-level tracing; currently disabled (no-op).
+        #cls.print_msg(' '.join(map(lambda x: "%r"%x, make_list(msg))), 'RECORD', caller_name())
+        pass
+
+    @classmethod
+    def deprecated(cls, new):
+        # Intended to warn that the calling function is deprecated in favor of
+        # `new`; currently disabled (no-op).
+        #cls.print_msg("Function %s is deprecated, please use %s" % (caller_name(skip=3), new))
+        pass
+
+Log.init_options()
--- /dev/null
+import os, glob, inspect
+from types import StringTypes
+
+def find_local_modules(filepath):
+ modules = []
+ for f in glob.glob(os.path.dirname(filepath)+"/*.py"):
+ name = os.path.basename(f)[:-3]
+ if name != '__init__':
+ modules.append(name)
+ return modules
+
+def make_list(elt):
+ if not elt or isinstance(elt, list):
+ return elt
+ if isinstance(elt, StringTypes):
+ return [elt]
+ if isinstance(elt, (tuple, set, frozenset)):
+ return list(elt)
+
+
+# FROM: https://gist.github.com/techtonik/2151727
+# Public Domain, i.e. feel free to copy/paste
+# Considered a hack in Python 2
+
+import inspect
+
+def caller_name(skip=2):
+    """Get a name of a caller in the format module.class.method
+
+    `skip` specifies how many levels of stack to skip while getting caller
+    name. skip=1 means "who calls me", skip=2 "who calls my caller" etc.
+
+    An empty string is returned if skipped levels exceed stack height
+    """
+    stack = inspect.stack()
+    start = 0 + skip
+    if len(stack) < start + 1:
+        return ''
+    parentframe = stack[start][0]
+
+    name = []
+    module = inspect.getmodule(parentframe)
+    # `modname` can be None when frame is executed directly in console
+    # TODO(techtonik): consider using __main__
+    if module:
+        name.append(module.__name__)
+    # detect classname
+    if 'self' in parentframe.f_locals:
+        # I don't know any way to detect call from the object method
+        # XXX: there seems to be no way to detect static method call - it will
+        #      be just a function call
+        name.append(parentframe.f_locals['self'].__class__.__name__)
+    codename = parentframe.f_code.co_name
+    if codename != '<module>':  # top level usually
+        name.append( codename ) # function or a method
+    # Drop the frame reference explicitly to avoid reference cycles
+    # (frames hold their locals alive; see the inspect module docs).
+    del parentframe
+    return ".".join(name)
+
+def is_sublist(x, y, shortcut=None):
+ if not shortcut: shortcut = []
+ if x == []: return (True, shortcut)
+ if y == []: return (False, None)
+ if x[0] == y[0]:
+ return is_sublist(x[1:],y[1:], shortcut)
+ else:
+ return is_sublist(x, y[1:], shortcut + [y[0]])
--- /dev/null
+import sys
+import os.path
+import optparse
+# xxx warning : this is not taken care of by the debian packaging
+# cfgparse seems to be available by pip only (on debian, that is)
+# there seems to be another package that might be used to do similar stuff
+# python-configglue - Glues together optparse.OptionParser and ConfigParser.ConfigParser
+# additionally argumentparser would probably be the way to go, notwithstanding
+# xxx Moving this into the parse method so this module can at least be imported
+#import cfgparse
+
+from manifold.util.singleton import Singleton
+
+# http://docs.python.org/dev/library/argparse.html#upgrading-optparse-code
+
+class Options(object):
+
+ __metaclass__ = Singleton
+
+ # We should be able to use another default conf file
+ CONF_FILE = '/etc/manifold.conf'
+
+ def __init__(self, name = None):
+ self._opt = optparse.OptionParser()
+ self._defaults = {}
+ self._name = name
+ self.clear()
+
+ def clear(self):
+ self.options = {}
+ self.add_option(
+ "-c", "--config", dest = "cfg_file",
+ help = "Config file to use.",
+ default = self.CONF_FILE
+ )
+ self.uptodate = True
+
+ def parse(self):
+ """
+ \brief Parse options passed from command-line
+ """
+ # add options here
+
+ # if we have a logger singleton, add its options here too
+ # get defaults too
+
+ # Initialize options to default values
+ import cfgparse
+ cfg = cfgparse.ConfigParser()
+ cfg.add_optparse_help_option(self._opt)
+
+ # Load configuration file
+ try:
+ cfg_filename = sys.argv[sys.argv.index("-c") + 1]
+ try:
+ with open(cfg_filename): cfg.add_file(cfg_filename)
+ except IOError:
+ raise Exception, "Cannot open specified configuration file: %s" % cfg_filename
+ except ValueError:
+ try:
+ with open(self.CONF_FILE): cfg.add_file(self.CONF_FILE)
+ except IOError: pass
+
+ for option_name in self._defaults:
+ cfg.add_option(option_name, default = self._defaults[option_name])
+
+ # Load/override options from configuration file and command-line
+ (options, args) = cfg.parse(self._opt)
+ self.options.update(vars(options))
+ self.uptodate = True
+
+
+ def add_option(self, *args, **kwargs):
+ default = kwargs.get('default', None)
+ self._defaults[kwargs['dest']] = default
+ if 'default' in kwargs:
+ # This is very important otherwise file content is not taken into account
+ del kwargs['default']
+ kwargs['help'] += " Defaults to %r." % default
+ self._opt.add_option(*args, **kwargs)
+ self.uptodate = False
+
+ def get_name(self):
+ return self._name if self._name else os.path.basename(sys.argv[0])
+
+ def __repr__(self):
+ return "<Options: %r>" % self.options
+
+ def __getattr__(self, key):
+ if not self.uptodate:
+ self.parse()
+ return self.options.get(key, None)
+
+ def __setattr(self, key, value):
+ self.options[key] = value
--- /dev/null
+from manifold.util.log import Log
+
+class PluginFactory(type):
+    # Metaclass maintaining a shared name -> class registry for Gateway
+    # plugins: every subclass registers itself at definition time.
+    def __init__(cls, name, bases, dic):
+        #super(PluginFactory, cls).__init__(name, bases, dic)
+        type.__init__(cls, name, bases, dic)
+
+        # Create the registry on the base class the first time through; all
+        # subclasses then share the same dict.
+        try:
+            registry = getattr(cls, 'registry')
+        except AttributeError:
+            setattr(cls, 'registry', {})
+            registry = getattr(cls, 'registry')
+        # XXX
+        if name != "Gateway":
+            # Register under the lowercased name, stripped of the 'Gateway' suffix
+            if name.endswith('Gateway'):
+                name = name[:-7]
+            name = name.lower()
+            registry[name] = cls
+
+    # NOTE(review): at this indentation `get` is a class-level def (capturing
+    # no `registry`) and the setattr below would execute at class-creation
+    # time where `cls` is undefined; upstream both live inside __init__, so
+    # the indentation of this hunk looks mangled. TODO confirm against the
+    # original file before applying.
+    def get(self, name):
+        return registry[name.lower()]
+
+    # Adding a class method get to retrieve plugins by name
+    setattr(cls, 'get', classmethod(get))
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Class Predicate:
+# Define a condition to join for example to Table instances.
+# If this condition involves several fields, you may define a
+# single Predicate using tuple of fields.
+#
+# Copyright (C) UPMC Paris Universitas
+# Authors:
+# Jordan Augé <jordan.auge@lip6.fr>
+# Marc-Olivier Buob <marc-olivier.buob@lip6.fr>
+
+from types import StringTypes
+from manifold.util.type import returns, accepts
+
+from operator import (
+ and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg
+)
+
+# Define the inclusion operators
+# (marker classes only: Predicate compares them by identity/equality, e.g.
+# `self.op == contains`; they are never instantiated)
+class contains(type): pass
+class included(type): pass
+
+# New modifier: { contains
+class Predicate:
+
+ operators = {
+ '==' : eq,
+ '!=' : ne,
+ '<' : lt,
+ '<=' : le,
+ '>' : gt,
+ '>=' : ge,
+ '&&' : and_,
+ '||' : or_,
+ 'CONTAINS' : contains,
+ 'INCLUDED' : included
+ }
+
+ operators_short = {
+ '=' : eq,
+ '~' : ne,
+ '<' : lt,
+ '[' : le,
+ '>' : gt,
+ ']' : ge,
+ '&' : and_,
+ '|' : or_,
+ '}' : contains,
+ '{' : included
+ }
+
+ def __init__(self, *args, **kwargs):
+ """
+ Build a Predicate instance.
+ Args:
+ kwargs: You can pass:
+ - 3 args (left, operator, right)
+ left: The left operand (it may be a String instance or a tuple)
+ operator: See Predicate.operators, this is the binary operator
+ involved in this Predicate.
+ right: The right value (it may be a String instance
+ or a literal (String, numerical value, tuple...))
+ - 1 argument (list or tuple), containing three arguments
+ (variable, operator, value)
+ """
+ if len(args) == 3:
+ key, op, value = args
+ elif len(args) == 1 and isinstance(args[0], (tuple,list)) and len(args[0]) == 3:
+ key, op, value = args[0]
+ elif len(args) == 1 and isinstance(args[0], Predicate):
+ key, op, value = args[0].get_tuple()
+ else:
+ raise Exception, "Bad initializer for Predicate (args = %r)" % args
+
+ assert not isinstance(value, (frozenset, dict, set)), "Invalid value type (type = %r)" % type(value)
+ if isinstance(value, list):
+ value = tuple(value)
+
+ self.key = key
+ if isinstance(op, StringTypes):
+ op = op.upper()
+ if op in self.operators.keys():
+ self.op = self.operators[op]
+ elif op in self.operators_short.keys():
+ self.op = self.operators_short[op]
+ else:
+ self.op = op
+
+ if isinstance(value, list):
+ self.value = tuple(value)
+ else:
+ self.value = value
+
+ @returns(StringTypes)
+ def __str__(self):
+ """
+ Returns:
+ The '%s' representation of this Predicate.
+ """
+ key, op, value = self.get_str_tuple()
+ if isinstance(value, (tuple, list, set, frozenset)):
+ value = [repr(v) for v in value]
+ value = "[%s]" % ", ".join(value)
+ return "%s %s %r" % (key, op, value)
+
+ @returns(StringTypes)
+ def __repr__(self):
+ """
+ Returns:
+ The '%r' representation of this Predicate.
+ """
+ return "Predicate<%s %s %r>" % self.get_str_tuple()
+
+ def __hash__(self):
+ """
+ Returns:
+ The hash of this Predicate (this allows to define set of
+ Predicate instances).
+ """
+ return hash(self.get_tuple())
+
+ @returns(bool)
+ def __eq__(self, predicate):
+ """
+ Returns:
+ True iif self == predicate.
+ """
+ if not predicate:
+ return False
+ return self.get_tuple() == predicate.get_tuple()
+
+ def get_key(self):
+ """
+ Returns:
+ The left operand of this Predicate. It may be a String
+ or a tuple of Strings.
+ """
+ return self.key
+
+ def set_key(self, key):
+ """
+ Set the left operand of this Predicate.
+ Params:
+ key: The new left operand.
+ """
+ self.key = key
+
+ def get_op(self):
+ return self.op
+
+ def get_value(self):
+ return self.value
+
+ def set_value(self, value):
+ self.value = value
+
+ def get_tuple(self):
+ return (self.key, self.op, self.value)
+
+ def get_str_op(self):
+ op_str = [s for s, op in self.operators.iteritems() if op == self.op]
+ return op_str[0]
+
+ def get_str_tuple(self):
+ return (self.key, self.get_str_op(), self.value,)
+
+ def to_list(self):
+ return list(self.get_str_tuple())
+
+ def match(self, dic, ignore_missing=False):
+ if isinstance(self.key, tuple):
+ print "PREDICATE MATCH", self.key
+ print dic
+ print "-----------------------------"
+
+ # Can we match ?
+ if self.key not in dic:
+ return ignore_missing
+
+ if self.op == eq:
+ if isinstance(self.value, list):
+ return (dic[self.key] in self.value) # array ?
+ else:
+ return (dic[self.key] == self.value)
+ elif self.op == ne:
+ if isinstance(self.value, list):
+ return (dic[self.key] not in self.value) # array ?
+ else:
+ return (dic[self.key] != self.value) # array ?
+ elif self.op == lt:
+ if isinstance(self.value, StringTypes):
+ # prefix match
+ return dic[self.key].startswith('%s.' % self.value)
+ else:
+ return (dic[self.key] < self.value)
+ elif self.op == le:
+ if isinstance(self.value, StringTypes):
+ return dic[self.key] == self.value or dic[self.key].startswith('%s.' % self.value)
+ else:
+ return (dic[self.key] <= self.value)
+ elif self.op == gt:
+ if isinstance(self.value, StringTypes):
+ # prefix match
+ return self.value.startswith('%s.' % dic[self.key])
+ else:
+ return (dic[self.key] > self.value)
+ elif self.op == ge:
+ if isinstance(self.value, StringTypes):
+ # prefix match
+ return dic[self.key] == self.value or self.value.startswith('%s.' % dic[self.key])
+ else:
+ return (dic[self.key] >= self.value)
+ elif self.op == and_:
+ return (dic[self.key] & self.value) # array ?
+ elif self.op == or_:
+ return (dic[self.key] | self.value) # array ?
+ elif self.op == contains:
+ method, subfield = self.key.split('.', 1)
+ return not not [ x for x in dic[method] if x[subfield] == self.value]
+ elif self.op == included:
+ return dic[self.key] in self.value
+ else:
+ raise Exception, "Unexpected table format: %r" % dic
+
+ def filter(self, dic):
+ """
+ Filter dic according to the current predicate.
+ """
+
+ if '.' in self.key:
+ # users.hrn
+ method, subfield = self.key.split('.', 1)
+ if not method in dic:
+ return None # XXX
+
+ if isinstance(dic[method], dict):
+ # We have a 1..1 relationship: apply the same filter to the dict
+ subpred = Predicate(subfield, self.op, self.value)
+ match = subpred.match(dic[method])
+ return dic if match else None
+
+ elif isinstance(dic[method], (list, tuple)):
+ # 1..N relationships
+ match = False
+ if self.op == contains:
+ return dic if self.match(dic) else None
+ else:
+ subpred = Predicate(subfield, self.op, self.value)
+ dic[method] = subpred.filter(dic[method])
+ return dic
+ else:
+ raise Exception, "Unexpected table format: %r", dic
+
+
+ else:
+ # Individual field operations: this could be simplified, since we are now using operators_short !!
+ # XXX match
+ print "current predicate", self
+ print "matching", dic
+ print "----"
+ return dic if self.match(dic) else None
+
+ def get_field_names(self):
+ if isinstance(self.key, (list, tuple, set, frozenset)):
+ return set(self.key)
+ else:
+ return set([self.key])
+
+ def get_value_names(self):
+ if isinstance(self.value, (list, tuple, set, frozenset)):
+ return set(self.value)
+ else:
+ return set([self.value])
+
+ def has_empty_value(self):
+ if isinstance(self.value, (list, tuple, set, frozenset)):
+ return not any(self.value)
+ else:
+ return not self.value
--- /dev/null
+# Borrowed from Chandler
+# http://chandlerproject.org/Projects/ChandlerTwistedInThreadedEnvironment
+
+import threading, time
+from manifold.util.singleton import Singleton
+from manifold.util.log import *
+from twisted.internet import defer
+from twisted.python import threadable
+
+__author__ ="Brian Kirsch <bkirsch@osafoundation.org>"
+
+#required for using threads with the Reactor
+threadable.init()
+
+class ReactorException(Exception):
+ def __init__(self, *args):
+ Exception.__init__(self, *args)
+
+
+class ReactorThread(threading.Thread):
+    """
+    Run the Reactor in a Thread to prevent blocking the
+    Main Thread once reactor.run is called
+    """
+
+    __metaclass__ = Singleton
+
+    def __init__(self):
+        threading.Thread.__init__(self)
+        # True while the reactor loop is live (set by run / cleared on shutdown)
+        self._reactorRunning = False
+
+        # Be sure the import is done only at runtime, we keep a reference in the
+        # class instance
+        from twisted.internet import reactor
+        self.reactor = reactor
+
+    def run(self):
+        # Thread body: runs the reactor loop until stop_reactor() is called.
+        if self._reactorRunning:
+            raise ReactorException("Reactor Already Running")
+
+        self._reactorRunning = True
+
+        #call run passing a False flag indicating to the
+        #reactor not to install sig handlers since sig handlers
+        #only work on the main thread
+        try:
+            #signal.signal(signal.SIGINT, signal.default_int_handler)
+            self.reactor.run(False)
+        except Exception, e:
+            print "Reactor exception:", e
+
+    def callInReactor(self, callable, *args, **kw):
+        # Thread-safe dispatch: route through callFromThread while the loop is
+        # live, otherwise invoke directly in the current thread.
+        if self._reactorRunning:
+            self.reactor.callFromThread(callable, *args, **kw)
+        else:
+            callable(*args, **kw)
+
+    def isReactorRunning(self):
+        # Whether the reactor loop is currently live.
+        return self._reactorRunning
+
+    def start_reactor(self):
+        # Start the thread and busy-wait (up to ~0.5s) for the loop to come up.
+        if self._reactorRunning:
+            # NOTE(review): log_warning is expected to come from the
+            # `from manifold.util.log import *` at the top -- not visible in
+            # this hunk; confirm it exists.
+            log_warning("Reactor already running. This is normal, please remove this debug message")
+            return
+            #raise ReactorException("Reactor Already Running")
+        threading.Thread.start(self)
+        cpt = 0
+        while not self._reactorRunning:
+            time.sleep(0.1)
+            cpt +=1
+            if cpt > 5:
+                raise ReactorException, "Reactor thread is too long to start... cancelling"
+        self.reactor.addSystemEventTrigger('after', 'shutdown', self.__reactorShutDown)
+
+    def stop_reactor(self):
+        """
+        may want a way to force thread to join if reactor does not shutdown
+        properly. The reactor can get in to a recursive loop condition if reactor.stop
+        placed in the threads join method. This will require further investigation.
+        """
+        if not self._reactorRunning:
+            raise ReactorException("Reactor Not Running")
+        self.reactor.callFromThread(self.reactor.stop)
+        #self.reactor.join()
+
+    def addReactorEventTrigger(self, phase, eventType, callable):
+        # Same dispatch pattern as callInReactor for event-trigger registration.
+        if self._reactorRunning:
+            self.reactor.callFromThread(self.reactor.addSystemEventTrigger, phase, eventType, callable)
+        else:
+            self.reactor.addSystemEventTrigger(phase, eventType, callable)
+
+    def __reactorShuttingDown(self):
+        # Placeholder hook; never registered in this class.
+        pass
+
+    def __reactorShutDown(self):
+        """This method called when the reactor is stopped"""
+        self._reactorRunning = False
+
+    def __getattr__(self, name):
+        # We transfer missing methods to the reactor
+        # (always via callFromThread; the return value is discarded)
+        def _missing(*args, **kwargs):
+            self.reactor.callFromThread(getattr(self.reactor, name), *args, **kwargs)
+        return _missing
--- /dev/null
+from manifold.util.singleton import Singleton
+
+class ReactorWrapper(object):
+ __metaclass__ = Singleton
+
+ def __init__(self):
+ # Be sure the import is done only at runtime, we keep a reference in the
+ # class instance
+ from twisted.internet import reactor
+ self.reactor = reactor
+
+
+ def callInReactor(self, callable, *args, **kw):
+ print "ReactorWrapper::callInReactor"
+ if self._reactorRunning:
+ self.reactor.callFromThread(callable, *args, **kw)
+ else:
+ callable(*args, **kw)
+
+ def isReactorRunning(self):
+ return self._reactorRunning
+
+ def start_reactor(self):
+ self.reactor.run()
+
+ def stop_reactor(self):
+ self.reactor.stop()
+
+ def addReactorEventTrigger(self, phase, eventType, callable):
+ print "ReactorWrapper::addReactorEventTrigger"
+ if self._reactorRunning:
+ self.reactor.callFromThread(self.reactor.addSystemEventTrigger, phase, eventType, callable)
+ else:
+ self.reactor.addSystemEventTrigger(phase, eventType, callable)
+
+ def __reactorShuttingDown(self):
+ pass
+
+ def __reactorShutDown(self):
+ """This method called when the reactor is stopped"""
+ print "REACTOR SHUTDOWN"
+ self._reactorRunning = False
+
+ def __getattr__(self, name):
+ # We transfer missing methods to the reactor
+ def _missing(*args, **kwargs):
+ getattr(self.reactor, name)(*args, **kwargs)
+ return _missing
--- /dev/null
+#-------------------------------------------------------------------------
+# Class Singleton
+#
+# Classes that inherit from Singleton can be instanciated only once
+#-------------------------------------------------------------------------
+
+class Singleton(type):
+ def __init__(cls, name, bases, dic):
+ super(Singleton,cls).__init__(name,bases,dic)
+ cls.instance=None
+
+ def __call__(cls, *args, **kw):
+ if cls.instance is None:
+ cls.instance=super(Singleton,cls).__call__(*args,**kw)
+ return cls.instance
+
+
+# See also
+# http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
--- /dev/null
+from manifold.gateways import Gateway
+from manifold.util.callback import Callback
+
+#URL='sqlite:///:memory:?check_same_thread=False'
+URL='sqlite:////var/myslice/db.sqlite?check_same_thread=False'
+
+class Storage(object):
+ pass
+ # We can read information from files, database, commandline, etc
+ # Let's focus on the database
+
+ @classmethod
+ def register(self, object):
+ """
+ Registers a new object that will be stored locally by manifold.
+ This will live in the
+ """
+ pass
+
+class DBStorage(Storage):
+    # Storage backend running queries through the sqlalchemy gateway against
+    # the local sqlite database (URL above).
+    @classmethod
+    def execute(self, query, user=None, format='dict'):
+        # Run `query` synchronously and return the collected results
+        # (blocks on the Callback until the gateway has finished).
+        # XXX Need to pass local parameters
+        gw = Gateway.get('sqlalchemy')(config={'url': URL}, user=user, format=format)
+        gw.set_query(query)
+        cb = Callback()
+        gw.set_callback(cb)
+        gw.start()
+        return cb.get_results()
--- /dev/null
+# http://wiki.python.org/moin/PythonDecoratorLibrary#Type_Enforcement_.28accepts.2Freturns.29
+'''
+One of three degrees of enforcement may be specified by passing
+the 'debug' keyword argument to the decorator:
+ 0 -- NONE: No type-checking. Decorators disabled.
+ 1 -- MEDIUM: Print warning message to stderr. (Default)
+ 2 -- STRONG: Raise TypeError with message.
+If 'debug' is not passed to the decorator, the default level is used.
+
+Example usage:
+ >>> NONE, MEDIUM, STRONG = 0, 1, 2
+ >>>
+ >>> @accepts(int, int, int)
+ ... @returns(float)
+ ... def average(x, y, z):
+ ... return (x + y + z) / 2
+ ...
+ >>> average(5.5, 10, 15.0)
+ TypeWarning: 'average' method accepts (int, int, int), but was given
+ (float, int, float)
+ 15.25
+ >>> average(5, 10, 15)
+ TypeWarning: 'average' method returns (float), but result is (int)
+ 15
+
+Needed to cast params as floats in function def (or simply divide by 2.0).
+
+ >>> TYPE_CHECK = STRONG
+ >>> @accepts(int, debug=TYPE_CHECK)
+ ... @returns(int, debug=TYPE_CHECK)
+ ... def fib(n):
+ ... if n in (0, 1): return n
+ ... return fib(n-1) + fib(n-2)
+ ...
+ >>> fib(5.3)
+ Traceback (most recent call last):
+ ...
+ TypeError: 'fib' method accepts (int), but was given (float)
+
+'''
+import sys
+from itertools import izip
+
+def accepts(*types, **kw):
+ '''Function decorator. Checks decorated function's arguments are
+ of the expected types.
+
+ Parameters:
+ types -- The expected types of the inputs to the decorated function.
+ Must specify type for each parameter.
+ kw -- Optional specification of 'debug' level (this is the only valid
+ keyword argument, no other should be given).
+ debug = ( 0 | 1 | 2 )
+
+ '''
+ if not kw:
+ # default level: MEDIUM
+ debug = 2
+ else:
+ debug = kw['debug']
+ try:
+ def decorator(f):
+ # XXX Missing full support of kwargs
+ def newf(*args, **kwargs):
+ if debug is 0:
+ return f(*args, **kwargs)
+ assert len(args) == len(types)
+ argtypes = tuple(map(type, args))
+ if not compare_types(types, argtypes):
+ # if argtypes != types:
+ msg = info(f.__name__, types, argtypes, 0)
+ if debug is 1:
+ print >> sys.stderr, 'TypeWarning: ', msg
+ elif debug is 2:
+ raise TypeError, msg
+ return f(*args, **kwargs)
+ newf.__name__ = f.__name__
+ return newf
+ return decorator
+ except KeyError, key:
+ raise KeyError, key + "is not a valid keyword argument"
+ except TypeError, msg:
+ raise TypeError, msg
+
+def compare_types(expected, actual):
+    # Structural comparison of expected vs actual types; NoneType always
+    # matches (so None is accepted for any expected type).
+    # NOTE(review): in the final branch `actual` is itself a type, so
+    # isinstance(actual, expected) checks the metaclass, not subclassing --
+    # the commented-out issubclass hints this is a known quirk; behavior kept.
+    if isinstance(expected, tuple):
+        if isinstance(actual, tuple):
+            # Pairwise comparison (izip stops at the shorter tuple)
+            for x, y in izip(expected, actual):
+                if not compare_types(x ,y):
+                    return False
+            return True
+        else:
+            return actual == type(None) or actual in expected
+    else:
+        return actual == type(None) or actual == expected or isinstance(actual, expected) # issubclass(actual, expected)
+
+def returns(ret_type, **kw):
+    '''Function decorator. Checks decorated function's return value
+    is of the expected type.
+
+    Parameters:
+    ret_type -- The expected type of the decorated function's return value.
+                Must specify type for each parameter.
+    kw       -- Optional specification of 'debug' level (this is the only valid
+                keyword argument, no other should be given).
+                debug=(0 | 1 | 2)
+    '''
+    try:
+        if not kw:
+            # default level: MEDIUM
+            debug = 1
+        else:
+            debug = kw['debug']
+        def decorator(f):
+            def newf(*args):
+                result = f(*args)
+                # NOTE(review): `is` identity-compares small ints; works in
+                # CPython but == would be correct (same quirk as accepts()).
+                if debug is 0:
+                    return result
+                res_type = type(result)
+                if not compare_types(ret_type, res_type):
+                    # if res_type != ret_type: # JORDAN: fix to allow for # StringTypes = (str, unicode)
+                    # XXX note that this check should be recursive
+                    msg = info(f.__name__, (ret_type,), (res_type,), 1)
+                    if debug is 1:
+                        print >> sys.stderr, 'TypeWarning: ', msg
+                    elif debug is 2:
+                        raise TypeError, msg
+                return result
+            newf.__name__ = f.__name__
+            return newf
+        return decorator
+    # NOTE(review): these handlers cannot fire here -- they wrap only the
+    # decorator definition, not its application (same dead code as accepts()).
+    except KeyError, key:
+        raise KeyError, key + "is not a valid keyword argument"
+    except TypeError, msg:
+        raise TypeError, msg
+
+def info(fname, expected, actual, flag):
+ '''Convenience function returns nicely formatted error/warning msg.'''
+ format = lambda types: ', '.join([str(t).split("'")[1] for t in types])
+ msg = "'{}' method ".format( fname )\
+ + ("accepts", "returns")[flag] + " ({}), but ".format(expected)\
+ + ("was given", "result is")[flag] + " ({})".format(actual)
+ return msg
+
--- /dev/null
+import os
+import xml.etree.cElementTree as ElementTree
+
+class XmlListConfig(list):
+    # List built from a run of same-tag sibling XML elements
+    # (companion of XmlDictConfig below).
+    def __init__(self, aList):
+        for element in aList:
+            # NOTE: Element truthiness means "has child elements"
+            if element:
+                # treat like dict
+                if len(element) == 1 or element[0].tag != element[1].tag:
+                    self.append(XmlDictConfig(element))
+                # treat like list
+                elif element[0].tag == element[1].tag:
+                    self.append(XmlListConfig(element))
+            elif element.text:
+                # leaf element: keep its non-empty stripped text
+                text = element.text.strip()
+                if text:
+                    self.append(text)
+
+
+class XmlDictConfig(dict):
+ '''
+ Example usage:
+
+ >>> tree = ElementTree.parse('your_file.xml')
+ >>> root = tree.getroot()
+ >>> xmldict = XmlDictConfig(root)
+
+ Or, if you want to use an XML string:
+
+ >>> root = ElementTree.XML(xml_string)
+ >>> xmldict = XmlDictConfig(root)
+
+ And then use xmldict for what it is... a dict.
+ '''
+ def __init__(self, parent_element):
+ childrenNames = [child.tag for child in parent_element.getchildren()]
+
+ if parent_element.items(): #attributes
+ self.update(dict(parent_element.items()))
+ for element in parent_element:
+ if element:
+ # treat like dict - we assume that if the first two tags
+ # in a series are different, then they are all different.
+ if len(element) == 1 or element[0].tag != element[1].tag:
+ aDict = XmlDictConfig(element)
+ # treat like list - we assume that if the first two tags
+ # in a series are the same, then the rest are the same.
+ else:
+ # here, we put the list in dictionary; the key is the
+ # tag name the list elements all share in common, and
+ # the value is the list itself
+ aDict = {element[0].tag: XmlListConfig(element)}
+ # if the tag has attributes, add those to the dict
+ if element.items():
+ aDict.update(dict(element.items()))
+
+ if childrenNames.count(element.tag) > 1:
+ try:
+ currentValue = self[element.tag]
+ currentValue.append(aDict)
+ self.update({element.tag: currentValue})
+ except: #the first of its kind, an empty list must be created
+ self.update({element.tag: [aDict]}) #aDict is written in [], i.e. it will be a list
+
+ else:
+ self.update({element.tag: aDict})
+ # this assumes that if you've got an attribute in a tag,
+ # you won't be having any text. This may or may not be a
+ # good idea -- time will tell. It works for the way we are
+ # currently doing XML configuration files...
+ elif element.items():
+ self.update({element.tag: dict(element.items())})
+ # finally, if there are no child tags and no attributes, extract
+ # the text
+ else:
+ self.update({element.tag: element.text})
+
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'portal',
+ # SLA
+ 'sla',
]
# this app won't load in a build environment
if not building: INSTALLED_APPS.append ('rest')
#IA_JS_FORMAT = "<script type='text/javascript' src='{URL}' />"
# put stuff under static/
# IA_MEDIA_PREFIX = '/code/'
+
+####SLA#####
+
+# NOTE(review): service credentials hardcoded in (version-controlled)
+# settings; consider moving them to a local untracked config file or
+# environment variables.
+SLA_MANAGER_URL = "http://157.193.215.125:4000/sla-service"
+SLA_MANAGER_USER = "normal_user"
+SLA_MANAGER_PASSWORD = "password"
\ No newline at end of file
(r'^testbeds/(?P<slicename>[^/]+)/?$', portal.slicetabtestbeds.SliceTabTestbeds.as_view()),
(r'^experiment/(?P<slicename>[^/]+)/?$', portal.slicetabexperiment.ExperimentView.as_view()),
url(r'^portal/', include('portal.urls')),
+
+ # SLA
+ url(r'^sla/', include('sla.urls')),
]
#this one would not match the convention
class QueryUpdater(Plugin):
+    def __init__ (self, query=None, **settings):
+        # Forward generic plugin settings to the Plugin base; keep the query
+        # and the username (required key in settings -- raises KeyError when
+        # absent) so they can be exposed to the javascript side.
+        Plugin.__init__ (self, **settings)
+        self.query = query
+        self.username = str(settings['username'])
+
def template_file (self):
return "queryupdater.html"
return reqs
def json_settings_list (self):
- return ['plugin_uuid', 'domid', 'query_uuid']
+ return ['plugin_uuid', 'domid', 'query_uuid', 'username']
do_update: function(e) {
var self = e.data;
+ var username = e.data.options.username;
+ // NOTE(review): `data` is not defined in this handler's scope (the
+ // parameter is `e`); looks copied from an on_new_record-style callback.
+ // TODO confirm where `data` comes from before shipping.
+ var urn = data.value;
// XXX check that the query is not disabled
- manifold.raise_event(self.options.query_uuid, RUN_UPDATE);
+
+ console.log("DATA VALUE: " + data.value);
+
+ // iMinds testbeds require the user to accept an SLA before updating
+ if (data.value.toLowerCase().indexOf("iminds") >= 0){
+
+ $('#sla_dialog').show();
+ $('#slamodal').modal('show');
+
+ $(document).ready(function() {
+ $("#accept_sla").click(function(){
+ console.log("SLA ACCEPTED");
+ console.log("With username: " + username);
+ if(urn.toLowerCase().indexOf("wall2") >= 0){
+ $.post("/sla/agreements/simplecreate",
+ { "template_id": "iMindsServiceVirtualwall",
+ "user": username,
+ "expiration_time": new Date() // jgarcia: FIXME
+ });
+ } else if(urn.toLowerCase().indexOf("wilab2") >= 0){
+ $.post("/sla/agreements/simplecreate",
+ { "template_id":"iMindsServiceWiLab2",
+ "user":username,
+ "expiration_time": new Date() // jgarcia: FIXME
+ });
+ }
+ $('#slamodal').modal('hide');
+ console.log("Executing raise_event after sending SLA");
+ // NOTE(review): RUN_UPDATE is commented out below, so accepting the
+ // SLA never actually triggers the update. TODO confirm intent.
+ // manifold.raise_event(self.options.query_uuid, RUN_UPDATE);
+ });
+ });
+
+ $(document).ready(function() {
+ $("#dismiss_sla").click(function(){
+ console.log("SLA NOT ACCEPTED");
+ $('#slamodal').modal('hide');
+ });
+ });
+
+ } else {
+ console.log("Executing raise_event");
+ manifold.raise_event(self.options.query_uuid, RUN_UPDATE);
+ }
},
// related buttons are also disabled in the html template
--- /dev/null
+from unfold.plugin import Plugin
+
+class SlaDialog(Plugin):
+
+ def __init__ (self, query=None, **settings):
+ Plugin.__init__ (self, **settings)
+ self.query = query
+ self.username = str(settings['username'])
+
+ def template_file (self):
+ return "sladialog.html"
+
+ def requirements (self):
+ reqs = {
+ 'js_files' : [
+ 'js/sladialog.js'
+ ],
+ 'css_files': [
+ 'css/sladialog.css',
+ ]
+ }
+ return reqs
+
+ def json_settings_list (self):
+ # query_uuid will pass self.query results to the javascript
+ # and will be available as "record" in :
+ # on_new_record: function(record)
+ return ['plugin_uuid', 'domid', 'query_uuid', 'username']
+
+ def export_json_settings (self):
+ return True
--- /dev/null
+/**
+ * MyPlugin: demonstration plugin
+ * Version: 0.1
+ * Description: Template for writing new plugins and illustrating the different
+ * possibilities of the plugin API.
+ * This file is part of the Manifold project
+ * Requires: js/plugin.js
+ * URL: http://www.myslice.info
+ * Author: Jordan Augé <jordan.auge@lip6.fr>
+ * Copyright: Copyright 2012-2013 UPMC Sorbonne Universités
+ * License: GPLv3
+ */
+
+(function($){
+
+ var SlaDialog = Plugin.extend({
+
+ /** XXX to check
+ * @brief Plugin constructor
+ * @param options : an associative array of setting values
+ * @param element :
+ * @return : a jQuery collection of objects on which the plugin is
+ * applied, which allows to maintain chainability of calls
+ */
+ init: function(options, element) {
+ // for debugging tools
+ this.classname="SlaDialog";
+ // Call the parent constructor, see FAQ when forgotten
+ this._super(options, element);
+
+ /* Member variables */
+
+ /* Plugin events */
+
+ /* Setup query and record handlers */
+
+ // Explain this will allow query events to be handled
+ // What happens when we don't define some events ?
+ // Some can be less efficient
+ this.listen_query(options.query_uuid);
+
+ /* GUI setup and event binding */
+ // call function
+
+ },
+
+ find_row: function(key)
+ {
+ // key in third position, column id = 2
+ var KEY_POS = 2;
+
+ var cols = $.grep(this.table.fnSettings().aoData, function(col) {
+ return (col._aData[KEY_POS] == key);
+ } );
+
+ if (cols.length == 0)
+ return null;
+ if (cols.length > 1)
+ throw "Too many same-key rows in ResourceSelected plugin";
+
+ return cols[0];
+ },
+
+ /* PLUGIN EVENTS */
+ // on_show like in querytable
+
+
+ /* GUI EVENTS */
+
+ uncheck: function(urn)
+ {
+ $('#slamodal').on('hidden.bs.modal', function(e){
+ $('#' + (urn).replace(/"/g,'')).click();
+ console.log('#' + (data.value).replace(/"/g,''));
+ });
+ },
+
+ // a function to bind events here: click change
+ // how to raise manifold events
+ set_state: function(data, username)
+ {
+ var action;
+ var msg;
+ var button = '';
+ var username = username;
+
+ var uncheck = false;
+
+ switch(data.request) {
+ case FIELD_REQUEST_ADD_RESET:
+ case FIELD_REQUEST_REMOVE_RESET:
+ $('#sla_dialog').hide();
+ // find line and delete it
+ // row = this.find_row(data.value);
+ // if (row)
+ // this.table.fnDeleteRow(row.nTr);
+ // $("#badge-pending").data('number', $("#badge-pending").data('number') - 1 );
+ // $("#badge-pending").text($("#badge-pending").data('number'));
+ return;
+ case FIELD_REQUEST_CHANGE:
+ action = 'UPDATE';
+ break;
+ case FIELD_REQUEST_ADD:
+ action = 'ADD';
+
+ if (data.value.toLowerCase().indexOf("iminds") >= 0){
+
+ $('#sla_dialog').show();
+ $('#slamodal').modal('show');
+
+ $(document).ready(function() {
+ $("#accept_sla").click(function(){
+ console.log("SLA ACCEPTED");
+ console.log("With username: " + username);
+ $.post("/sla/agreements/simplecreate", {"template_id":"iMindsServiceTemplate","user":username});
+ $('#slamodal').modal('hide');
+ });
+ });
+
+ $(document).ready(function() {
+ $("#dismiss_sla").click(function(){
+ console.log("SLA NOT ACCEPTED");
+ // FIX ME: This is not a good solution to prevent the checkbox click
+ var chkbox = document.getElementById((data.value).replace(/"/g,''));
+ if(chkbox.checked){
+ chkbox.click();
+ }
+ $('#slamodal').modal('hide');
+
+ });
+ });
+
+ }
+
+ break;
+ case FIELD_REQUEST_REMOVE:
+ action = 'REMOVE';
+ break;
+ }
+ },
+
+ post_agreement: function()
+ {
+ console.log(this.options.user);
+ },
+
+ /* GUI MANIPULATION */
+
+ // We advise you to write function to change behaviour of the GUI
+ // Will use naming helpers to access content _inside_ the plugin
+ // always refer to these functions in the remaining of the code
+
+ show_hide_button: function()
+ {
+ // this.id, this.el, this.cl, this.elts
+ // same output as a jquery selector with some guarantees
+ },
+
+ /* TEMPLATES */
+
+ // see in the html template
+ // How to load a template, use of mustache
+
+ /* QUERY HANDLERS */
+
+ // How to make sure the plugin is not desynchronized
+ // He should manifest its interest in filters, fields or records
+ // functions triggered only if the proper listen is done
+
+ // no prefix
+
+ on_filter_added: function(filter)
+ {
+
+ },
+
+ on_field_state_changed: function(result)
+ {
+ console.log("triggered state_changed: "+result);
+ // this.set_state(result, this.options.username);
+ },
+
+ // ... be sure to list all events here
+
+ /* RECORD HANDLERS */
+ on_all_new_record: function(record)
+ {
+ //
+ },
+
+ on_new_record: function(record)
+ {
+
+ },
+
+ /* INTERNAL FUNCTIONS */
+ _dummy: function() {
+ // only convention, not strictly enforced at the moment
+ },
+
+ });
+
+ /* Plugin registration */
+ $.plugin('SlaDialog', SlaDialog);
+
+    // TODO Here are use cases for instantiating plugins in different ways like in the pastie.
+
+})(jQuery);
--- /dev/null
+<div id={{ domid }}>
+<div class="modal fade" id="slamodal" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true"
+ data-backdrop="static" data-keyboard="false">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <h4 class="modal-title" id="myModalLabel">Provider iMinds offers the following SLA</h4>
+ </div>
+ <div class="modal-body">
+ <p>SLA description</p>
+ <p>Testbed guarantees 0.99 Uptime rate for 0.99 rate of the resources during the sliver lifetime</p>
+ </div>
+ <div class="modal-footer">
+ <button type="button" id="dismiss_sla" class="btn btn-default" data-dismiss="modal">Dismiss</button>
+ <button type="button" id="accept_sla" class="btn btn-primary">Accept</button>
+ </div>
+ </div>
+ </div>
+</div>
+</div>
--- /dev/null
+from django.core.context_processors import csrf
+from django.http import HttpResponseRedirect
+from django.contrib.auth import authenticate, login, logout
+from django.template import RequestContext
+from django.shortcuts import render_to_response
+from django.shortcuts import render
+
+from unfold.loginrequired import LoginRequiredAutoLogoutView
+
+from manifold.core.query import Query
+from manifoldapi.manifoldapi import execute_query
+from manifoldapi.manifoldresult import ManifoldResult
+from ui.topmenu import topmenu_items, the_user
+from myslice.configengine import ConfigEngine
+
+from myslice.theme import ThemeView
+import json
+
+class ServiceDirectoryView (LoginRequiredAutoLogoutView, ThemeView):
+ template_name = 'servicedirectory.html'
+ server_url = "http://172.24.76.60:80/"
+
+ # expose this so we can mention the backend URL on the welcome page
+ def default_env (self):
+ return {
+ 'MANIFOLD_URL':ConfigEngine().manifold_url(),
+ }
+
+ def post (self,request):
+ env = self.default_env()
+ env['theme'] = self.theme
+ return render_to_response(self.template, env, context_instance=RequestContext(request))
+
+ def get (self, request, state=None):
+ env = self.default_env()
+
+ if request.user.is_authenticated():
+ env['person'] = self.request.user
+ # user_query = Query().get('user').select('user_hrn','parent_authority').filter_by('user_hrn','==','$user_hrn')
+ # user_details = execute_query(self.request, user_query)
+ # try:
+ # env['user_details'] = user_details[0]
+ # except Exception,e:
+ # env['error'] = "Please check your Credentials"
+
+ # try:
+ # user_local_query = Query().get('local:user').select('config').filter_by('email','==',str(env['person']))
+ # user_local_details = execute_query(self.request, user_local_query)
+ # user_local = user_local_details[0]
+ # user_local_config = user_local['config']
+ # user_local_config = json.loads(user_local_config)
+ # user_local_authority = user_local_config.get('authority')
+ # if 'user_details' not in env or 'parent_authority' not in env['user_details'] or env['user_details']['parent_authority'] is None:
+ # env['user_details'] = {'parent_authority': user_local_authority}
+ # except Exception,e:
+ # env['error'] = "Please check your Manifold user config"
+ # ## check user is pi or not
+ # platform_query = Query().get('local:platform').select('platform_id','platform','gateway_type','disabled')
+ # account_query = Query().get('local:account').select('user_id','platform_id','auth_type','config')
+ # platform_details = execute_query(self.request, platform_query)
+ # account_details = execute_query(self.request, account_query)
+ # for platform_detail in platform_details:
+ # for account_detail in account_details:
+ # if platform_detail['platform_id'] == account_detail['platform_id']:
+ # if 'config' in account_detail and account_detail['config'] is not '':
+ # account_config = json.loads(account_detail['config'])
+ # if 'myslice' in platform_detail['platform']:
+ # acc_auth_cred = account_config.get('delegated_authority_credentials','N/A')
+ # # assigning values
+ # if acc_auth_cred == {}:
+ # pi = "is_not_pi"
+ # else:
+ # pi = "is_pi"
+
+ else:
+ env['person'] = None
+
+ env['theme'] = self.theme
+ env['section'] = "Institution"
+ # env['pi'] = pi
+ env['username']=the_user(request)
+ env['topmenu_items'] = topmenu_items(None, request)
+ env['servdirurl'] = self.server_url
+ if state: env['state'] = state
+ elif not env['username']: env['state'] = None
+ # use one or two columns for the layout - not logged in users will see the login prompt
+ env['layout_1_or_2']="layout-unfold2.html" if not env['username'] else "layout-unfold1.html"
+
+
+ return render_to_response(self.template, env, context_instance=RequestContext(request))
+
from plugins.testbeds import TestbedsPlugin
from plugins.scheduler2 import Scheduler2
from plugins.columns_editor import ColumnsEditor
+from plugins.sladialog import SlaDialog
from myslice.theme import ThemeView
toggled = False,
domid = 'pending',
outline_complete = True,
+ username = request.user,
)
# --------------------------------------------------------------------------
},
)
+ # --------------------------------------------------------------------------
+ # SLA View and accept dialog
+
+ sla_dialog = SlaDialog(
+ page = page,
+ title = 'sla dialog',
+ query = main_query,
+ togglable = False,
+ # start turned off, it will open up itself when stuff comes in
+ toggled = True,
+ domid = 'sla_dialog',
+ outline_complete = True,
+ username = request.user,
+ )
+
template_env = {}
template_env['list_resources'] = list_resources.render(self.request)
# template_env['list_reserved_resources'] = list_reserved_resources.render(self.request)
template_env['map_resources'] = map_resources.render(self.request)
template_env['scheduler'] = resources_as_scheduler2.render(self.request)
template_env['pending_resources'] = pending_resources.render(self.request)
+ template_env['sla_dialog'] = sla_dialog.render(self.request)
template_env["theme"] = self.theme
template_env["username"] = request.user
template_env["slice"] = slicename
list-style:none;
float:left;
padding:0;
- margin:15px 50px 0 0;
+ margin:15px 25px 0 0;
}
div#navigation li a {
color:white;
width:400px;
}
+/* Service Directory */
+
+div#appservices div.row {
+ border-bottom: 1px solid;
+ border-color:#dddddd;
+ padding: 15px 15px;
+}
+
+div.portfolio-item {
+
+}
+
+div.portfolio-item p[id*='name-'] {
+ font-weight: bold;
+ cursor: pointer;
+}
\ No newline at end of file
<li><a href="/slice/{{ slice }}#experiment">Statistics</a></li>
<li><a href="/slice/{{ slice }}#experiment">Measurements</a></li>
<li><a href="/slice/{{ slice }}#experiment" data-toggle="tab">Experiment</a></li>
+ <li><a href="/slice/{{ slice }}#sla">SLA</a></li>
</ul>
{% else %}
<ul class="nav nav-tabs nav-section">
<li class="statistics"><a href="#experiment">Statistics</a></li>
<li class="measurements"><a href="#experiment">Measurements</a></li>
<li class="experiment"><a href="#experiment" data-toggle="tab">Experiment</a></li>
+ <li class="sla"><a href="#sla" data-toggle="tab">SLA</a></li>
</ul>
<script>
$(document).ready(function() {
$('div#info').load('/info/{{ slice }}/');
});
</script>
-{% endif %}
\ No newline at end of file
+{% endif %}
<li><a href="/slice/{{ slice }}#experiment">Statistics</a></li>
<li><a href="/slice/{{ slice }}#experiment">Measurements</a></li>
<li><a href="/slice/{{ slice }}#experiment" data-toggle="tab">Experiment</a></li>
+ <li><a href="/slice/{{ slice }}#sla">SLA</a></li>
</ul>
{% else %}
<ul class="nav nav-tabs nav-section">
<li class="statistics"><a href="#experiment">Statistics</a></li>
<li class="measurements"><a href="#experiment">Measurements</a></li>
<li class="experiment"><a href="#experiment" data-toggle="tab">Experiment</a></li>
+ <li class="sla" style="display:none"><a href="#sla" data-toggle="tab">SLA</a></li>
</ul>
<script>
$(document).ready(function() {
$('div#info').load('/info/{{ slice }}/');
});
</script>
-{% endif %}
\ No newline at end of file
+{% endif %}
</div>
</li>
<li id="nav-request"><a href="/portal/validate">REQUESTS</a></li>
+ <li id="nav-request"><a href="/portal/servicedirectory">SERVICES</a></li>
<li id="nav-support"><a href="http://doc.fed4fire.eu/support.html" target="_blank">SUPPORT</a></li>
<li>|</li>
<li id="nav-logout" style="margin-top: 10px;"><a id="logout" style="cursor:pointer;" data-username="{{ username }}"><span class="glyphicon glyphicon-off"></span> LOGOUT</a></li>
--- /dev/null
+{% extends "layout_wide.html" %}
+
+{% block head %}
+<!-- <script type="text/javascript" src="{{STATIC_URL}}/js/institution.js"></script> -->
+{% endblock head %}
+
+{% block content %}
+<div class="container">
+ <div class="row">
+ <div class="col-md-12">
+ <ul class="nav nav-tabs nav-section-mod">
+ <li class="active"><a href="#appservices"> Application Services </a></li>
+ <li><a href="#fedservices"> Federation Services </a></li>
+ </ul>
+ </div>
+ </div>
+</div>
+<div class="container tab-content">
+ <div class="tab-pane active row" id="appservices">
+ <div class="col-md-12">
+ <div id="appservices-tab-loading"><img src="{{ STATIC_URL }}img/loading.gif" alt="Loading Services" /></div>
+ <div id="appservices-tab-loaded" style="display:none;">
+ <div class="container" id="appservices-tab">
+
+ <!-- <table class="table" id="appservices-tab">
+ <tr>
+ <th>Name</th>
+ <th>Provider</th>
+ <th>Endpoint URL</th>
+ <th>Protocol</th>
+ <th>Basic API</th>
+ <th>Brief Description</th>
+ </tr>
+ </table> -->
+ </div>
+ </div>
+ </div>
+ </div>
+
+ <div class="tab-pane row" id="fedservices" data-authority="{{user_details.parent_authority}}">
+ <div class="col-md-12">
+ <div id="fedservices-tab-loading"><img src="{{ STATIC_URL }}img/loading.gif" alt="Loading Services" /></div>
+ <div id="fedservices-tab-loaded" style="display:none;">
+ <table class="table" id="fedservices-tab">
+ <tr>
+ <th>Name</th>
+ <th>Brief Description</th>
+ <th>URL</th>
+ </tr>
+ </table>
+ </div>
+ </div>
+ </div>
+</div>
+
+<script>
+
+$(document).ready(function() {
+
+loadedTabs = [];
+
+ $('.nav-tabs a').click(function (e) {
+ e.preventDefault();
+ $(this).tab('show');
+ // id = $(this).attr('href').substr(1);
+ // if (!(id in loadedTabs)) {
+ // switch(id) {
+ // case 'users':
+ // //loadUsers();
+ // loadedTabs[id] = true;
+ // break;
+ // }
+ // }
+
+ });
+
+ {% if person %}
+
+ $.ajax({
+ type: "GET",
+ dataType: "json",
+ async: "false",
+ url: "{{ servdirurl }}appservices/",
+ success: function(data, status, jqXHR){
+ // console.log(data);
+ // $.each(data, function(i, item){
+ // console.log(item);
+ // var tr = $('<tr>').append(
+ // $('<td id="name">').text(item.name),
+ // $('<td>').text(item.provider),
+ // $('<td>').append('<a href="' + item.endPoint + '">' + item.endPoint + "</a>"),
+ // $('<td>').text(item.protocol),
+ // $('<td>').text(item.APIBasic),
+ // $('<td>').text(item.briefDescription)
+ // );
+ // $("#appservices-tab > tbody:last").append(tr);
+ // $("td#name").click(function(){
+ // window.location.href = data.endPoint;
+ // });
+ // });
+ function createToggle(name){
+ return function(){
+ var el = $('p#expandable-' + name);
+ if(!el.is(':animated')){
+ $(el).toggle(300);
+ }
+ }
+ }
+
+ $.each(data, function(i, item){
+
+ console.log(item);
+ var row = $('<div class="row">').append(
+ $('<div>').addClass("col-md-3 portfolio-item").append(
+ $('<img>').attr('src', "http://placehold.it/150x150")
+ ),
+ $('<div>').addClass("col-md-6 portfolio-item").append(
+ $('<p id="name-' + item.name.replace(/ /g,'') + '">').text(item.name),
+ $('<p>').text(item.briefDescription),
+ $('<p>').text("Provider: " + item.provider),
+ $('<p>').append('Endpoint: <a href="' + item.endPoint + '">' + item.endPoint + "</a>"),
+ $('<p id="expandable-' + item.name.replace(/ /g,'') + '">').text(item.fullDescription).hide(),
+ $('<p id="expandable-' + item.name.replace(/ /g,'') + '">').text("Protocol: " + item.protocol).hide(),
+ $('<p id="expandable-' + item.name.replace(/ /g,'') + '">')
+ .append('API documentation: <a href="' + item.APILink + '">' + item.APILink + "</a>").hide(),
+ $('<p id="expandable-' + item.name.replace(/ /g,'') + '">').text(item.APIBasic).hide()
+ )
+ );
+ $("#appservices-tab").append(row);
+ $('p#name-' + item.name.replace(/ /g,'')).click(createToggle(item.name.replace(/ /g,'')));
+ });
+
+
+ $("div#appservices-tab-loaded").css("display","block");
+ $("div#appservices-tab-loading").css("display","none");
+ },
+ error: function(jqXHR, status){
+ console.log("ERROR: " + status);
+ }
+ });
+
+ $.ajax({
+ type: "GET",
+ dataType: "json",
+ async: "false",
+ url: "{{ servdirurl }}fedservices/",
+ success: function(data, status, jqXHR){
+ // console.log(data);
+ $.each(data, function(i, item){
+ console.log(item);
+ var tr = $('<tr>').append(
+ $('<td>').text(item.name),
+ $('<td>').text(item.briefDescription),
+ $('<td>').append('<a href="' + item.endPoint + '">' + item.endPoint + "</a>")
+ );
+ $("#fedservices-tab > tbody:last").append(tr);
+ });
+
+
+ $("div#fedservices-tab-loaded").css("display","block");
+ $("div#fedservices-tab-loading").css("display","none");
+ },
+ error: function(jqXHR, status){
+ console.log("ERROR: " + status);
+ }
+ });
+
+ // $.post("/rest/authority/",{'filters':{'authority_hrn':'{{user_details.parent_authority}}'}}, function( data ) {
+ // var authority_data = [];
+ // var onelab_data = [];
+ // /* 'city','enabled','legal','longitude','onelab_membership','address','parent_authority','slice','user','country',
+ // 'tech','abbreviated_name','url','postcode','description','scientific','authority_hrn','latitude','name' */
+ // $.each( data, function( key, val ) {
+ // authority_row = "<img src='{{ STATIC_URL }}img/institutions/{{user_details.parent_authority}}.gif' alt='' /><br>";
+ // authority_row += "<br>";
+ // authority_row += "<b>authority:</b> "+val.authority_hrn+"<br>";
+ // authority_data.push(authority_row);
+ // });
+ // $("div#authority-data").html(authority_data.join( "" ));
+ // $("div#onelab-data").html(onelab_data.join( "" ));
+ // $("div#authority-tab-loaded").css("display","block");
+ // $("div#authority-tab-loading").css("display","none");
+ // });
+
+ // $.post("/rest/slice/",{'filters':{'parent_authority':'{{user_details.parent_authority}}'}}, function( data ) {
+ // var list_slices = [];
+ // var table_slices = [];
+ // /* "slice_hrn", "slice_description", "slice_type", "parent_authority", "created", "nodes", "slice_url", "slice_last_updated", "users", "slice_urn", "slice_expires" */
+ // $.each( data, function( key, val ) {
+ // list_slices.push( "<li><a href=\"portal/slice/"+val.slice_hrn+"\">" + val.slice_hrn + "</a></li>" );
+ // if(val.nodes=="undefined" || val.nodes==null){
+ // nodes_length=0;
+ // }else{
+ // nodes_length=val.nodes.length;
+ // }
+ // if(val.users=="undefined" || val.users==null){
+ // users_length=0;
+ // }else{
+ // users_length=val.users.length;
+ // }
+
+ // if(val.slice_url=="undefined" || val.slice_url==null){
+ // slice_url="";
+ // }else{
+ // slice_url="<a href='"+val.slice_url+"' target='_blank'>"+val.slice_url+"</a>";
+ // }
+
+ // slice_row = "<tr id='"+val.slice_hrn+"'>";
+ // slice_row += "<td><input type='checkbox' class='slice' id='"+val.slice_hrn+"'></td>";
+ // slice_row += "<td><a href=\"/slice/"+val.slice_hrn+"\">" + val.slice_hrn + "</a></td>";
+ // slice_row += "<td>"+users_length+"</td>";
+ // slice_row += "<td>"+slice_url+"</td>";
+ // //slice_row += "<td>"+nodes_length+"</td>";
+ // slice_row += "<td>"+val.slice_expires+"</td>";
+ // slice_row += "</tr>";
+ // table_slices.push(slice_row);
+
+ // });
+
+ // /* $("div#slice-list").html($( "<ul/>", { html: list_slices.join( "" ) })); */
+ // $("table#slice-tab tr:last").after(table_slices.join( "" ));
+ // $("div#slice-tab-loaded").css("display","block");
+ // $("div#slice-tab-loading").css("display","none");
+ // });
+
+ // $.post("/rest/user/",{'filters':{'parent_authority':'{{user_details.parent_authority}}'}}, function( data ) {
+ // var list_users = [];
+ // var table_users = [];
+ // /* Available fields
+ // user_gid, user_enabled, slices, pi_authorities, keys, parent_authority, user_first_name,
+ // user_urn, user_last_name, user_phone, user_hrn, user_email, user_type
+ // */
+ // $.each( data, function( key, val ) {
+ // list_users.push( "<li><a href=\"portal/user/"+val.user_email+"\">" + val.user_email + "</a></li>" );
+ // user_row = "<tr id='"+val.user_hrn+"'>";
+ // user_row += "<td><input type='checkbox' class='user' id='"+val.user_hrn+"'></td>";
+ // user_row += "<td>"+val.user_email+"</td>";
+ // user_row += "<td>"+val.user_hrn+"</td>";
+ // user_row += "<td>"+val.user_first_name+"</td>";
+ // user_row += "<td>"+val.user_last_name+"</td>";
+ // user_row += "<td>"+val.user_enabled+"</td>";
+ // user_row += "</tr>";
+ // table_users.push(user_row);
+ // });
+ // $("table#user-tab tr:last").after(table_users.join( "" ));
+ // $("div#user-tab-loaded").css("display","block");
+ // $("div#user-tab-loading").css("display","none");
+ // });
+
+ {% endif %}
+
+}); // end document.ready
+</script>
+{% endblock %}
</div>
<div id="pending" class="panel" style="height:370px;display:none;">
{{pending_resources}}
+ </div>
+ <div id="sla_dialog" class="panel" style="height:370px;display:none;">
+ {{sla_dialog}}
</div>
<div id="map" class="panel" style="height:370px;display:none;">
{{map_resources}}
<div class="tab-pane row" id="statistics">...</div>
<div class="tab-pane row" id="measurements">...</div>
<div class="tab-pane row" id="experiment">...</div>
+ <div class="tab-pane row" id="sla">...</div>
</div>
{% endblock %}
from portal.univbrisview import UnivbrisView
+from portal.servicedirectory import ServiceDirectoryView
+
from portal.documentationview import DocumentationView
from portal.supportview import SupportView
from portal.emailactivationview import ActivateEmailView
'portal.django_passresetview.password_reset_complete'),
url(r'^univbris/?$', UnivbrisView.as_view(), name='univbris'),
- # ...
+
+ url(r'^servicedirectory/?$', ServiceDirectoryView.as_view(), name='servicedirectory'),
)
# (r'^accounts/', include('registration.backends.default.urls')),
from plugins.querycode import QueryCode
from plugins.quickfilter import QuickFilter
-from trash.trashutils import quickfilter_criterias
+from trashutils import quickfilter_criterias
#
from ui.topmenu import topmenu_items_live, the_user
from plugins.querytable import QueryTable
from ui.topmenu import topmenu_items_live, the_user
-from trash.trashutils import hard_wired_slice_names, hard_wired_list, lorem_p, lorem, quickfilter_criterias
+from trashutils import hard_wired_slice_names, hard_wired_list, lorem_p, lorem, quickfilter_criterias
#might be useful or not depending on the context
#@login_required
from ui.topmenu import topmenu_items, the_user
# tmp
-from trash.trashutils import lorem, hard_wired_slice_names
+from trashutils import lorem, hard_wired_slice_names
def scroll_view (request):
return render_to_response ('view-scroll.html',
from ui.topmenu import topmenu_items, the_user
# tmp
-from trash.trashutils import lorem, hard_wired_slice_names
+from trashutils import lorem, hard_wired_slice_names
@login_required
def tab_view (request):
--- /dev/null
+This is the README.txt file for sla-dashboard application.\r
+\r
+sla-dashboard application is composed by the following directories:\r
+* sladashboard: the app related to the application itself. The settings\r
+ file may need to be modified: read below.\r
+* slagui: the sla dashboard GUI project.\r
+* slaclient: this project contains all the code needed to connect to\r
+ SLA Manager REST interface, and the conversion from xml/json to python\r
+ objects.\r
+* samples: this directory contains sample files to load in the SLA Manager for\r
+ testing.\r
+* bin: some useful scripts\r
+\r
+\r
+Software requirements\r
+---------------------\r
+Python version: 2.7.x\r
+\r
+The required python packages are listed in requirements.txt\r
+\r
+Installing the requirements inside a virtualenv is recommended.\r
+\r
+SLA Manager (java backend) needs to be running in order to use the dashboard.\r
+\r
+Installing\r
+----------\r
+(to be corrected/completed)\r
+\r
+#\r
+# Install virtualenv\r
+#\r
+$ pip install virtualenv\r
+\r
+\r
+#\r
+# Create virtualenv.\r
+# E.g.: VIRTUALENVS_DIR=~/virtualenvs\r
+#\r
+$ virtualenv $VIRTUALENVS_DIR/sla-dashboard\r
+\r
+#\r
+# Activate virtualenv\r
+#\r
+$ . $VIRTUALENVS_DIR/sla-dashboard/bin/activate\r
+\r
+#\r
+# Change to application dir and install requirements\r
+#\r
+$ cd $SLA_DASHBOARD\r
+$ pip install -r requirements.txt\r
+\r
+#\r
+# Create needed tables for sessions, admin, etc\r
+#\r
+$ ./manage.py syncdb\r
+\r
+Settings\r
+--------\r
+\r
+* sladashboard/settings.py:\r
+ - SLA_MANAGER_URL : The URL of the SLA Manager REST interface.\r
+ - DEBUG: Please, set this to FALSE in production\r
+\r
+* sladashboard/urls.py:\r
+ - dashboard root url: the slagui project is accessed by default\r
+ in $server:$port/slagui. Change "slagui" with the desired path.\r
+\r
+\r
+Running\r
+-------\r
+NOTE: these steps are not suitable in production mode.\r
+\r
+#\r
+# Activate virtualenv\r
+#\r
+$ . $VIRTUALENVS_DIR/sla-dashboard/bin/activate\r
+\r
+#\r
+# Cd to application dir\r
+#\r
+$ cd $SLA_DASHBOARD\r
+\r
+#\r
+# Start server listening on port 8000 (change port as desired)\r
+#\r
+$ ./manage.py runserver 0.0.0.0:8000\r
+\r
+#\r
+# Test\r
+#\r
+curl http://localhost:8000/slagui
\ No newline at end of file
--- /dev/null
+Django==1.5.2
+django-extensions
+south
+django-debug-toolbar
+requests
+python-dateutil<2.0
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<wsag:Template xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"
+ TemplateId="iMindsServiceTemplate">
+ <wsag:Name>Template for iMinds service</wsag:Name>
+ <wsag:Context>
+ <wsag:AgreementInitiator>iMinds</wsag:AgreementInitiator>
+ <wsag:ServiceProvider>AgreementInitiator</wsag:ServiceProvider>
+ <wsag:ExpirationTime>2015-03-07T12:00:00.000</wsag:ExpirationTime>
+ <sla:Service xmlns:sla="http://sla.atos.eu">iMinds service</sla:Service>
+ </wsag:Context>
+ <wsag:Terms>
+ <wsag:All>
+ <!-- FUNCTIONAL DESCRIPTION -->
+ <wsag:ServiceDescriptionTerm wsag:Name="SDTName1" wsag:ServiceName="iMinds service">
+ The template for iMinds service
+ </wsag:ServiceDescriptionTerm>
+
+ <!-- OPTIONAL SERVICE REFERENCE -->
+
+ <!-- OPTIONAL SERVICE PROPERTIES : non functional properties-->
+ <wsag:ServiceProperties wsag:Name="NonFunctional" wsag:ServiceName="iMinds service">
+ <wsag:VariableSet>
+ <wsag:Variable Name="UpTime" Metric="xs:double">
+ <wsag:Location>iMinds/UpTime</wsag:Location>
+ </wsag:Variable>
+ <wsag:Variable Name="Performance" Metric="xs:decimal">
+ <wsag:Location>iMinds/Performance</wsag:Location>
+ </wsag:Variable>
+ </wsag:VariableSet>
+ </wsag:ServiceProperties>
+ <wsag:GuaranteeTerm Name="GT_CPULoad">
+ <wsag:ServiceScope ServiceName="iMinds service"/>
+ <wsag:ServiceLevelObjective>
+ <wsag:KPITarget>
+ <wsag:KPIName>UpTime</wsag:KPIName>
+ <wsag:CustomServiceLevel>
+ {"constraint" : "UpTime GT 75"}
+ </wsag:CustomServiceLevel>
+ </wsag:KPITarget>
+ </wsag:ServiceLevelObjective>
+ </wsag:GuaranteeTerm>
+ <wsag:GuaranteeTerm Name="GT_Performance">
+ <wsag:ServiceScope ServiceName="iMinds service"/>
+ <wsag:ServiceLevelObjective>
+ <wsag:KPITarget>
+ <wsag:KPIName>Performance</wsag:KPIName>
+ <wsag:CustomServiceLevel>
+ {"constraint" : "Performance GT 50"}
+ </wsag:CustomServiceLevel>
+ </wsag:KPITarget>
+ </wsag:ServiceLevelObjective>
+ </wsag:GuaranteeTerm>
+ </wsag:All>
+ </wsag:Terms>
+</wsag:Template>
--- /dev/null
+curl -u normal_user:password -H "Content-type: application/xml" -d@providerIMinds.xml localhost:8080/sla-service/providers -X POST
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<provider>
+ <uuid>iMinds</uuid>
+ <name>iMinds Testbed</name>
+</provider>
--- /dev/null
+curl -u myuser:mypassword -H "Content-type: application/json" -d@simpleAgreementCreationParameters.json localhost:8000/sla/agreements/simplecreate -X POST
\ No newline at end of file
--- /dev/null
+{"template_id":"iMindsServiceTemplate","user":"imauser"}
--- /dev/null
+curl -u normal_user:password -H "Content-type: application/xml" -d@TemplateIMindsService.xml localhost:8080/sla-service/templates -X POST
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env bash
+#
+# To be executed from application root path
+#
+
+#cmd=$($(grep SLA_MANAGER sladashboard/settings.py) && eval $cmd & print $SLA_MANAGER))
+#eval $(grep SLA_MANAGER_URL sladashboard/settings.py)
+#echo SLA_MANAGER_URL=$SLA_MANAGER_URL
+
+SLA_MANAGER_URL="http://localhost:8080/sla-service"
+
+#
+echo \#Add provider virtualwall
+#
+curl -H "Content-type: application/xml" -d @samples/provider-virtualwall.xml $SLA_MANAGER_URL/providers -X POST
+
+#
+echo \#Add provider wiLab2
+#
+curl -H "Content-type: application/xml" -d @samples/provider-wilab2.xml $SLA_MANAGER_URL/providers -X POST
+
+#
+echo \#Add template
+#
+curl -H "Content-type: application/xml" -d @samples/template.xml $SLA_MANAGER_URL/templates -X POST
+
+#
+echo \#Add agreement03
+#
+curl -H "Content-type: application/xml" -d @samples/agreement03.xml $SLA_MANAGER_URL/agreements -X POST
+curl -H "Content-type: application/xml" -d @samples/enforcement03.xml $SLA_MANAGER_URL/enforcements -X POST
+#curl $SLA_MANAGER_URL/enforcements/agreement03/start -X PUT
+
+#
+echo \#Add agreement04
+#
+curl -H "Content-type: application/xml" -d @samples/agreement04.xml $SLA_MANAGER_URL/agreements -X POST
+curl -H "Content-type: application/xml" -d @samples/enforcement04.xml $SLA_MANAGER_URL/enforcements -X POST
+#curl $SLA_MANAGER_URL/enforcements/agreement04/start -X PUT
+
+#
+#echo \#Add agreement05
+#
+#curl -H "Content-type: application/xml" -d@samples/agreement05.xml $SLA_MANAGER_URL/agreements -X POST
+#curl -d@samples/enforcement05.xml -H "Content-type: application/xml" $SLA_MANAGER_URL/enforcements -X POST
+#curl $SLA_MANAGER_URL/enforcements/agreement05/start -X PUT
+
--- /dev/null
+#!/bin/bash
+
+if [ $# -eq 1 ] ; then
+ curl localhost:8080/sla-service/enforcements/agreement0$1/start -X PUT
+else
+ curl localhost:8080/sla-service/enforcements/agreement03/start -X PUT
+ curl localhost:8080/sla-service/enforcements/agreement04/start -X PUT
+fi
+
--- /dev/null
+#!/bin/bash
+
+if [ $# -eq 1 ] ; then
+ curl localhost:8080/sla-service/enforcements/agreement0$1/stop -X PUT
+else
+ curl localhost:8080/sla-service/enforcements/agreement03/stop -X PUT
+ curl localhost:8080/sla-service/enforcements/agreement04/stop -X PUT
+fi
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement03">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>experimenter01</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>virtualwall</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template</wsag:TemplateId>\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">Testbed_guarantee_0.99_Uptime_rate_for_0.99_rate_of_the_resources_during_the_sliver</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ \r
+ <wsag:ServiceProperties Name="ServiceProperties" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="UpTime" Metric="xs:double">\r
+ <wsag:Location>qos:UpTime</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="Performance" Metric="xs:double">\r
+ <wsag:Location>qos:Performance</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+\r
+ <!-- Uptime GuaranteTerm-->\r
+ <wsag:GuaranteeTerm Name="GT_UpTime">\r
+ <wsag:ServiceScope ServiceName="sla:virtualwall" />\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>UpTime</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "UpTime GT 0.99"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+\r
+ <!-- Resource Performance GuaranteTerm-->\r
+ <wsag:GuaranteeTerm Name="GT_Performance">\r
+ <wsag:ServiceScope ServiceName="sla:virtualwall" />\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>Performance</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "Performance GT 0.99"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement04">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>experimenter01</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>wiLab2</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template</wsag:TemplateId>\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">Testbed_guarantee_0.99_Uptime_rate_for_0.99_rate_of_the_resources_during_the_sliver</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ \r
+ <wsag:ServiceProperties Name="ServiceProperties" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="UpTime" Metric="xs:double">\r
+ <wsag:Location>qos:UpTime</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="Performance" Metric="xs:double">\r
+ <wsag:Location>qos:Performance</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+\r
+ <!-- Uptime GuaranteTerm-->\r
+ <wsag:GuaranteeTerm Name="GT_UpTime">\r
+ <wsag:ServiceScope ServiceName="sla:wiLab2" />\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>UpTime</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "UpTime GT 0.99"}\r
+ </wsag:CustomServiceLevel> \r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+\r
+ <!-- Resource Performance GuaranteTerm-->\r
+ <wsag:GuaranteeTerm Name="GT_Performance">\r
+ <wsag:ServiceScope ServiceName="sla:wiLab2" />\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>Performance</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "Performance GT 0.99"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<enforcement_job>
+ <agreement_id>agreement03</agreement_id>
+ <enabled>false</enabled>
+</enforcement_job>
--- /dev/null
+<enforcement_job>
+ <agreement_id>agreement04</agreement_id>
+ <enabled>false</enabled>
+</enforcement_job>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement">\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>RandomClient</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>Provider01</wsag:AgreementResponder>\r
+ <!-- The AgreementResponder (in this case) is mandatory if sla is multi \r
+ service provider -->\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>contract-template-2007-12-04</wsag:TemplateId>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <!-- FUNCTIONAL DESCRIPTION -->\r
+ <!-- <wsag:ServiceDescriptionTerm wsag:Name="SDTName" wsag:ServiceName="ServiceName"> \r
+ DSL expression </wsag:ServiceDescriptionTerm> -->\r
+\r
+ <!-- OPTIONAL SERVICE REFERENCE -->\r
+\r
+ <!-- OPTIONAL SERVICE PROPERTIES : non functional properties -->\r
+ <wsag:ServiceProperties wsag:Name="NonFunctional"\r
+ wsag:ServiceName="ServiceName">\r
+ <wsag:Variables>\r
+ <wsag:Variable wsag:Name="ResponseTime" wsag:Metric="xs:double">\r
+ <wsag:Location>qos:ResponseTime</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:Variables>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm wsag:Name="GT_ResponseTime">\r
+ <wsag:ServiceScope wsag:ServiceName="ServiceName" />\r
+ <!-- The qualifying conditions that must be met before the guarantee \r
+ is evaluated -->\r
+ <!-- <wsag:QualifyingCondition>state EQ 'ready'</wsag:QualifyingCondition> -->\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>ResponseTime</wsag:KPIName> <!-- same name as property for the moment -->\r
+ <wsag:CustomServiceLevel>{"constraint" : "ResponseTime LT 100"}</wsag:CustomServiceLevel> <!-- the ServiceProperty is referenced here -->\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement02">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>RandomClient</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>provider-prueba</wsag:AgreementResponder>\r
+ <!--\r
+ The AgreementResponder (in this case) is mandatory if sla is multi service provider \r
+ -->\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template02</wsag:TemplateId>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <!-- FUNCTIONAL DESCRIPTION -->\r
+ <wsag:ServiceDescriptionTerm Name="SDTName1" ServiceName="ServiceName">\r
+ DSL expression\r
+ </wsag:ServiceDescriptionTerm>\r
+ <wsag:ServiceDescriptionTerm Name="SDTName2" ServiceName="ServiceName">\r
+ DSL expression\r
+ </wsag:ServiceDescriptionTerm>\r
+ \r
+ <!-- OPTIONAL SERVICE REFERENCE -->\r
+ \r
+ <!-- OPTIONAL SERVICE PROPERTIES : non functional properties-->\r
+ <wsag:ServiceProperties Name="NonFunctional" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="ResponseTime" Metric="xs:double">\r
+ <wsag:Location>qos:ResponseTime</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="Performance" Metric="xs:double">\r
+ <wsag:Location>qos:Performance</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm Name="GT_ResponseTime">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <!-- The qualifying conditions that must be met before the guarantee is evaluated -->\r
+ <!-- \r
+ <wsag:QualifyingCondition>state EQ 'ready'</wsag:QualifyingCondition>\r
+ -->\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>ResponseTime</wsag:KPIName> <!-- same name as property for the moment -->\r
+ <wsag:CustomServiceLevel>{"constraint" : "ResponseTime LT 0.9"}</wsag:CustomServiceLevel> <!-- the ServiceProperty is referenced here -->\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Performance">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>Performance</wsag:KPIName> <!-- same name as property for the moment -->\r
+ <wsag:CustomServiceLevel>{"constraint" : "Performance GT 0.1"}</wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ <wsag:BusinessValueList>\r
+ <wsag:Importance>3</wsag:Importance> <!-- optional importance (integer) -->\r
+ <wsag:Penalty>\r
+ <wsag:AssessmentInterval>\r
+ <wsag:Count>10</wsag:Count>\r
+ </wsag:AssessmentInterval>\r
+ <wsag:ValueUnit>EUR</wsag:ValueUnit>\r
+ <wsag:ValueExpression>99</wsag:ValueExpression>\r
+ </wsag:Penalty>\r
+ \r
+ <wsag:Reward></wsag:Reward>\r
+ <wsag:Preference></wsag:Preference>\r
+ <wsag:CustomBusinessValue></wsag:CustomBusinessValue>\r
+ </wsag:BusinessValueList>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement03">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>experimenter01</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>virtualwall</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template02</wsag:TemplateId>\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">Testbed_guarantee_0.75_Uptime_rate_for_0.8_rate_of_the_resources_during_the_sliver</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <!--\r
+ <wsag:ServiceDescriptionTerm Name="SDTName1" ServiceName="ServiceName">\r
+ { "servicename": "service-prueba" }\r
+ </wsag:ServiceDescriptionTerm>\r
+ -->\r
+ <wsag:ServiceProperties Name="ServiceProperties" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="ResponseTime" Metric="xs:double">\r
+ <wsag:Location>service-prueba/ResponseTime</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="Performance" Metric="xs:double">\r
+ <wsag:Location>service-prueba/Performance</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm Name="GT_ResponseTime">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>ResponseTime</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "ResponseTime BETWEEN (0, 200)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Performance">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>Performance</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "Performance BETWEEN (0.1,1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement04">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>experimenter01</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>wiLab2</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template02</wsag:TemplateId>\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">Testbed_guarantee_0.80_uptime_rate_for_0.75_rate_of_the_resources_during_the_sliver</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <wsag:ServiceProperties Name="ServiceProperties" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="metric1" Metric="xs:double">\r
+ <wsag:Location>metric1</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric2" Metric="xs:double">\r
+ <wsag:Location>metric2</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric3" Metric="xs:double">\r
+ <wsag:Location>metric3</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm Name="GT_Metric1">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric1</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric1 BETWEEN (0.1, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric2">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric2</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric2 BETWEEN (0.15, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric3">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric3</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric3 BETWEEN (0.2, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement05">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>client-prueba</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>f4c993580-03fe-41eb-8a21-a56709f9370f</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template02</wsag:TemplateId>\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">service5</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <wsag:ServiceProperties Name="ServiceProperties" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="metric1" Metric="xs:double">\r
+ <wsag:Location>metric1</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric2" Metric="xs:double">\r
+ <wsag:Location>metric2</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric3" Metric="xs:double">\r
+ <wsag:Location>metric3</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric4" Metric="xs:double">\r
+ <wsag:Location>metric4</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm Name="GT_Metric1">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric1</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric1 BETWEEN (0.05, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric2">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric2</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric2 BETWEEN (0.1, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric3">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric3</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric3 BETWEEN (0.15, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric4">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric4</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric4 BETWEEN (0.2, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ AgreementId="agreement05">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>experimenter01</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>virtualwall</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>template02</wsag:TemplateId>\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">service5</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <wsag:ServiceProperties Name="ServiceProperties" ServiceName="ServiceName">\r
+ <wsag:VariableSet>\r
+ <wsag:Variable Name="metric1" Metric="xs:double">\r
+ <wsag:Location>metric1</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric2" Metric="xs:double">\r
+ <wsag:Location>metric2</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric3" Metric="xs:double">\r
+ <wsag:Location>metric3</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable Name="metric4" Metric="xs:double">\r
+ <wsag:Location>metric4</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:VariableSet>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm Name="GT_Metric1">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric1</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric1 BETWEEN (0.05, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric2">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric2</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric2 BETWEEN (0.1, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric3">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric3</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric3 BETWEEN (0.15, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Metric4">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>metric4</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "metric4 BETWEEN (0.2, 1)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+<enforcement_job>
+ <agreement_id>agreement04</agreement_id>
+ <enabled>false</enabled>
+</enforcement_job>
--- /dev/null
+<enforcement_job>
+ <agreement_id>agreement02</agreement_id>
+ <enabled>true</enabled>
+</enforcement_job>
--- /dev/null
+<enforcement_job>
+ <agreement_id>agreement05</agreement_id>
+ <enabled>true</enabled>
+</enforcement_job>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+From http://serviceqos.wikispaces.com/WSAgExample
+ -->
+<wsag:Template xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement" TemplateId="contract-template-2007-12-04">
+ <wsag:Name>ExampleTemplate</wsag:Name>
+ <wsag:Context>
+ <wsag:AgreementInitiator>Provider</wsag:AgreementInitiator>
+ <wsag:ServiceProvider>AgreementInitiator</wsag:ServiceProvider>
+ <wsag:ExpirationTime>2013-12-15-1200</wsag:ExpirationTime>
+ <wsag:TemplateId>contract-template-2013-12-15</wsag:TemplateId>
+ </wsag:Context>
+ <wsag:Terms>
+ <wsag:All>
+ <!-- functional description -->
+ <wsag:ServiceDescriptionTerm
+ wsag:Name="General"
+ wsag:ServiceName="Service0001">
+ A GPS service
+ </wsag:ServiceDescriptionTerm>
+ <wsag:ServiceDescriptionTerm
+ wsag:Name="GetCoordsOperation"
+ wsag:ServiceName="GPSService0001">
+ operation to call to get the coords
+ </wsag:ServiceDescriptionTerm>
+ <!-- domain specific reference to a service (additional or optional to SDT) -->
+ <wsag:ServiceReference
+ wsag:Name="CoordsRequest"
+ wsag:ServiceName="GPSService0001">
+ <wsag:EndpointReference>
+ <wsag:Address>http://www.gps.com/coordsservice/getcoords</wsag:Address>
+ <wsag:ServiceName>gps:CoordsRequest</wsag:ServiceName>
+ </wsag:EndpointReference>
+ </wsag:ServiceReference>
+ <!-- non-functional properties -->
+ <wsag:ServiceProperties
+ wsag:Name="AvailabilityProperties"
+ wsag:ServiceName="GPS0001">
+ <wsag:Variables>
+ <wsag:Variable
+ wsag:Name="ResponseTime"
+ wsag:Metric="metric:Duration">
+ <wsag:Location>qos:ResponseTime</wsag:Location>
+ </wsag:Variable>
+ </wsag:Variables>
+ </wsag:ServiceProperties>
+ <wsag:ServiceProperties
+ wsag:Name="UsabilityProperties"
+ wsag:ServiceName="GPS0001">
+ <wsag:Variables>
+ <wsag:Variable
+ wsag:Name="CoordDerivation"
+ wsag:Metric="metric:CoordDerivationMetric">
+ <wsag:Location>qos:CoordDerivation</wsag:Location>
+ </wsag:Variable>
+ </wsag:Variables>
+ </wsag:ServiceProperties>
+ <!-- statements to offered service level(s) -->
+ <wsag:GuaranteeTerm
+ Name="FastReaction" Obligated="ServiceProvider">
+ <wsag:ServiceScope ServiceName="GPS0001">
+ http://www.gps.com/coordsservice/getcoords
+ </wsag:ServiceScope>
+ <wsag:QualifyingCondition>
+ applied when current time in week working hours
+ </wsag:QualifyingCondition>
+ <wsag:ServiceLevelObjective>
+ <wsag:KPITarget>
+ <wsag:KPIName>FastResponseTime</wsag:KPIName>
+ <wsag:Target>
+ //Variable/@Name="ResponseTime" LOWERTHAN 1 second
+ </wsag:Target>
+ </wsag:KPITarget>
+ </wsag:ServiceLevelObjective>
+ </wsag:GuaranteeTerm>
+ </wsag:All>
+ </wsag:Terms>
+</wsag:Template>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<provider>
+ <uuid>virtualwall</uuid>
+ <name>virtualwall</name>
+</provider>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<provider>
+ <uuid>wiLab2</uuid>
+ <name>wiLab2</name>
+</provider>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<wsag:Template xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"
+ TemplateId="template">
+ <wsag:Name>ExampleTemplate2</wsag:Name>
+ <wsag:Context>
+ <wsag:ExpirationTime>2014-03-07T12:00:00.000</wsag:ExpirationTime>
+ </wsag:Context>
+ <wsag:Terms>
+ <wsag:All>
+ <!-- FUNCTIONAL DESCRIPTION -->
+ <wsag:ServiceDescriptionTerm wsag:Name="SDTName1" wsag:ServiceName="ServiceName">
+ DSL expression
+ </wsag:ServiceDescriptionTerm>
+ <wsag:ServiceDescriptionTerm wsag:Name="SDTName2" wsag:ServiceName="ServiceName">
+ DSL expression
+ </wsag:ServiceDescriptionTerm>
+
+ <!-- OPTIONAL SERVICE REFERENCE -->
+
+ <!-- OPTIONAL SERVICE PROPERTIES : non functional properties-->
+ <wsag:ServiceProperties wsag:Name="NonFunctional" wsag:ServiceName="ServiceName">
+ <wsag:Variables>
+ <wsag:Variable wsag:Name="ResponseTime" wsag:Metric="xs:double">
+ <wsag:Location>qos:ResponseTime</wsag:Location>
+ </wsag:Variable>
+ <wsag:Variable wsag:Name="Performance" wsag:Metric="xs:double">
+ <wsag:Location>qos:Performance</wsag:Location>
+ </wsag:Variable>
+ </wsag:Variables>
+ </wsag:ServiceProperties>
+ <wsag:GuaranteeTerm wsag:Name="GT_ResponseTime">
+ <wsag:ServiceScope wsag:ServiceName="ServiceName"/>
+ <!-- The qualifying conditions that must be met before the guarantee is evaluated -->
+ <!--
+ <wsag:QualifyingCondition>state EQ 'ready'</wsag:QualifyingCondition>
+ -->
+ <wsag:ServiceLevelObjective>
+ <wsag:KPITarget>
+ <wsag:KPIName>ResponseTime</wsag:KPIName> <!-- same name as property for the moment -->
+ <wsag:CustomServiceLevel>{"constraint" : "ResponseTime LT qos:ResponseTime"}</wsag:CustomServiceLevel> <!-- the ServiceProperty is referenced here -->
+ </wsag:KPITarget>
+ </wsag:ServiceLevelObjective>
+ </wsag:GuaranteeTerm>
+ <wsag:GuaranteeTerm wsag:Name="GT_Performance">
+ <wsag:ServiceScope wsag:ServiceName="ServiceName"/>
+ <wsag:ServiceLevelObjective>
+ <wsag:KPITarget>
+ <wsag:KPIName>Performance</wsag:KPIName> <!-- same name as property for the moment -->
+ <wsag:CustomServiceLevel>{"constraint" : "Performance GT qos:Performance"}</wsag:CustomServiceLevel>
+ </wsag:KPITarget>
+ </wsag:ServiceLevelObjective>
+ <wsag:BusinessValueList>
+ <wsag:Importance>3</wsag:Importance> <!-- optional importance (integer) -->
+ <wsag:Penalty>
+ <wsag:AssessmentInterval>
+ <wsag:Count>10</wsag:Count>
+ </wsag:AssessmentInterval>
+ <wsag:ValueUnit>EUR</wsag:ValueUnit>
+ <wsag:ValueExpression>99</wsag:ValueExpression>
+ </wsag:Penalty>
+
+ <wsag:Reward></wsag:Reward>
+ <wsag:Preference></wsag:Preference>
+ <wsag:CustomBusinessValue></wsag:CustomBusinessValue>
+ </wsag:BusinessValueList>
+ </wsag:GuaranteeTerm>
+ </wsag:All>
+ </wsag:Terms>
+</wsag:Template>
--- /dev/null
+__author__ = 'a145034'\r
--- /dev/null
+# -*- coding: utf-8 -*-
+
+import requests
+
+from requests.auth import HTTPBasicAuth
+
+import xmlconverter
+import wsag_model
+
+from django.conf import settings
+
+
+"""REST client to SLA Manager.
+
+Contains a generic rest client and wrappers over this generic client
+for each resource.
+
+Each resource client implements business-like() functions, but
+returns a tuple (output, requests.Response)
+
+The resource clients are initialized with the rooturl and a path, which
+are combined to build the resource url. The path is defaulted to the known
+resource path. So, for example, to create a agreements client:
+
+c = Agreements("http://localhost/slagui-service")
+
+A Factory facility is provided to create resource client instances. The
+Factory uses "rooturl" module variable to use as rooturl parameter.
+
+restclient.rooturl = "http://localhost/slagui-service"
+c = restclient.Factory.agreements()
+
+"""
+
+_PROVIDERS_PATH = "providerso"
+_AGREEMENTS_PATH = "agreementso"
+_TEMPLATES_PATH = "templateso"
+_VIOLATIONS_PATH = "violationso"
+_ENFORCEMENTJOBS_PATH = "enforcements"
+
+rooturl = settings.SLA_MANAGER_URL
+
+# SLA_MANAGER_USER = "normal_user"
+# SLA_MANAGER_PASSWORD = "password"
+
+class Factory(object):
+ @staticmethod
+ def agreements():
+ """Returns aREST client for Agreements
+
+ :rtype : Agreements
+ """
+ return Agreements(rooturl)
+
+ @staticmethod
+ def providers():
+ """Returns aREST client for Providers
+
+ :rtype : Providers
+ """
+ return Providers(rooturl)
+
+ @staticmethod
+ def violations():
+ """Returns aREST client for Violations
+
+ :rtype : Violations
+ """
+ return Violations(rooturl)
+
+ @staticmethod
+ def templates():
+ """Returns aREST client for Violations
+
+ :rtype : Violations
+ """
+ return Templates(rooturl)
+
+ @staticmethod
+ def enforcements():
+ """Returns aREST client for Enforcements jobs
+
+ :rtype : Enforcements
+ """
+ return Enforcements(rooturl)
+
+class Client(object):
+
+ def __init__(self, root_url):
+
+ """Generic rest client using requests library
+
+ Each operation mimics the corresponding "requests" operation (arguments
+ and return)
+
+ :param str root_url: this url is used as prefix in all subsequent
+ requests
+ """
+ self.rooturl = root_url
+
+ def get(self, path, **kwargs):
+ """Just a wrapper over request.get, just in case.
+
+ Returns a requests.Response
+
+ :rtype : request.Response
+ :param str path: remaining path from root url;
+ empty if desired path equal to rooturl.
+ :param kwargs: arguments to requests.get
+
+ Example:
+ c = Client("http://localhost:8080/service")
+ c.get("/resource", headers = { "accept": "application/json" })
+ """
+ url = _buildpath_(self.rooturl, path)
+ kwargs["auth"] = HTTPBasicAuth(settings.SLA_MANAGER_USER, settings.SLA_MANAGER_PASSWORD)
+ result = requests.get(url, **kwargs)
+ print "GET {} {} {}".format(
+ result.url, result.status_code, result.text[0:70])
+ return result
+
+ def post(self, path, data=None, **kwargs):
+ """Just a wrapper over request.post, just in case
+
+ :rtype : request.Response
+ :param str path: remaining path from root url;
+ empty if desired path equal to rooturl.
+ :param dict[str, str] kwargs: arguments to requests.post
+
+ Example:
+ c = Client("http://localhost:8080/service")
+ c.post(
+ '/resource',
+ '{ "id": "1", "name": "provider-a" }',
+ headers = {
+ "content-type": "application/json",
+ "accept": "application/xml"
+ }
+ )
+ """
+ url = _buildpath_(self.rooturl, path)
+ kwargs["auth"] = HTTPBasicAuth(settings.SLA_MANAGER_USER, settings.SLA_MANAGER_PASSWORD)
+ result = requests.post(url, data, **kwargs)
+ location = result.headers["location"] \
+ if "location" in result.headers else "<null>"
+ print "POST {} {} Location: {}".format(
+ result.url, result.status_code, location)
+ return result
+
+
+
+class _Resource(object):
+
+ def __init__(self, url, converter):
+ """Provides some common operations over resources.
+
+ The operations return a structured representation of the resource.
+
+ :param str url: url to the resource
+ :param Converter converter: resouce xml converter
+
+ Some attributes are initialized to be used from the owner if needed:
+ * client: Client instance
+ * converter: resource xml converter
+ * listconverter: list of resources xml converter
+ """
+ self.client = Client(url)
+ self.converter = converter
+ self.listconverter = xmlconverter.ListConverter(self.converter)
+
+ @staticmethod
+ def _processresult(r, converter):
+
+ """Generic processing of the REST call.
+
+ If no errors, tries to convert the result to a destination entity.
+
+ :param r requests:
+ :param converter Converter:
+ """
+ if r.status_code == 404:
+ return None
+
+ content_type = r.headers.get('content-type', '')
+
+ print("content-type = " + content_type)
+ if content_type == 'application/json':
+ result = r.json()
+ elif content_type == 'application/xml':
+ xml = r.text
+ result = xmlconverter.convertstring(converter, xml)
+ else:
+ result = r.text
+ return result
+
+ def getall(self):
+ """Get all resources
+
+ """
+ r = self.client.get("")
+ resources = self._processresult(r, self.listconverter)
+ return resources, r
+
+ def getbyid(self, id):
+ """Get resource 'id'"""
+ r = self.client.get(id)
+ resource = _Resource._processresult(r, self.converter)
+ return resource, r
+
+ def get(self, params):
+ """Generic query over resource: GET /resource?q1=v1&q2=v2...
+
+ :param dict[str,str] params: values to pass as get parameters
+ """
+ r = self.client.get("", params=params)
+ resources = self._processresult(r, self.listconverter)
+ return resources, r
+
+ def create(self, body, **kwargs):
+ """Creates (POST method) a resource.
+
+ It should be convenient to set content-type header.
+
+ Usage:
+ resource.create(body, headers={'content-type': 'application/xml'})
+ """
+ r = self.client.post("", body, **kwargs)
+ r.raise_for_status()
+ return r
+
+
+class Agreements(object):
+
+ def __init__(self, root_url, path=_AGREEMENTS_PATH):
+ """Business methods for Agreement resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.AgreementConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """
+ Get all agreements
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.getall()
+
+ def getbyid(self, agreementid):
+ """Get an agreement
+
+ :rtype : wsag_model.Agreement
+ """
+ return self.res.getbyid(agreementid)
+
+ def getbyconsumer(self, consumerid):
+ """Get a consumer's agreements
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.get(dict(consumerId=consumerid))
+
+ def getbyprovider(self, providerid):
+ """Get the agreements served by a provider
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.get(dict(providerId=providerid))
+
+ def getstatus(self, agreementid):
+ """Get guarantee status of an agreement
+
+ :param str agreementid :
+ :rtype : wsag_model.AgreementStatus
+ """
+ path = _buildpath_(agreementid, "guaranteestatus")
+ r = self.res.client.get(path, headers={'accept': 'application/json'})
+ json_obj = r.json()
+ status = wsag_model.AgreementStatus.json_decode(json_obj)
+
+ return status, r
+
+ def create(self, agreement):
+ """Create a new agreement
+
+ :param str agreement: sla template in ws-agreement format.
+ """
+ return self.res.create(agreement)
+
+class Templates(object):
+
+ def __init__(self, root_url, path=_TEMPLATES_PATH):
+ """Business methods for Templates resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.AgreementConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all templates
+
+ :rtype : list[wsag_model.Template]
+ """
+ return self.res.getall()
+
+ def getbyid(self, provider_id):
+ """Get a template
+
+ :rtype: wsag_model.Template
+ """
+ return self.res.getbyid(provider_id)
+
+ def create(self, template):
+ """Create a new template
+
+ :param str template: sla template in ws-agreement format.
+ """
+ self.res.create(template)
+
+class Providers(object):
+
+ def __init__(self, root_url, path=_PROVIDERS_PATH):
+ """Business methods for Providers resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.ProviderConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all providers
+
+ :rtype : list[wsag_model.Provider]
+ """
+ return self.res.getall()
+
+ def getbyid(self, provider_id):
+ """Get a provider
+
+ :rtype: wsag_model.Provider
+ """
+ return self.res.getbyid(provider_id)
+
+ def create(self, provider):
+ """Create a new provider
+
+ :type provider: wsag_model.Provider
+ """
+ body = provider.to_xml()
+ return self.res.create(body)
+
+class Violations(object):
+
+ def __init__(self, root_url, path=_VIOLATIONS_PATH):
+ """Business methods for Violation resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.ViolationConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all violations
+ :rtype : list[wsag_model.Violation]
+ """
+ return self.res.getall()
+
+ def getbyid(self, violationid):
+ """Get a violation
+
+ :rtype : wsag_model.Violation
+ """
+ return self.res.getbyid(violationid)
+
+ def getbyagreement(self, agreement_id, term=None):
+ """Get the violations of an agreement.
+
+ :param str agreement_id:
+ :param str term: optional GuaranteeTerm name. If not specified,
+ violations from all terms will be returned
+ :rtype: list[wsag_model.Violation]
+ """
+ return self.res.get(
+ {"agreementId": agreement_id, "guaranteeTerm": term})
+
+
+class Enforcements(object):
+
+ def __init__(self, root_url, path=_ENFORCEMENTJOBS_PATH):
+ """Business methods for Violation resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.EnforcementConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all Enforcements
+ :rtype : list[wsag_model.Violation]
+ """
+ return self.res.getall()
+
+ def getbyagreement(self, agreement_id):
+ """Get the enforcement of an agreement.
+
+ :param str agreement_id:
+
+ :rtype: list[wsag_model.Enforcement]
+ """
+ return self.res.getbyid(agreement_id)
+
+
+def _buildpath_(*paths):
+ return "/".join(paths)
+
+
+def main():
+ #
+ # Move to test
+ #
+ global rooturl
+ rooturl = "http://127.0.0.1:8080/sla-service"
+
+
+ c = Factory.templates()
+ #r = c.getall()
+ #r = c.getbyid("noexiste")
+ #r = c.getstatus("agreement03")
+ #print r
+
+ #r = c.getbyconsumer('RandomClient')
+ r = c.getbyid("template02")
+
+
+ print r
+
+
+if __name__ == "__main__":
+ main()
+
+
--- /dev/null
+# -*- coding: utf-8 -*-
+
+import requests
+
+import xmlconverter
+import wsag_model
+
+
+"""REST client to SLA Manager.
+
+Contains a generic rest client and wrappers over this generic client
+for each resource.
+
+Each resource client implements business-like() functions, but
+returns a tuple (output, requests.Response)
+
+The resource clients are initialized with the rooturl and a path, which
+are combined to build the resource url. The path is defaulted to the known
+resource path. So, for example, to create a agreements client:
+
+c = Agreements("http://localhost/slagui-service")
+
+A Factory facility is provided to create resource client instances. The
+Factory uses "rooturl" module variable to use as rooturl parameter.
+
+restclient.rooturl = "http://localhost/slagui-service"
+c = restclient.Factory.agreements()
+
+"""
+
+_PROVIDERS_PATH = "providers"
+_AGREEMENTS_PATH = "agreements"
+_VIOLATIONS_PATH = "violations"
+_ENFORCEMENTJOBS_PATH = "enforcementjobs"
+
+rooturl = ""
+
+
+class Factory(object):
+ @staticmethod
+ def agreements():
+ """Returns aREST client for Agreements
+
+ :rtype : Agreements
+ """
+ return Agreements(rooturl)
+
+ @staticmethod
+ def providers():
+ """Returns aREST client for Providers
+
+ :rtype : Providers
+ """
+ return Providers(rooturl)
+
+ @staticmethod
+ def violations():
+ """Returns aREST client for Violations
+
+ :rtype : Violations
+ """
+ return Violations(rooturl)
+
+class Client(object):
+
+ def __init__(self, root_url):
+
+ """Generic rest client using requests library
+
+ Each operation mimics the corresponding "requests" operation (arguments
+ and return)
+
+ :param str root_url: this url is used as prefix in all subsequent
+ requests
+ """
+ self.rooturl = root_url
+
+ def get(self, path, **kwargs):
+ """Just a wrapper over request.get, just in case.
+
+ Returns a requests.Response
+
+ :rtype : request.Response
+ :param str path: remaining path from root url;
+ empty if desired path equal to rooturl.
+ :param kwargs: arguments to requests.get
+
+ Example:
+ c = Client("http://localhost:8080/service")
+ c.get("/resource", headers = { "accept": "application/json" })
+ """
+ url = _buildpath_(self.rooturl, path)
+ result = requests.get(url, **kwargs)
+ print "GET {} {} {}".format(
+ result.url, result.status_code, result.text[0:70])
+ return result
+
+class _Resource(object):
+
+ def __init__(self, url, converter):
+ """Provides some common operations over resources.
+
+ The operations return a structured representation of the resource.
+
+ :param str url: url to the resource
+ :param Converter converter: resouce xml converter
+
+ Some attributes are initialized to be used from the owner if needed:
+ * client: Client instance
+ * converter: resource xml converter
+ * listconverter: list of resources xml converter
+ """
+ self.client = Client(url)
+ self.converter = converter
+ self.listconverter = xmlconverter.ListConverter(self.converter)
+
+ @staticmethod
+ def _processresult(r, converter):
+
+ """Generic processing of the REST call.
+
+ If no errors, tries to convert the result to a destination entity.
+
+ :param r requests:
+ :param converter Converter:
+ """
+ if r.status_code == 404:
+ return None
+
+ content_type = r.headers.get('content-type', '')
+
+ print("content-type = " + content_type)
+ if content_type == 'application/json':
+ result = r.json()
+ elif content_type == 'application/xml':
+ xml = r.text
+ result = xmlconverter.convertstring(converter, xml)
+ else:
+ result = r.text
+ return result
+
+ def getall(self):
+ """Get all resources
+
+ """
+ r = self.client.get("")
+ resources = self._processresult(r, self.listconverter)
+ return resources, r
+
+ def getbyid(self, id):
+ """Get resource 'id'"""
+ r = self.client.get(id)
+ resource = _Resource._processresult(r, self.converter)
+ return resource, r
+
+ def get(self, params):
+ """Generic query over resource: GET /resource?q1=v1&q2=v2...
+
+ :param dict[str,str] params: values to pass as get parameters
+ """
+ r = self.client.get("", params=params)
+ resources = self._processresult(r, self.listconverter)
+ return resources, r
+
+class Agreements(object):
+
+ def __init__(self, root_url, path=_AGREEMENTS_PATH):
+ """Business methods for Agreement resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.AgreementConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """
+ Get all agreements
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.getall()
+
+ def getbyid(self, agreementid):
+ """Get an agreement
+
+ :rtype : wsag_model.Agreement
+ """
+ return self.res.getbyid(agreementid)
+
+ def getbyconsumer(self, consumerid):
+ """Get a consumer's agreements
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.get(dict(consumerId=consumerid))
+
+ def getbyprovider(self, providerid):
+ """Get the agreements served by a provider
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.get(dict(providerId=providerid))
+
+ def getstatus(self, agreementid):
+ """Get guarantee status of an agreement
+
+ :param str agreementid :
+ :rtype : wsag_model.AgreementStatus
+ """
+ path = _buildpath_(agreementid, "guaranteestatus")
+ r = self.res.client.get(path, headers={'accept': 'application/json'})
+ json_obj = r.json()
+ status = wsag_model.AgreementStatus.json_decode(json_obj)
+
+ return status, r
+
+class Providers(object):
+
+ def __init__(self, root_url, path=_PROVIDERS_PATH):
+ """Business methods for Providers resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.ProviderConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all providers
+
+ :rtype : list[wsag_model.Provider]
+ """
+ return self.res.getall()
+
+ def getbyid(self, provider_id):
+ """Get a provider
+
+ :rtype: wsag_model.Provider
+ """
+ return self.res.getbyid(provider_id)
+
+class Violations(object):
+
+ def __init__(self, root_url, path=_VIOLATIONS_PATH):
+ """Business methods for Violation resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.ViolationConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all violations
+ :rtype : list[wsag_model.Violation]
+ """
+ return self.res.getall()
+
+ def getbyid(self, violationid):
+ """Get a violation
+
+ :rtype : wsag_model.Violation
+ """
+ return self.res.getbyid(violationid)
+
+ def getbyagreement(self, agreement_id, term=None):
+ """Get the violations of an agreement.
+
+ :param str agreement_id:
+ :param str term: optional GuaranteeTerm name. If not specified,
+ violations from all terms will be returned
+ :rtype: list[wsag_model.Violation]
+ """
+ return self.res.get(
+ {"agreementId": agreement_id, "guaranteeTerm": term})
+
+
+def _buildpath_(*paths):
+ return "/".join(paths)
+
+
+def main():
+ #
+ # Move to test
+ #
+ global rooturl
+ rooturl = "http://10.0.2.2:8080/sla-service"
+
+ c = Factory.agreements()
+ #r = c.getall()
+ #r = c.getbyid("noexiste")
+ #r = c.getstatus("agreement03")
+ #print r
+
+ #r = c.getbyconsumer('RandomClient')
+
+ c = Providers(rooturl)
+ r = c.getall()
+
+ c = Violations(rooturl)
+ #r = c.getall()
+ r_ = c.getbyagreement("agreement03", "GT_Otro")
+ r_ = c.getbyid('cf41011d-9f30-4ebc-a967-30b4ea928192')
+
+ print r_
+
+
+if __name__ == "__main__":
+ main()
+
+
--- /dev/null
+# -*- coding: utf-8 -*-
+
+import requests
+
+import xmlconverter
+import wsag_model
+
+
+"""REST client to SLA Manager.
+
+Contains a generic rest client and wrappers over this generic client
+for each resource.
+
+Each resource client implements business-like() functions, but
+returns a tuple (output, requests.Response)
+
+The resource clients are initialized with the rooturl and a path, which
+are combined to build the resource url. The path is defaulted to the known
+resource path. So, for example, to create a agreements client:
+
+c = Agreements("http://localhost/slagui-service")
+
+A Factory facility is provided to create resource client instances. The
+Factory uses "rooturl" module variable to use as rooturl parameter.
+
+restclient.rooturl = "http://localhost/slagui-service"
+c = restclient.Factory.agreements()
+
+"""
+
+_PROVIDERS_PATH = "providers"
+_AGREEMENTS_PATH = "agreements"
+_VIOLATIONS_PATH = "violations"
+_ENFORCEMENTJOBS_PATH = "enforcementjobs"
+
+rooturl = ""
+
+
+class Factory(object):
+ @staticmethod
+ def agreements():
+ """Returns aREST client for Agreements
+
+ :rtype : Agreements
+ """
+ return Agreements(rooturl)
+
+ @staticmethod
+ def providers():
+ """Returns aREST client for Providers
+
+ :rtype : Providers
+ """
+ return Providers(rooturl)
+
+ @staticmethod
+ def violations():
+ """Returns aREST client for Violations
+
+ :rtype : Violations
+ """
+ return Violations(rooturl)
+
+class Client(object):
+
+ def __init__(self, root_url):
+
+ """Generic rest client using requests library
+
+ Each operation mimics the corresponding "requests" operation (arguments
+ and return)
+
+ :param str root_url: this url is used as prefix in all subsequent
+ requests
+ """
+ self.rooturl = root_url
+
+ def get(self, path, **kwargs):
+ """Just a wrapper over request.get, just in case.
+
+ Returns a requests.Response
+
+ :rtype : request.Response
+ :param str path: remaining path from root url;
+ empty if desired path equal to rooturl.
+ :param kwargs: arguments to requests.get
+
+ Example:
+ c = Client("http://localhost:8080/service")
+ c.get("/resource", headers = { "accept": "application/json" })
+ """
+ url = _buildpath_(self.rooturl, path)
+ result = requests.get(url, **kwargs)
+ print "GET {} {} {}".format(
+ result.url, result.status_code, result.text[0:70])
+ return result
+
+class _Resource(object):
+
+ def __init__(self, url, converter):
+ """Provides some common operations over resources.
+
+ The operations return a structured representation of the resource.
+
+ :param str url: url to the resource
+ :param Converter converter: resouce xml converter
+
+ Some attributes are initialized to be used from the owner if needed:
+ * client: Client instance
+ * converter: resource xml converter
+ * listconverter: list of resources xml converter
+ """
+ self.client = Client(url)
+ self.converter = converter
+ self.listconverter = xmlconverter.ListConverter(self.converter)
+
+ @staticmethod
+ def _processresult(r, converter):
+
+ """Generic processing of the REST call.
+
+ If no errors, tries to convert the result to a destination entity.
+
+ :param r requests:
+ :param converter Converter:
+ """
+ if r.status_code == 404:
+ return None
+
+ content_type = r.headers.get('content-type', '')
+
+ print("content-type = " + content_type)
+ if content_type == 'application/json':
+ result = r.json()
+ elif content_type == 'application/xml':
+ xml = r.text
+ result = xmlconverter.convertstring(converter, xml)
+ else:
+ result = r.text
+ return result
+
+ def getall(self):
+ """Get all resources
+
+ """
+ r = self.client.get("")
+ resources = self._processresult(r, self.listconverter)
+ return resources, r
+
+ def getbyid(self, id):
+ """Get resource 'id'"""
+ r = self.client.get(id)
+ resource = _Resource._processresult(r, self.converter)
+ return resource, r
+
+ def get(self, params):
+ """Generic query over resource: GET /resource?q1=v1&q2=v2...
+
+ :param dict[str,str] params: values to pass as get parameters
+ """
+ r = self.client.get("", params=params)
+ resources = self._processresult(r, self.listconverter)
+ return resources, r
+
+class Agreements(object):
+
+ def __init__(self, root_url, path=_AGREEMENTS_PATH):
+ """Business methods for Agreement resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.AgreementConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """
+ Get all agreements
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.getall()
+
+ def getbyid(self, agreementid):
+ """Get an agreement
+
+ :rtype : wsag_model.Agreement
+ """
+ return self.res.getbyid(agreementid)
+
+ def getbyconsumer(self, consumerid):
+ """Get a consumer's agreements
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.get(dict(consumerId=consumerid))
+
+ def getbyprovider(self, providerid):
+ """Get the agreements served by a provider
+
+ :rtype : list[wsag_model.Agreement]
+ """
+ return self.res.get(dict(providerId=providerid))
+
+ def getstatus(self, agreementid):
+ """Get guarantee status of an agreement
+
+ :param str agreementid :
+ :rtype : wsag_model.AgreementStatus
+ """
+ path = _buildpath_(agreementid, "guaranteestatus")
+ r = self.res.client.get(path, headers={'accept': 'application/json'})
+ json_obj = r.json()
+ status = wsag_model.AgreementStatus.json_decode(json_obj)
+
+ return status, r
+
+class Providers(object):
+
+ def __init__(self, root_url, path=_PROVIDERS_PATH):
+ """Business methods for Providers resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.ProviderConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all providers
+
+ :rtype : list[wsag_model.Provider]
+ """
+ return self.res.getall()
+
+ def getbyid(self, provider_id):
+ """Get a provider
+
+ :rtype: wsag_model.Provider
+ """
+ return self.res.getbyid(provider_id)
+
+class Violations(object):
+
+ def __init__(self, root_url, path=_VIOLATIONS_PATH):
+ """Business methods for Violation resource
+ :param str root_url: url to the root of resources
+ :param str path: path to resource from root_url
+
+ The final url to the resource is root_url + "/" + path
+ """
+ resourceurl = _buildpath_(root_url, path)
+ converter = xmlconverter.ViolationConverter()
+ self.res = _Resource(resourceurl, converter)
+
+ def getall(self):
+ """ Get all violations
+ :rtype : list[wsag_model.Violation]
+ """
+ return self.res.getall()
+
+ def getbyid(self, violationid):
+ """Get a violation
+
+ :rtype : wsag_model.Violation
+ """
+ return self.res.getbyid(violationid)
+
+ def getbyagreement(self, agreement_id, term=None):
+ """Get the violations of an agreement.
+
+ :param str agreement_id:
+ :param str term: optional GuaranteeTerm name. If not specified,
+ violations from all terms will be returned
+ :rtype: list[wsag_model.Violation]
+ """
+ return self.res.get(
+ {"agreementId": agreement_id, "guaranteeTerm": term})
+
+
+def _buildpath_(*paths):
+ return "/".join(paths)
+
+
+def main():
+ #
+ # Move to test
+ #
+ global rooturl
+ rooturl = "http://10.0.2.2:8080/sla-service"
+
+ c = Factory.agreements()
+ #r = c.getall()
+ #r = c.getbyid("noexiste")
+ #r = c.getstatus("agreement03")
+ #print r
+
+ #r = c.getbyconsumer('RandomClient')
+
+ c = Providers(rooturl)
+ r = c.getall()
+
+ c = Violations(rooturl)
+ #r = c.getall()
+ r_ = c.getbyagreement("agreement03", "GT_Otro")
+ r_ = c.getbyid('cf41011d-9f30-4ebc-a967-30b4ea928192')
+
+ print r_
+
+
+if __name__ == "__main__":
+ main()
+
+
--- /dev/null
+# -*- coding: utf-8 -*-\r
+"""Builds templates/agreements based on input data (in json format), submitting\r
+to sla manager.\r
+\r
+It is intended as backend service for a rest interface.\r
+\r
+The json input must work together with the templates to form a valid template\r
+ or agreement for Xifi (be careful!)\r
+\r
+This (very simple) service is coupled to the way xifi is interpreting\r
+ws-agreement.\r
+\r
+\r
+"""\r
+import json\r
+import jsonparser\r
+from sla.slaclient import wsag_model\r
+from sla.slaclient import restclient\r
+from sla.slaclient.templates.fed4fire.django.factory import Factory as TemplateFactory\r
+import sla.slaclient.templates.fed4fire as fed4fire\r
+from time import localtime, strftime\r
+import uuid\r
+class ServiceContext(object):\r
+ def __init__(self, restfactory = None, templatefactory=None):\r
+ """\r
+ :type restfactory: restclient.Factory\r
+ """\r
+ self.restfactory = restfactory\r
+ self.templatefactory = templatefactory\r
+\r
+\r
+def createprovider(json_data, context):\r
+ """Creates a provider in the SlaManager.\r
+ :type json_data:str\r
+ :type context: ServiceContext\r
+\r
+ An example input is:\r
+ {\r
+ "uuid": "f4c993580-03fe-41eb-8a21-a56709f9370f",\r
+ "name": "provider"\r
+ }\r
+ """\r
+ json_obj = json.loads(json_data)\r
+ p = wsag_model.Provider.from_dict(json_obj)\r
+ provider_client = context.restfactory.providers()\r
+ provider_client.create(p)\r
+\r
+\r
+def createtemplate(json_data, context):\r
+ """Creates a template in the SlaManager\r
+\r
+ An example input is:\r
+ {\r
+ "template_id" : "template-id",\r
+ "template_name" : "template-name",\r
+ "provider" : "provider-1",\r
+ "service_id" : "service-id",\r
+ "expiration_time" : "2014-03-28T13:55:00Z",\r
+ "service_properties" : [\r
+ {\r
+ "name" : "uptime",\r
+ "servicename" : "service-a",\r
+ "metric" : "xs:double",\r
+ "location" : "//service-a/uptime"\r
+ }\r
+ ]\r
+ }\r
+\r
+ :type json_data:str\r
+ :type context: ServiceContext\r
+ """\r
+ data = jsonparser.templateinput_from_json(json_data)\r
+ slatemplate = sla.slaclient.templates.fed4fire.render_slatemplate(data)\r
+ client = context.restfactory.templates()\r
+ client.create(slatemplate)\r
+\r
+\r
+def createagreement(json_data, context):\r
+ """Creates an agreement in the SlaManager.\r
+\r
+ The template with template_id is retrieved and the properties and some\r
+ context info is copied to the agreement.\r
+\r
+ An example input is:\r
+ {\r
+ "template_id" : "template-id",\r
+ "agreement_id" : "agreement-id",\r
+ "expiration_time" : "2014-03-28T13:55:00Z",\r
+ "consumer" : "consumer-a",\r
+ "guarantees" : [\r
+ {\r
+ "name" : "uptime",\r
+ "bounds" : [ "0", "1" ]\r
+ }\r
+ ]\r
+ }\r
+ :type json_data:str\r
+ :type context: ServiceContext\r
+ """\r
+ client_templates = context.restfactory.templates()\r
+\r
+ # Builds AgreementInput from json\r
+ data = jsonparser.agreementinput_from_json(json_data)\r
+ # Read template from manager\r
+ slatemplate, request = client_templates.getbyid(data.template_id)\r
+ # Copy (overriding if necessary) from template to AgreementInput\r
+ final_data = data.from_template(slatemplate)\r
+ slaagreement = fed4fire.render_slaagreement(final_data)\r
+\r
+ client_agreements = context.restfactory.agreements()\r
+ return client_agreements.create(slaagreement)\r
+ \r
+\r
def createagreementsimplified(template_id, user, expiration_time):
    """Convenience wrapper: build a minimal agreement for *template_id*
    and submit it through a freshly constructed default ServiceContext.

    :type template_id: str
    :param str user: consumer id placed in the agreement.
    :param expiration_time: ISO-8601 timestamp of the agreement expiration.
    :type expiration_time: str
    """
    context = ServiceContext(restclient.Factory(), TemplateFactory())

    # A random agreement id keeps repeated invocations from colliding.
    payload = json.dumps({
        "agreement_id": str(uuid.uuid4()),
        "template_id": template_id,
        "expiration_time": expiration_time,
        "consumer": user,
    })

    return createagreement(payload, context)
+ \r
def main():
    """Manual smoke test: submit one simplified agreement.

    BUGFIX: the original expiration timestamp was "2014-04-34T23:12:12",
    which is not a real date (April has no 34th day) and made
    dateutil.parser.parse raise ValueError before anything was submitted.
    """
    createagreementsimplified(
        "iMindsServiceWiLab2", "virtualwall", "2014-04-30T23:12:12")
+\r
+\r
+if __name__ == "__main__":\r
+ main()\r
+ \r
+ \r
--- /dev/null
+"""\r
+\r
+"""\r
+from sla.slaclient import wsag_model\r
+import json\r
+import dateutil.parser\r
+from sla.slaclient.templates.fed4fire.fed4fire import AgreementInput\r
+from sla.slaclient.templates.fed4fire.fed4fire import TemplateInput\r
+\r
+\r
def templateinput_from_json(json_data):
    """Creates a TemplateInput from json data.

    :rtype: TemplateInput

    An example input is:
    {
        "template_id" : "template-id",
        "template_name" : "template-name",
        "provider" : "provider",
        "service_id" : "service-id",
        "expiration_time" : "2014-03-28T13:55:00Z",
        "service_properties" : [
            {
                "name" : "uptime",
                "servicename" : "service-a",
                "metric" : "xs:double",
                "location" : "//service-a/uptime"
            }
        ]
    }
    """
    d = json.loads(json_data)

    # The wire format carries expiration_time as an ISO-8601 string;
    # convert it to a datetime before building the input object.
    if "expiration_time" in d:
        d["expiration_time"] = dateutil.parser.parse(d["expiration_time"])

    return TemplateInput(
        template_id=d.get("template_id"),
        template_name=d.get("template_name"),
        provider=d.get("provider"),
        service_id=d.get("service_id"),
        expiration_time=d.get("expiration_time"),
        service_properties=_json_parse_service_properties(d)
    )
+\r
+\r
def agreementinput_from_json(json_data):
    """Creates an AgreementInput from json data.

    :rtype: AgreementInput

    An example input is:
    {
        "agreement_id" : "agreement-id",
        "agreement_name" : "agreement-name",
        "template_id" : "template-id",
        "consumer" : "consumer",
        "provider" : "provider",
        "service_id" : "service-id",
        "expiration_time" : "2014-03-28T13:55:00Z",
        "guarantees": [
            {
                "name" : "uptime",
                "bounds" : [ "0", "1" ]
            }
        ]
    }
    """
    d = json.loads(json_data)

    # The wire format carries expiration_time as an ISO-8601 string;
    # convert it to a datetime before building the input object.
    if "expiration_time" in d:
        d["expiration_time"] = dateutil.parser.parse(d["expiration_time"])

    return AgreementInput(
        agreement_id=d.get("agreement_id"),
        agreement_name=d.get("agreement_name"),
        template_id=d.get("template_id"),
        consumer=d.get("consumer"),
        provider=d.get("provider"),
        service_id=d.get("service_id"),
        expiration_time=d.get("expiration_time"),
        service_properties=_json_parse_service_properties(d),
        guarantee_terms=_json_parse_guarantee_terms(d)
    )
+\r
+\r
def _json_parse_service_properties(d):
    """Translate the "service_properties" entry of *d* into Property objects.

    A missing or null entry yields an empty list.
    :type d: dict(str, str)
    :rtype: list(wsag_model.Agreement.Property)
    """
    entries = d.get("service_properties") or ()
    return [
        wsag_model.Agreement.Property(
            servicename=entry.get("servicename"),
            name=entry.get("name"),
            metric=entry.get("metric"),
            location=entry.get("location")
        )
        for entry in entries
    ]
+\r
+\r
def _json_parse_guarantee_terms(d):
    """Translate the "guarantees" entry of *d* into GuaranteeTerm objects.

    A missing or null entry yields an empty list.
    :type d: dict(str, str)
    :rtype: list(AgreementInput.GuaranteeTerm)
    """
    entries = d.get("guarantees") or ()
    return [
        AgreementInput.GuaranteeTerm(
            metric_name=entry["name"],
            bounds=tuple(entry["bounds"])
        )
        for entry in entries
    ]
\ No newline at end of file
--- /dev/null
+# -*- coding: utf-8 -*-\r
+\r
+from unittest import TestCase\r
+import datetime\r
+import json\r
+\r
+from slaclient.service.fed4fire import jsonparser\r
+\r
+\r
class ParseJsonTestCase(TestCase):
    """Unit tests for jsonparser.

    Each test starts from an empty dict and adds the fixture fields one by
    one, re-parsing and re-checking after every addition, so optional json
    fields are exercised both when present and when absent.
    """

    def setUp(self):

        # Parser function under test; assigned by each test method.
        self.from_json = None

        # Today at midnight; serialized with isoformat() in the fixtures.
        self.expirationtime = datetime.datetime.combine(
            datetime.date.today(),
            datetime.time(0, 0, 0)
        )

        # Template fixture: dict mirror of the json payload accepted by
        # jsonparser.templateinput_from_json.
        self.template = dict(
            template_id="template-id",
            template_name="template-name",
            provider="provider-id",
            service_id="service-id",
            expiration_time=self.expirationtime.isoformat(),
            service_properties=[
                dict(servicename=None, name="uptime", metric=None,
                     location=None),
                dict(servicename="service-name1", name="uptime", metric=None,
                     location=""),
                dict(servicename="service-name2", name="metric1",
                     metric="xs:string", location=None),
                dict(servicename="service-name2", name="metric2",
                     metric="xs:double", location="//monitoring/metric2")
            ]
        )

        # Agreement fixture for jsonparser.agreementinput_from_json.
        self.agreement = dict(
            agreement_id="agreement-id",
            template_id="template-id",
            agreement_name="agreement-name",
            consumer="consumer-id",
            provider="provider-id",
            service_id="service-id",
            expiration_time=self.expirationtime.isoformat(),
            guarantees=[
                dict(name="sin", bounds=(-1, 1))
            ]
        )

    def _check_dict(self, d, is_agreement):
        # Serialize *d* to json, parse it with the function under test,
        # and compare every field of the result against the original dict.
        o = self.from_json(json.dumps(d))
        self.assertEquals(d.get("template_id", None), o.template_id)
        if is_agreement:
            # "or None" normalizes the parser's empty-string defaults so a
            # missing json key compares equal to an unset attribute.
            self.assertEquals(d.get("agreement_id"), o.agreement_id or None)
            self.assertEquals(d.get("agreement_name"), o.agreement_name)
            self.assertEquals(d.get("consumer"), o.consumer or None)
        else:
            self.assertEquals(d.get("template_name"), o.template_name or None)
        self.assertEquals(d.get("provider"), o.provider)
        self.assertEquals(d.get("service_id"), o.service_id)
        self.assertEquals(d.get("expiration_time"), o.expiration_time_iso)
        if "service_properties" in d:
            for i in range(0, len(d["service_properties"])):
                self.assertEquals(
                    d["service_properties"][i].get("servicename"),
                    o.service_properties[i].servicename
                )
                self.assertEquals(
                    d["service_properties"][i].get("name"),
                    o.service_properties[i].name
                )
                self.assertEquals(
                    d["service_properties"][i].get("metric"),
                    o.service_properties[i].metric
                )
                self.assertEquals(
                    d["service_properties"][i].get("location"),
                    o.service_properties[i].location
                )
        if "guarantees" in d:
            for i in range(0, len(d["guarantees"])):
                self.assertEquals(
                    d["guarantees"][i].get("name"),
                    o.guarantee_terms[i].metric_name
                )
                self.assertEquals(
                    d["guarantees"][i].get("bounds"),
                    o.guarantee_terms[i].bounds
                )

    def test_template_from_json(self):
        self.from_json = jsonparser.templateinput_from_json

        #
        # Add fields one by one, and check
        #
        d = dict()
        for key in self.template:
            if key == "service_properties":
                # Properties are themselves appended one by one.
                d[key] = []
                for prop in self.template[key]:
                    d[key].append(prop)
                    self._check_dict(d, False)
            else:
                d[key] = self.template[key]
                self._check_dict(d, False)

    def test_agreement_from_json(self):
        self.from_json = jsonparser.agreementinput_from_json

        #
        # Add fields one by one, and check
        #
        d = dict()

        for key in self.agreement:
            if key == "guarantees":
                # Guarantee terms are themselves appended one by one.
                d[key] = []
                for term in self.agreement[key]:
                    d[key].append(term)
                    self._check_dict(d, True)
            else:
                d[key] = self.agreement[key]
                self._check_dict(d, True)
--- /dev/null
+# -*- coding: utf-8 -*-\r
+\r
+import uuid\r
+import json\r
+\r
+from unittest import TestCase\r
+from slaclient.service.fed4fire import fed4fireservice\r
+from slaclient import restclient\r
+from slaclient.templates.fed4fire.django.factory import Factory as TemplateFactory\r
+\r
+\r
class Fed4FireServiceTestCase(TestCase):
    """Integration tests for fed4fireservice.

    NOTE(review): these tests require a live SLA manager reachable at
    http://localhost:8080/sla-service; they create real entities and make
    no assertions beyond "no exception was raised" — confirm this is the
    intended scope.
    """

    def setUp(self):
        # Context pointing at the local SLA manager instance.
        self.context = fed4fireservice.ServiceContext(
            restclient.Factory("http://localhost:8080/sla-service"),
            TemplateFactory()
        )
        # Random ids so re-runs do not collide with previously created
        # entities in the manager.
        self.provider_id = str(uuid.uuid4())
        self.template_id = str(uuid.uuid4())
        self.provider = {
            "uuid": self.provider_id,
            "name": "provider-" + self.provider_id[0:4]
        }
        self.template = {
            "template_id": self.template_id,
            "template_name": "template-name",
            "provider": self.provider_id,
            "service_id": "service-test",
            "expiration_time": "2014-03-28T13:55:00Z",
            "service_properties": [
                {"name": "uptime"},
                {"name": "responsetime"}
            ]
        }
        self.agreement = {
            "agreement_id": str(uuid.uuid4()),
            "template_id": self.template_id,
            "expiration_time": "2014-03-28T13:55:00Z",
            "consumer": "consumer-a",
            "guarantees": [
                {
                    "name": "uptime",
                    "bounds": ["0.9", "1"]
                }
            ]
        }

    def test(self):
        # Single test so creation order is guaranteed: the agreement
        # references the template, which references the provider.
        self._test_provider()
        self._test_template()
        self._test_agreement()

    def _test_provider(self):
        json_data = json.dumps(self.provider)
        fed4fireservice.createprovider(json_data, self.context)

    def _test_template(self):
        json_data = json.dumps(self.template)
        fed4fireservice.createtemplate(json_data, self.context)

    def _test_agreement(self):
        json_data = json.dumps(self.agreement)
        fed4fireservice.createagreement(json_data, self.context)
+ \r
def main():
    """Manual entry point: submit one agreement to a SLA manager.

    NOTE(review): restclient.Factory() is built without a base url here,
    unlike setUp above — confirm the factory has a usable default.
    """
    context = fed4fireservice.ServiceContext(
        restclient.Factory(),
        TemplateFactory()
    )
    provider_id = "trento"
    template_id = "template_vm-Trento:193.205.211.xx"
    # NOTE(review): the provider and template dicts below are built but
    # never submitted (no createprovider/createtemplate call); only the
    # agreement is created. Confirm whether that is intentional.
    provider = {
        "uuid": provider_id,
        "name": "provider-" + provider_id[0:4]
    }
    template = {
        "template_id": template_id,
        "template_name": "template-name",
        "provider": provider_id,
        "service_id": "service-test",
        "expiration_time": "2014-03-28T13:55:00Z",
        "service_properties": [
            {"name": "uptime"},
            {"name": "responsetime"}
        ]
    }
    agreement = {
        "agreement_id": str(uuid.uuid4()),
        "template_id": template_id,
        "expiration_time": "2014-03-28T13:55:00Z",
        "consumer": "consumer-a",
        # the provider id must be repeated
        "provider": provider_id,
        "guarantees": [
            {
                "name": "uptime",
                "bounds": ["0.9", "1"]
            }
        ]
    }

    json_data = json.dumps(agreement)
    fed4fireservice.createagreement(json_data, context)
+\r
+\r
+if __name__ == "__main__":\r
+ main()\r
+ \r
+ \r
--- /dev/null
+from sla.slaclient.templates.templates import Template
\ No newline at end of file
--- /dev/null
+from sla.slaclient.templates.fed4fire.fed4fire import TemplateInput\r
+from sla.slaclient.templates.fed4fire.fed4fire import AgreementInput\r
+from sla.slaclient.templates.fed4fire.fed4fire import render_slaagreement\r
+from sla.slaclient.templates.fed4fire.fed4fire import render_slatemplate\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ {% if data.agreement_id %}AgreementId="{{data.agreement_id}}"{% endif %}>\r
+ {% if data.agreement_name %}<wsag:Name>{{data.agreement_name}}</wsag:Name>{% endif %}\r
+\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>{{data.consumer}}</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>{{data.provider}}</wsag:AgreementResponder>\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>{{data.expiration_time_iso}}</wsag:ExpirationTime>\r
+ <wsag:TemplateId>{{data.template_id}}</wsag:TemplateId>\r
+ {% if data.service_id %}<sla:Service xmlns:sla="http://sla.atos.eu">{{data.service_id}}</sla:Service>{% endif %}\r
+ </wsag:Context>\r
+\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <wsag:ServiceProperties\r
+ wsag:Name="ServiceProperties"\r
+ wsag:ServiceName="{{data.service_id|default:"service"}}">\r
+ <wsag:Variables>\r
+ {% for property in data.service_properties %} <wsag:Variable\r
+ wsag:Name="{{property.name}}"\r
+ wsag:Metric="{{property.metric|default:'xs:double'}}">\r
+ <wsag:Location>{{property.location|default:property.name}}</wsag:Location>\r
+ </wsag:Variable>\r
+ {% endfor %}</wsag:Variables>\r
+ </wsag:ServiceProperties>\r
+ {% for term in data.guarantee_terms %}\r
+ <wsag:GuaranteeTerm Name="{{term.name}}">\r
+ {# do not need servicescope #}\r
+ {% for scope in term.scopes %}\r
+ <wsag:ServiceScope ServiceName="{{scope.servicename}}"/>\r
+ {% endfor %}\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>{{term.servicelevelobjective.kpiname}}</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {% autoescape off %}\r
+ {{term.servicelevelobjective.customservicelevel}}\r
+ {% endautoescape %}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>{% endfor %}\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>
\ No newline at end of file
--- /dev/null
+"""Django implementation of the templating needed in Fed4FIRE.\r
+"""\r
+import pkgutil\r
+import django.template\r
+from django.conf import settings\r
+import sla.slaclient\r
+\r
+#\r
+# Package where to read the template files\r
+#\r
+_package = "sla.slaclient.templates.fed4fire.django"\r
+\r
+#\r
+# Filename of the sla-agreement template\r
+#\r
+_AGREEMENT_FILENAME = "agreement.xml"\r
+\r
+#\r
+# Filename of the sla-template template\r
+#\r
+_TEMPLATE_FILENAME = "template.xml"\r
+\r
+\r
class Factory(object):
    """Builds Template renderers for sla agreements and sla templates.

    The raw template files are read from this package's data and cached
    after the first access; each call to slaagreement()/slatemplate()
    wraps the cached source in a fresh Template.
    """

    def __init__(self):
        # Raw file contents, loaded lazily on first use.
        self.slaagreement_tpl = None
        self.slatemplate_tpl = None

    def _lazy_init(self):
        # Django templates require settings to be configured once
        # per process before rendering.
        if not settings.configured:
            settings.configure()

    @staticmethod
    def _read(filename):
        """Read a template resource bundled in this package."""
        return pkgutil.get_data(_package, filename)

    def _get_agreement_tpl(self):
        self._lazy_init()
        if self.slaagreement_tpl is None:
            self.slaagreement_tpl = Factory._read(_AGREEMENT_FILENAME)
        return self.slaagreement_tpl

    def _get_template_tpl(self):
        self._lazy_init()
        if self.slatemplate_tpl is None:
            self.slatemplate_tpl = Factory._read(_TEMPLATE_FILENAME)
        return self.slatemplate_tpl

    def slaagreement(self):
        """Return a Template that renders sla agreements."""
        return Template(self._get_agreement_tpl())

    def slatemplate(self):
        """Return a Template that renders sla templates."""
        return Template(self._get_template_tpl())
+\r
+\r
class Template(sla.slaclient.templates.Template):
    """Django-backed implementation of the slaclient Template interface."""

    def __init__(self, string):
        """:param str string: raw django template source."""
        self.impl = django.template.Template(string)

    def render(self, data):
        """Render the template; *data* is exposed as {{data}} inside it."""
        ctx = django.template.Context(dict(data=data))
        return self.impl.render(ctx)
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Template xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ TemplateId="{{data.template_id}}">\r
+ {% if data.template_name %}<wsag:Name>{{data.template_name}}</wsag:Name>{% endif %}\r
+\r
+ <wsag:Context>\r
+ {% if data.provider %}<wsag:AgreementResponder>{{data.provider}}</wsag:AgreementResponder>{% endif %}\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ {% if data.expiration_time %}<wsag:ExpirationTime>{{data.expiration_time_iso}}</wsag:ExpirationTime>{% endif %}\r
+ <sla:Service xmlns:sla="http://sla.atos.eu">{{data.service_id}}</sla:Service>\r
+ </wsag:Context>\r
+\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <wsag:ServiceProperties\r
+ wsag:Name="ServiceProperties"\r
+ wsag:ServiceName="{{data.service_id}}">\r
+ <wsag:Variables>\r
+ {% for property in data.service_properties %} <wsag:Variable\r
+ wsag:Name="{{property.name}}"\r
+ wsag:Metric="{{property.metric|default:'xs:double'}}">\r
+ <wsag:Location>{{property.location|default:property.name}}</wsag:Location>\r
+ </wsag:Variable>\r
+ {% endfor %}</wsag:Variables>\r
+ </wsag:ServiceProperties>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Template>\r
--- /dev/null
+# -*- coding: utf-8 -*-\r
+"""Template system for xifi project.\r
+\r
+The specific template system is configured with the factory module variable.\r
+\r
+By default, it is set to use django.\r
+\r
+Each implementation must define a factory module/object, defining:\r
+* slaagreement()\r
+* slatemplate()\r
+\r
+that returns a slaclient.templates.Template-compliant object that performs\r
+the actual render.\r
+\r
+This module defines two facade methods:\r
+* render_slaagreement(data)\r
+* render_slatemplate(data)\r
+\r
+and the corresponding input classes:\r
+* AgreementInput\r
+* TemplateInput\r
+\r
+Usage:\r
+ # Thread safe\r
+ import sla.slaclient.templates.fed4fire\r
+ data = sla.slaclient.templates.fed4fire.TemplateInput(template_id="template-test")\r
+ t = sla.slaclient.templates.fed4fire.django.Factory().slatemplate()\r
+ slatemplate_xml = t.render(data)\r
+\r
+ # Non thread safe\r
+ import sla.slaclient.templates.fed4fire\r
+ data = sla.slaclient.templates.fed4fire.TemplateInput(template_id="template-test")\r
+ slatemplate_xml = sla.slaclient.templates.fed4fire.render_slatemplate(data)\r
+\r
+Notes about agreements in XiFi:\r
+ The ws-agreement specification does not address where to place the name/id\r
+ of the service (as known outside SLA) being defined in the\r
+ agreement/template xml. So, it has been defined an element\r
+ wsag:Context/sla:Service, whose text is the name/id of the service. This\r
+ is known here as serviceId.\r
+\r
+ An agreement/template can represent zero or more than one existing services.\r
+ The guarantee terms, service description terms, etc, use the attribute\r
+ serviceName to reference (internally in the xml) the service. So, there\r
+ could be more than one serviceName in a xml (as opposed to the former\r
+ serviceId). In Xifi, there is only one service per agreement, so we\r
+ can give serviceId and serviceName the same value.\r
+\r
+ A ServiceReference defines how a serviceName is known externally: a\r
+ service reference can be a name, a location, a structure containing both...\r
+\r
+ The service properties are a set of variables that are used in the guarantee\r
+    terms constraints. So, for example, if a constraint is : "uptime < 90", we
+ can have 2 service properties: ActualUptime and DesiredUptime. And the\r
+ constraint will be "ActualUptime < DesiredUptime". This is the theory. But\r
+ we're not going to use the service properties this way. We will not use the\r
+ thresholds as service properties; only the actual metric. So, in this case,\r
+ the service property is defined in ws-agreement as:\r
+\r
+ <wsag:Variable Name="Uptime" Metric="xs:double">\r
+ <wsag:Location>service-ping/Uptime</wsag:Location>\r
+ </wsag:Variable>\r
+\r
+ The "location" is the strange value here. Ws-agreement says that it is a\r
+ "structural reference" to the place where to find the actual value of the\r
+ metric. The examples I've found are references to the\r
+ ServiceDescriptionTerms in the agreement itself. We are not using SDTs\r
+ (they are used to describe the service to be instantiated), so we can\r
+ extrapolate the location as the "abstract location of the metric".\r
+\r
+ In summary, in XiFi, the service properties will hold the metrics being\r
+ monitored for a service.\r
+\r
+ And the guarantee terms hold the constraints that are being enforced for\r
+ the service in this agreement (maybe we are only interested in enforcing\r
+ one of the metrics).\r
+\r
+ A guarantee term is defined as:\r
+ <wsag:GuaranteeTerm Name="GT_ResponseTime">\r
+ <wsag:ServiceScope ServiceName="service-ping"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>Uptime</wsag:KPIName>\r
+ <wsag:CustomServiceLevel>\r
+ {"constraint" : "Uptime BETWEEN (90, 100)"}\r
+ </wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+\r
+ * Name is a name for the guarantee term. In Xifi, the name will have the\r
+ value "GT_<metric_name>"\r
+ * ServiceName is an internal reference in the agreement to the service\r
+      being enforced, as an agreement can be created for more than one service.
+ In Xifi, to my knowledge, one service: one agreement, so this service\r
+ name is not really important.\r
+ * KpiName is a name given to the constraint, and I am using the same name\r
+ as the service property used in the constraint. This makes more sense\r
+ when using thresholds as service properties (e.g., a kpi called\r
+ "uptime" could be defined as :\r
+ "actual_uptime BETWEEN(lower_uptime, upper_uptime)").\r
+\r
+ The CustomServiceLevel is not specified by ws-agreement, so it's something\r
+ to be defined by the implementation.\r
+\r
+"""\r
+\r
+from sla.slaclient import wsag_model\r
+import pdb\r
+\r
+from sla.slaclient.templates.fed4fire.django.factory import Factory\r
+factory = Factory()\r
+\r
+\r
def _getfactory():
    """Return the module-level template factory.

    The factory is hardwired at import time to avoid multithreading
    issues; this needs some refactoring if the factory ever has to be
    configurable.
    """
    return factory
+\r
+\r
def render_slaagreement(data):
    """Generate a sla agreement based on the supplied data.

    :type data: AgreementInput
    :rtype: str
    """
    # BUGFIX: removed leftover debugging (a bare print statement and a
    # commented-out pdb.set_trace()).
    template = _getfactory().slaagreement()
    return template.render(data)
+\r
+\r
def render_slatemplate(data):
    """Generate a sla template based on the supplied data.

    :type data: TemplateInput
    :rtype: str
    """
    return _getfactory().slatemplate().render(data)
+\r
+\r
class TemplateInput(object):
    """Input data to the template engine for generating a sla-template."""

    def __init__(self,
                 template_id="",
                 template_name="",
                 provider="",
                 service_id="",
                 expiration_time=None,
                 service_properties=()):
        """Input data to the template for generating a sla-template.

        :param str template_id: optional TemplateId. If not specified, the
            SlaManager should provide one.
        :param str template_name: optional name for the template.
        :param str service_id: Domain id/name of the service.
        :param str provider: optional Resource Id of the provider party in
            the agreement. The provider must exist previously in the
            SlaManager.
        :param expiration_time: optional expiration time of this template.
        :type expiration_time: datetime.datetime
        :param service_properties: Metrics that the provider is able to
            monitor for this service.
        :type service_properties: list[slaclient.wsag_model.Agreement.Property]
        """
        self.template_id = template_id
        self.template_name = template_name
        self.service_id = service_id
        self.provider = provider
        self.expiration_time = expiration_time
        # Precomputed ISO-8601 form; None when no expiration is set.
        self.expiration_time_iso = \
            expiration_time.isoformat() if expiration_time else None
        self.service_properties = service_properties

    def __repr__(self):
        # BUGFIX: the original format string contained a stray ')' and a
        # missing ", " separator after template_name, producing e.g.
        # "template_name=x)service_id=y".
        s = "<TemplateInput(template_id={}, template_name={}, " \
            "service_id={}, provider={}, expiration_time={}, " \
            "service_properties={})>"
        return s.format(
            self.template_id,
            self.template_name,
            self.service_id,
            self.provider,
            self.expiration_time_iso,
            repr(self.service_properties)
        )
+\r
+\r
class AgreementInput(object):
    """Input data to the template engine for generating a sla-agreement."""

    class GuaranteeTerm(object):
        """A guarantee term to be enforced in an agreement."""

        def __init__(self,
                     metric_name="",
                     bounds=(0, 0)):
            """Creates a GuaranteeTerm.

            Take into account that the GT's name is based on the
            metric_name.
            :param str metric_name: name of the service property being
                enforced.
            :param bounds: (lower, upper) bounds of the metric values.
            :type bounds: (float, float)
            """
            # Convention: a term is named after the metric it enforces.
            self.name = "GT_{}".format(metric_name)
            self.metric_name = metric_name
            # The kpi gets the same name as the service property used in
            # the constraint (see module docstring).
            self.kpiname = metric_name
            self.bounds = bounds

    def __init__(self,
                 agreement_id="",
                 agreement_name="",
                 service_id="",
                 consumer="",
                 provider="",
                 template_id="",
                 expiration_time=None,
                 service_properties=(),
                 guarantee_terms=()):
        """Input data to the template for generating a sla-agreement.

        :param str agreement_id: optional agreement id. If not supplied,
            the SlaManager should create one.
        :param str agreement_name: optional agreement name.
        :param str service_id: Domain id/name of the service.
        :param str consumer: Id of the consumer party in the agreement.
        :param str provider: Resource Id of the provider party in the
            agreement. The provider must exist previously in the
            SlaManager.
        :param str template_id: TemplateId of the template this agreement
            is based on.
        :param expiration_time: Expiration time of this agreement.
        :type expiration_time: datetime.datetime
        :param service_properties: Should be the same of the template.
        :type service_properties: list[slaclient.wsag_model.Agreement.Property]
        :param guarantee_terms: Guarantee terms to be enforced in this
            agreement.
        :type guarantee_terms: list(AgreementInput.GuaranteeTerm)
        """
        self.agreement_id = agreement_id
        self.agreement_name = agreement_name
        self.service_id = service_id
        self.consumer = consumer
        self.provider = provider
        self.template_id = template_id
        self.expiration_time = expiration_time
        # Precomputed ISO-8601 form; None when no expiration is set.
        self.expiration_time_iso = \
            expiration_time.isoformat() if expiration_time else None
        self.service_properties = service_properties
        self.guarantee_terms = guarantee_terms

    def __repr__(self):
        # Closing ")>" fixed for consistency with TemplateInput.__repr__.
        s = "<AgreementInput(agreement_id={}, agreement_name={}, " \
            "service_id={}, consumer={}, provider={}, template_id={}, " \
            "expiration_time={}, service_properties={}, guarantee_terms={})>"
        return s.format(
            self.agreement_id,
            self.agreement_name,
            self.service_id,
            self.consumer,
            self.provider,
            self.template_id,
            self.expiration_time,
            repr(self.service_properties),
            repr(self.guarantee_terms)
        )

    def from_template(self, slatemplate):
        """Return a new agreement based on this agreement and copying info
        (overriding if necessary) from a slatemplate.

        :type slatemplate: wsag_model.Template
        :rtype: AgreementInput
        """
        #
        # NOTE: templateinput does not address guaranteeterms (yet)
        #
        # BUGFIX: removed a leftover debug print of guarantee_terms[0];
        # besides polluting stdout, indexing a dict-values view fails
        # under Python 3.
        return AgreementInput(
            agreement_id=self.agreement_id,
            agreement_name=self.agreement_name,
            service_id=slatemplate.context.service,
            consumer=self.consumer,
            provider=slatemplate.context.provider or self.provider,
            template_id=slatemplate.template_id,
            expiration_time=self.expiration_time,
            service_properties=slatemplate.variables.values(),
            guarantee_terms=slatemplate.guaranteeterms.values()
        )
--- /dev/null
+# -*- coding: utf-8 -*-\r
+\r
+from unittest import TestCase\r
+import datetime\r
+\r
+from slaclient import wsag_model\r
+from slaclient import xmlconverter\r
+import slaclient.templates.fed4fire\r
+from slaclient.templates.fed4fire import TemplateInput\r
+from slaclient.templates.fed4fire import AgreementInput\r
+\r
+\r
+\r
+\r
class TemplatesTestCase(TestCase):
    """Round-trip tests for the fed4fire template rendering.

    Each test renders an input object to ws-agreement xml, parses the xml
    back with xmlconverter, and compares the parsed values against the
    original input.
    """

    def setUp(self):
        self.converter = xmlconverter.AgreementConverter()

        # Today at midnight; compared against the rendered ExpirationTime.
        self.expirationtime = datetime.datetime.combine(
            datetime.date.today(),
            datetime.time(0, 0, 0)
        )
        self.templateinput = TemplateInput(
            template_id="template-id",
            template_name="template-name",
            service_id="service-name",
            expiration_time=self.expirationtime,
            service_properties=[
                wsag_model.Agreement.Property(
                    name="uptime",
                    metric="xs:double",
                    location="uptime"),
                wsag_model.Agreement.Property(
                    name="responsetime",
                    location="responsetime"),
                wsag_model.Agreement.Property(
                    name="quality",
                    metric="xs:string"),
            ]
        )
        self.agreementinput = AgreementInput(
            agreement_id="agreement-id",
            agreement_name="agreement-name",
            consumer="consumer-id",
            provider="provider-id",
            service_id="service-name",
            template_id="template-id",
            expiration_time=self.expirationtime,
            service_properties=self.templateinput.service_properties,
            guarantee_terms=[
                AgreementInput.GuaranteeTerm(
                    "uptime", (0.9, 1)
                ),
                AgreementInput.GuaranteeTerm(
                    "responsetime", (0, 200)
                )
            ]
        )

    def test_template(self):
        slatemplate = slaclient.templates.fed4fire.render_slatemplate(
            self.templateinput
        )
        # convert xml to wsag_model classes
        actual = xmlconverter.convertstring(self.converter, slatemplate)
        """:type: wsag_model.Template"""

        expected = self.templateinput

        self.assertEquals(
            expected.template_id,
            actual.template_id
        )
        self._check_common(expected, actual)
        print slatemplate

    def test_agreement(self):
        slaagreement = slaclient.templates.fed4fire.render_slaagreement(
            self.agreementinput
        )
        # convert xml to wsag_model classes
        actual = xmlconverter.convertstring(self.converter, slaagreement)
        """:type: wsag_model.Agreement"""

        expected = self.agreementinput

        self.assertEquals(
            expected.agreement_id,
            actual.agreement_id
        )
        # The consumer assertion only runs when the input sets a consumer.
        expected.consumer and self.assertEquals(
            expected.consumer,
            actual.context.consumer
        )
        self._check_common(expected, actual)
        self._check_guarantee_terms(expected, actual)
        print slaagreement

    def _check_common(self, expected, actual):
        # Fields shared by templates and agreements.
        if expected.provider:
            self.assertEquals(
                expected.provider,
                actual.context.provider
            )
        self.assertEquals(
            expected.expiration_time_iso,
            actual.context.expirationtime
        )
        self.assertEquals(
            expected.service_id,
            actual.context.service
        )
        self._check_properties(expected, actual)

    def _check_properties(self, expected, actual):
        # The xml templates default metric to 'xs:double' and location to
        # the property name, so the expectations mirror those defaults.
        for expected_prop in expected.service_properties:
            actual_prop = actual.variables[expected_prop.name]
            self.assertEquals(
                expected_prop.name,
                actual_prop.name
            )
            self.assertEquals(
                expected_prop.location or expected_prop.name,
                actual_prop.location
            )
            self.assertEquals(
                expected_prop.metric or 'xs:double',
                actual_prop.metric
            )

    def _check_guarantee_terms(self, expected, actual):
        """
        :type expected: AgreementInput
        :type actual: wsag_model.Agreement
        """
        for expected_term in expected.guarantee_terms:
            actual_term = actual.guaranteeterms[expected_term.name]

            # NOTE(review): a missing key raises KeyError on the lookup
            # above, so actual_term is never None here — this guard looks
            # dead; confirm the intended missing-term behavior.
            if actual_term is None:
                self.assertEquals(expected_term.name, None)
            self.assertEquals(
                expected_term.kpiname,
                actual_term.servicelevelobjective.kpiname
            )
+ \r
--- /dev/null
+"""This module and submodules offers a generic way to create ws-agreement\r
+representations from structured data, by using templates.\r
+\r
+Each submodule (corresponding to a project) is responsible to declare\r
+the structured data to be used as input, and handle the specific template\r
+library.\r
+\r
+This module only defines a sample interface to be used for each Template object\r
+used by each project.\r
+\r
+Sample usage (read specific submodules' docs):\r
+data = slaclient.<project>.Input(...)\r
+tpl = slaclient.<project>.Template(...)\r
+tpl.render(data)\r
+\r
+"""\r
+\r
+\r
class Template(object):
    """Interface that every project-specific template class should
    "implement". It mimics the behavior of django templates.
    """

    def __init__(self, file_):
        """:param file_: template source; semantics defined by subclasses."""
        pass

    def render(self, data):
        """Render this template using *data* as input."""
        pass
--- /dev/null
+__author__ = 'a565180'\r
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<wsag:Agreement \r
+ xmlns:wsag="http://www.ggf.org/namespaces/ws-agreement"\r
+ xmlns:sla="http://sla.atos.eu"\r
+ AgreementId="agreement02">\r
+\r
+ <wsag:Name>ExampleAgreement</wsag:Name>\r
+ <wsag:Context>\r
+ <wsag:AgreementInitiator>RandomClient</wsag:AgreementInitiator>\r
+ <wsag:AgreementResponder>provider-prueba</wsag:AgreementResponder>\r
+ <!--\r
+ The AgreementResponder (in this case) is mandatory if slagui is multi service provider\r
+ -->\r
+ <wsag:ServiceProvider>AgreementResponder</wsag:ServiceProvider>\r
+ <wsag:ExpirationTime>2014-03-07T12:00:00</wsag:ExpirationTime>\r
+ <wsag:TemplateId>contract-template-2007-12-04</wsag:TemplateId>\r
+ <sla:Service>ExampleService</sla:Service>\r
+ </wsag:Context>\r
+ <wsag:Terms>\r
+ <wsag:All>\r
+ <!-- FUNCTIONAL DESCRIPTION -->\r
+ <wsag:ServiceDescriptionTerm wsag:Name="SDTName1" wsag:ServiceName="ServiceName">\r
+ DSL expression\r
+ </wsag:ServiceDescriptionTerm>\r
+ <wsag:ServiceDescriptionTerm wsag:Name="SDTName2" wsag:ServiceName="ServiceName">\r
+ DSL expression\r
+ </wsag:ServiceDescriptionTerm>\r
+ \r
+ <!-- OPTIONAL SERVICE REFERENCE -->\r
+ \r
+            <!-- OPTIONAL SERVICE PROPERTIES: non-functional properties -->\r
+ <wsag:ServiceProperties wsag:Name="NonFunctional" wsag:ServiceName="ServiceName">\r
+ <wsag:Variables>\r
+ <wsag:Variable wsag:Name="ResponseTime" wsag:Metric="xs:double">\r
+ <wsag:Location>qos:ResponseTime</wsag:Location>\r
+ </wsag:Variable>\r
+ <wsag:Variable wsag:Name="Performance" wsag:Metric="xs:double">\r
+ <wsag:Location>qos:Performance</wsag:Location>\r
+ </wsag:Variable>\r
+ </wsag:Variables>\r
+ </wsag:ServiceProperties>\r
+ <wsag:GuaranteeTerm Name="GT_ResponseTime">\r
+ <wsag:ServiceScope ServiceName="ServiceName">/operation1</wsag:ServiceScope>\r
+ <wsag:ServiceScope ServiceName="ServiceName">/operation2</wsag:ServiceScope>\r
+ <!-- The qualifying conditions that must be met before the guarantee is evaluated -->\r
+ <!-- \r
+ <wsag:QualifyingCondition>state EQ 'ready'</wsag:QualifyingCondition>\r
+ -->\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>ResponseTime</wsag:KPIName> <!-- same name as property for the moment -->\r
+ <wsag:CustomServiceLevel>{"constraint" : "ResponseTime BETWEEN (0,0.9)"}</wsag:CustomServiceLevel> <!-- the ServiceProperty is referenced here -->\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ </wsag:GuaranteeTerm>\r
+ <wsag:GuaranteeTerm Name="GT_Performance">\r
+ <wsag:ServiceScope ServiceName="ServiceName"/>\r
+ <wsag:ServiceLevelObjective>\r
+ <wsag:KPITarget>\r
+ <wsag:KPIName>Performance</wsag:KPIName> <!-- same name as property for the moment -->\r
+ <wsag:CustomServiceLevel>{"constraint" : "Performance BETWEEN (0.1,1)"}</wsag:CustomServiceLevel>\r
+ </wsag:KPITarget>\r
+ </wsag:ServiceLevelObjective>\r
+ <wsag:BusinessValueList>\r
+ <wsag:Important>3</wsag:Important> <!-- optional importance (integer) -->\r
+ <wsag:Penalty>\r
+ <wsag:AssessmentInterval>\r
+ <wsag:Count>10</wsag:Count>\r
+ </wsag:AssessmentInterval>\r
+ <wsag:ValueUnit>EUR</wsag:ValueUnit>\r
+ <wsag:ValueExpression>99</wsag:ValueExpression>\r
+ </wsag:Penalty>\r
+ \r
+ <wsag:Reward></wsag:Reward>\r
+ <wsag:Preference></wsag:Preference>\r
+ <wsag:CustomBusinessValue></wsag:CustomBusinessValue>\r
+ </wsag:BusinessValueList>\r
+ </wsag:GuaranteeTerm>\r
+ </wsag:All>\r
+ </wsag:Terms>\r
+</wsag:Agreement>\r
--- /dev/null
+# -*- coding: utf-8 -*-
+
+from unittest import TestCase
+from pprint import pprint
+import json
+
+from slaclient import wsag_model
+from slaclient import xmlconverter
+
+
+class AgreementAnnotatorTestCase(TestCase):
+    """Smoke tests for the xmlconverter converters and the AgreementStatus
+    json decoder: each sample input must parse without raising.  Outputs
+    are not asserted (see the commented pprint calls)."""
+
+    def setUp(self):
+        # Sample payloads in the formats returned by the SLA Manager API.
+        self.violation = """
+        <violation>
+            <uuid>ce0e148f-dfac-4492-bb26-ad2e9a6965ec</uuid>
+            <contract_uuid>agreement04</contract_uuid>
+            <service_scope></service_scope>
+            <metric_name>Performance</metric_name>
+            <datetime>2014-01-14T11:28:22Z</datetime>
+            <actual_value>0.09555700123360344</actual_value>
+        </violation>"""
+
+        self.provider = """
+        <provider>
+            <uuid>1ad9acb9-8dbc-4fe6-9a0b-4244ab6455da</uuid>
+            <name>Provider2</name>
+        </provider> """
+
+        self.list = """
+        <collection href="/providers">
+            <items offset="0" total="2">
+                <provider>
+                    <uuid>1ad9acb9-8dbc-4fe6-9a0b-4244ab6455da</uuid>
+                    <name>Provider1</name>
+                </provider>
+                <provider>
+                    <uuid>2ad9acb9-8dbc-4fe6-9a0b-4244ab6455da</uuid>
+                    <name>Provider2</name>
+                </provider>
+            </items>
+        </collection>"""
+
+        self.agreement_status = """
+        {
+            "AgreementId":"agreement03",
+            "guaranteestatus":"VIOLATED",
+            "guaranteeterms":
+            [
+                {"name":"GT_ResponseTime","status":"FULFILLED"},
+                {"name":"GT_Performance","status":"VIOLATED"}
+            ]
+        }"""
+
+    def test_agreement(self):
+        conv = xmlconverter.AgreementConverter()
+
+        # NOTE(review): path is relative to the process cwd -- presumably
+        # only passes when run from the project root. TODO confirm.
+        out = xmlconverter.convertfile(conv, "slagui/testing/agreement.xml")
+        """:type : Agreement"""
+
+        #pprint(out)
+
+    def test_provider(self):
+        conv = xmlconverter.ProviderConverter()
+        out = xmlconverter.convertstring(conv, self.provider)
+        #pprint(out)
+
+    def test_violation(self):
+        conv = xmlconverter.ViolationConverter()
+        out = xmlconverter.convertstring(conv, self.violation)
+        #pprint(out)
+
+    def test_list(self):
+        # ListConverter delegates each <items> child to the inner converter.
+        conv = xmlconverter.ListConverter(xmlconverter.ProviderConverter())
+        out = xmlconverter.convertstring(conv, self.list)
+        #pprint(out)
+
+    def test_agreement_status_decode(self):
+        json_obj = json.loads(self.agreement_status)
+        out = wsag_model.AgreementStatus.json_decode(json_obj)
+        #pprint(out)
--- /dev/null
+from datetime import datetime
+
+"""Contains the bean models for the SlaManager xml/json types
+"""
+
+
+class Agreement(object):
+    # Bean hierarchy mirroring a ws-agreement document; instances are
+    # populated by xmlconverter.AgreementConverter.
+
+    class Context(object):
+        # wsag:Context section: parties, expiration and service name.
+        def __init__(self):
+            self.expirationtime = datetime.now()
+            self.service = ""
+            self.initiator = ""
+            self.responder = ""
+            self.provider = ""
+            self.consumer = ""
+
+        def __repr__(self):
+            s = "<Context(" + \
+                "expirationtime={}, provider={}, consumer={}, service={})>"
+            return s.format(
+                repr(self.expirationtime),
+                repr(self.provider),
+                repr(self.consumer),
+                repr(self.service))
+
+        def service_formatted(self):
+            # Human-readable service name ("my_service" -> "my service").
+            return self.service.replace('_', ' ')
+
+        def testbed_formatted(self):
+            # NOTE(review): self.template_id is not set in __init__; it is
+            # assigned externally (AgreementConverter._parse_context does) --
+            # confirm all instances go through the converter before calling.
+            return self.template_id.replace('Service', ' - ')
+
+    class Property(object):
+        # One wsag:Variable from a wsag:ServiceProperties section.
+        def __init__(self):
+            self.servicename = ""
+            self.name = ""
+            self.metric = ""
+            self.location = ""
+
+        def __repr__(self):
+            str_ = "<Property(name={}, servicename={}, metric={}, location={})>"
+            return str_.format(
+                repr(self.name),
+                repr(self.servicename),
+                repr(self.metric),
+                repr(self.location))
+
+    class GuaranteeTerm(object):
+        # One wsag:GuaranteeTerm: service scopes plus one SLO.
+
+        class GuaranteeScope(object):
+            def __init__(self):
+                self.servicename = ""
+                self.scope = ""
+
+            def __repr__(self):
+                return "<GuaranteeScope(servicename={}, scope={}>)".format(
+                    repr(self.servicename),
+                    repr(self.scope)
+                )
+
+        class ServiceLevelObjective(object):
+            def __init__(self):
+                self.kpiname = ""
+                self.customservicelevel = ""
+
+            def __repr__(self):
+                s = "<ServiceLevelObjective(kpiname={}, customservicelevel={})>"
+                return s.format(
+                    repr(self.kpiname),
+                    repr(self.customservicelevel)
+                )
+
+        def __init__(self):
+            self.name = ""
+            self.scopes = [] # item: GuaranteeScope
+            """:type : list[Agreement.GuaranteeTerm.GuaranteeScope]"""
+            self.servicelevelobjective = \
+                Agreement.GuaranteeTerm.ServiceLevelObjective()
+
+        def __repr__(self):
+            s = "<GuaranteeTerm(scopes={}, servicelevelobjective={})>"
+            return s.format(
+                repr(self.scopes),
+                repr(self.servicelevelobjective)
+            )
+
+    def __init__(self):
+        """Simple bean model for a ws-agreement agreement/template
+        """
+        self.context = Agreement.Context()
+        self.agreement_id = ""
+        self.descriptionterms = {}
+        self.variables = {} # key: Property.name / value: Property
+        """:type : dict[str,Agreement.Property]"""
+        self.guaranteeterms = {} # key: GT.name / value: GT
+        """:type : dict[str,Agreement.GuaranteeTerm]"""
+
+    def __repr__(self):
+        s = ("<Agreement(agreement_id={}, context={}, descriptionterms={}, " +
+             "variables={}, guaranteeterms={}>")
+        return s.format(
+            repr(self.agreement_id),
+            repr(self.context),
+            repr(self.descriptionterms),
+            repr(self.variables),
+            repr(self.guaranteeterms)
+        )
+
+
+class Template(Agreement):
+ #egarrido this code has been copied from xifi and has not beeing tested
+ def __init__(self):
+ super(Template, self).__init__()
+ self.template_id = ""
+
+ def __repr__(self):
+ s = ("<Template(template_id={}, context={}, descriptionterms={}, " +
+ "variables={}, guaranteeterms={}>")
+ return s.format(
+ repr(self.template_id),
+ repr(self.context),
+ repr(self.descriptionterms),
+ repr(self.variables),
+ repr(self.guaranteeterms)
+ )
+
+
+class Enforcement(object):
+ def __init__(self):
+ """Simple bean model for an enforcement"""
+ self.agreement_id = ""
+ self.enabled = ""
+
+ def __repr__(self):
+ return ("<Enforcement(agreement_id={}, enabled={})>".format(
+ self.agreement_id,
+ self.enabled)
+ )
+
+class AgreementStatus(object):
+    # Runtime status of an agreement and of each of its guarantee terms,
+    # as returned (in json) by the SLA Manager.
+
+    class StatusEnum:
+        # Possible values of guaranteestatus / GuaranteeTermStatus.status.
+        VIOLATED = "VIOLATED"
+        FULFILLED = "FULFILLED"
+        NON_DETERMINED = "NON_DETERMINED"
+
+    class GuaranteeTermStatus(object):
+        def __init__(self):
+            self.name = ""
+            self.status = ""
+
+        def __repr__(self):
+            s = "<GuaranteeTermStatus(name='{}' status='{}')>"
+            return s.format(self.name, self.status)
+
+    def __init__(self):
+        self.agreement_id = ""
+        self.guaranteestatus = ""
+        self.guaranteeterms = []
+
+    def __repr__(self):
+        return (
+            "<AgreementStatus( agreement_id={}, guaranteestatus={}, " +
+            "guaranteeterms={})>").format(
+                self.agreement_id,
+                self.guaranteestatus,
+                repr(self.guaranteeterms))
+
+    @staticmethod
+    def json_decode(json_obj):
+        """Build an AgreementStatus from a deserialized json dict.
+
+        :param dict json_obj: expects keys "AgreementId", "guaranteestatus"
+            and "guaranteeterms" (a list of dicts with "name"/"status").
+        :rtype: AgreementStatus
+        """
+        o = AgreementStatus()
+        o.agreement_id = json_obj["AgreementId"]
+        o.guaranteestatus = json_obj["guaranteestatus"]
+
+        for term in json_obj["guaranteeterms"]:
+            t = AgreementStatus.GuaranteeTermStatus()
+            t.name = term["name"]
+            t.status = term["status"]
+            o.guaranteeterms.append(t)
+        return o
+
+
+class Violation(object):
+ def __init__(self):
+ """Simple bean model for a violation"""
+ self.uuid = ""
+ self.contract_uuid = ""
+ self.service_scope = ""
+ self.metric_name = ""
+ self.datetime = datetime.now()
+ self.actual_value = 0
+
+ def __repr__(self):
+ return ("<Violation(uuid={}, agremeent_id={}, service_scope={}, " +
+ "metric_name={}, datetime={}, actual_value={})>".format(
+ self.uuid,
+ self.contract_uuid,
+ self.service_scope,
+ self.metric_name,
+ self.datetime,
+ self.actual_value)
+ )
+
+
+class Provider(object):
+ def __init__(self):
+ """Simple bean model for a provider"""
+ self.uuid = ""
+ self.name = ""
+
+ def __repr__(self):
+ return ("<Provider(uuid={}, name={})>".format(
+ self.uuid,
+ self.name)
+ )
+ def to_xml(self):
+ xml = "<provider><uuid>{}</uuid><name>{}</name></provider>""".format(
+ self.uuid,
+ self.name
+ )
+ return xml
+
+ @staticmethod
+ def from_dict(d):
+ """Creates a Provider object from a dict structure (e.g.
+ a deserialized json string)
+
+ Usage:
+ json_obj = json.loads(json_data)
+ out = wsag_model.Provider.from_dict(json_obj)
+ """
+ result = Provider(d["uuid"], d["name"])
+ return result
--- /dev/null
+# -*- coding: utf-8 -*-
+
+"""Converts from XML to objects for ws-agreement agreements/templates or any
+other xml returned by SLA Manager.
+
+This module offers a set of converters from xml formats returned by SLA Manager
+to a more-friendly POJO instances.
+
+The converters are designed to be pluggable: see ListConverter.
+
+
+Usage:
+c = AnyConverter() or
+c = ListConverter(AnyOtherConverter())
+
+convertstring(c, "<?xml ... </>")
+
+convertfile(c, "file.xml")
+
+root = ElementTree.parse("file.xml")
+c.convert(root.getroot())
+
+"""
+
+from xml.etree import ElementTree
+from xml.etree.ElementTree import Element
+import dateutil.parser
+
+from wsag_model import Agreement
+from wsag_model import Template
+from wsag_model import Violation
+from wsag_model import Provider
+from wsag_model import Enforcement
+
+
+def convertfile(converter, f):
+ """Reads and converts a xml file
+
+ :rtype : object
+ :param Converter converter:
+ :param str f: file to read
+ """
+ tree = ElementTree.parse(f)
+ result = converter.convert(tree.getroot())
+ return result
+
+
+def convertstring(converter, string):
+ """Converts a string
+
+ :rtype : object
+ :param Converter converter:
+ :param str string: contains the xml to convert
+ """
+ root = ElementTree.fromstring(string)
+ result = converter.convert(root)
+ return result
+
+
+class Converter(object):
+
+ def __init__(self):
+ """Base class for converters
+ """
+ pass
+
+ def convert(self, xmlroot):
+ """Converts the given xml in an object
+
+ :rtype : Object that represents the xml
+ :param Element xmlroot: root element of xml to convert.
+ """
+ return None
+
+
+class ListConverter(Converter):
+ def __init__(self, innerconverter):
+ super(ListConverter, self).__init__()
+ self.innerconverter = innerconverter
+
+ def convert(self, xmlroot):
+ result = []
+
+ for item in xmlroot.find("items"): # loop through "items" children
+ inner = self.innerconverter.convert(item)
+ result.append(inner)
+ return result
+
+
+class ProviderConverter(Converter):
+ """Converter for a provider.
+
+ Input:
+ <provider>
+ <uuid>1ad9acb9-8dbc-4fe6-9a0b-4244ab6455da</uuid>
+ <name>Provider2</name>
+ </provider>
+
+ Output:
+ wsag_model.Provider
+ """
+
+ def __init__(self):
+ super(ProviderConverter, self).__init__()
+
+ def convert(self, xmlroot):
+ result = Provider()
+ result.uuid = xmlroot.find("uuid").text
+ result.name = xmlroot.find("name").text
+ return result
+
+
+class EnforcementConverter(Converter):
+ """Converter for an Enforcement job.
+
+ Input:
+ <enforcement_job>
+ <agreement_id>agreement03</agreement_id>
+ <enabled>false</enabled>
+ </enforcement_job>
+
+ Output:
+ wsag_model.Enforcement
+ """
+
+ def __init__(self):
+ super(EnforcementConverter, self).__init__()
+
+ def convert(self, xmlroot):
+ result = Enforcement()
+ result.agreement_id = xmlroot.find("agreement_id").text
+ result.enabled = xmlroot.find("enabled").text
+ return result
+
+class ViolationConverter(Converter):
+ """Converter for a violation.
+
+ Input:
+ <violation>
+ <uuid>ce0e148f-dfac-4492-bb26-ad2e9a6965ec</uuid>
+ <contract_uuid>agreement04</contract_uuid>
+ <service_scope></service_scope>
+ <metric_name>Performance</metric_name>
+ <datetime>2014-01-14T11:28:22Z</datetime>
+ <actual_value>0.09555700123360344</actual_value>
+ </violation>
+
+ Output:
+ wsag_model.Violation
+ """
+ def __init__(self):
+ super(ViolationConverter, self).__init__()
+
+ def convert(self, xmlroot):
+ result = Violation()
+ result.uuid = xmlroot.find("uuid").text
+ result.contract_uuid = xmlroot.find("contract_uuid").text
+ result.service_scope = xmlroot.find("service_scope").text
+ result.metric_name = xmlroot.find("metric_name").text
+ result.actual_value = xmlroot.find("actual_value").text
+ dt_str = xmlroot.find("datetime").text
+ result.datetime = dateutil.parser.parse(dt_str)
+ return result
+
+
+class AgreementConverter(Converter):
+    def __init__(self):
+        """Converter for an ws-agreement agreement or template.
+        """
+        super(AgreementConverter, self).__init__()
+        # Namespaces used when resolving prefixed paths in find/findall.
+        self._namespaces = {
+            "wsag": "http://www.ggf.org/namespaces/ws-agreement",
+            "sla": "http://sla.atos.eu",
+            "xifi": "http://sla.xifi.eu"
+        }
+        # Fully-qualified root tags ("{uri}Agreement" / "{uri}Template")
+        # used to tell the two document types apart.
+        self.agreement_tags = (
+            "{{{}}}Agreement".format(self._namespaces["wsag"]),
+        )
+        self.template_tags = (
+            "{{{}}}Template".format(self._namespaces["wsag"]),
+        )
+
+    def convert(self, xmlroot):
+        """
+        :param Element xmlroot: root element of xml to convert.
+        :rtype: wsag_model.Agreement
+        """
+        # The root tag decides the concrete bean to build.
+        if xmlroot.tag in self.agreement_tags:
+            result = Agreement()
+            result.agreement_id = xmlroot.attrib["AgreementId"]
+        elif xmlroot.tag in self.template_tags:
+            result = Template()
+            result.template_id = xmlroot.attrib["TemplateId"]
+        else:
+            raise ValueError("Not valid root element name: " + xmlroot.tag)
+
+        context = xmlroot.find("wsag:Context", self._namespaces)
+        result.context = self._parse_context(context)
+
+        terms = xmlroot.find("wsag:Terms/wsag:All", self._namespaces)
+
+        properties = terms.findall("wsag:ServiceProperties", self._namespaces)
+        result.variables = self._parse_properties(properties)
+
+        guarantees = terms.findall("wsag:GuaranteeTerm", self._namespaces)
+        result.guaranteeterms = self._parse_guarantees(guarantees)
+
+        return result
+
+    def _parse_context(self, element):
+        # Builds Agreement.Context from a wsag:Context element.
+        nss = self._namespaces
+        result = Agreement.Context()
+
+        # template_id is stashed on the context; Context.testbed_formatted
+        # reads it later.
+        result.template_id = self._find_text(element, "wsag:TemplateId")
+        result.expirationtime = self._find_text(element, "wsag:ExpirationTime")
+
+        service_elem = element.find("sla:Service", nss)
+        result.service = \
+            service_elem.text if service_elem is not None else "<servicename>"
+
+        initiator = self._find_text(element, "wsag:AgreementInitiator")
+        responder = self._find_text(element, "wsag:AgreementResponder")
+        serviceprovider_elem = self._find_text(element, "wsag:ServiceProvider")
+
+        #
+        # Deloop the initiator-responder indirection.
+        # wsag:ServiceProvider tells which of the two parties acts as
+        # provider; the other one is the consumer.
+        #
+        if serviceprovider_elem == "AgreementResponder":
+            consumer = initiator
+            provider = responder
+        elif serviceprovider_elem == "AgreementInitiator":
+            consumer = responder
+            provider = initiator
+        else:
+            raise ValueError(
+                "Invalid value for wsag:ServiceProvider : " +
+                serviceprovider_elem)
+
+        result.initiator = initiator
+        result.responder = responder
+        result.provider = provider
+        result.consumer = consumer
+
+        return result
+
+    def _parse_property(self, element, servicename):
+        # One wsag:Variable -> (name, Agreement.Property) pair.
+        nss = self._namespaces
+
+        key = _get_attribute(element, "Name")
+        value = Agreement.Property()
+        value.servicename = servicename
+        value.name = key
+        value.metric = _get_attribute(element, "Metric")
+        value.location = element.find("wsag:Location", nss).text
+
+        return key, value
+
+    def _parse_properties(self, elements):
+        # All wsag:ServiceProperties -> dict keyed by variable name.
+        result = {}
+        nss = self._namespaces
+        for element in elements:
+            servicename = _get_attribute(element, "ServiceName")
+            for var in element.findall("wsag:Variables/wsag:Variable", nss):
+                key, value = self._parse_property(var, servicename)
+                result[key] = value
+
+        return result
+
+    def _parse_guarantee_scope(self, element):
+        result = Agreement.GuaranteeTerm.GuaranteeScope()
+        result.servicename = _get_attribute(element, "ServiceName")
+        result.scope = element.text
+        return result
+
+    def _parse_guarantee_scopes(self, elements):
+        result = []
+        for scope in elements:
+            result.append(self._parse_guarantee_scope(scope))
+        return result
+
+    def _parse_guarantee(self, element):
+        # One wsag:GuaranteeTerm -> (name, GuaranteeTerm) pair.
+        nss = self._namespaces
+
+        result = Agreement.GuaranteeTerm()
+        name = _get_attribute(element, "Name")
+        result.name = name
+        scopes = element.findall("wsag:ServiceScope", nss)
+        result.scopes = self._parse_guarantee_scopes(scopes)
+
+        kpitarget = element.find(
+            "wsag:ServiceLevelObjective/wsag:KPITarget", nss)
+        slo = Agreement.GuaranteeTerm.ServiceLevelObjective()
+        result.servicelevelobjective = slo
+        slo.kpiname = kpitarget.find("wsag:KPIName", nss).text
+        slo.customservicelevel = kpitarget.find(
+            "wsag:CustomServiceLevel", nss).text
+
+        return name, result
+
+    def _parse_guarantees(self, elements):
+        # All wsag:GuaranteeTerm -> dict keyed by term name.
+        result = {}
+        for element in elements:
+            key, value = self._parse_guarantee(element)
+            result[key] = value
+        return result
+
+    def _find_text(self, src, path):
+        """Returns the inner text of the element located in path from the src
+        element; "" if no element was found.
+
+        :type src: Element
+        :type path: str
+        :rtype: str
+
+        Usage:
+        text = _find_text(root, "wsag:Context/ExpirationTime")
+        """
+        dst = src.find(path, self._namespaces)
+        if dst is None:
+            return ""
+        return dst.text
+
+
+def _get_attribute(element, attrname):
+    """
+    Get attribute from an element.
+
+    Wrapper over Element.attrib, as this doesn't fallback to the element
+    namespace if the attribute is qnamed and the requested attribute name
+    is not.
+
+    Raises KeyError if the attribute cannot be found under any form.
+
+    Ex:
+    <ns:elem attr1="value1" ns:attr2="value2"/>
+
+    _get_attribute(elem, "attr1") -> value1
+    _get_attribute(elem, "attr2") -> value2
+    _get_attribute(elem, "{uri}:attr1") -> Error
+    _get_attribute(elem, "{uri}:attr2") -> value2
+    """
+    isns = (attrname[0] == '{')
+
+    #
+    # Handle qnamed request:
+    # attrname = {uri}name
+    #
+    if isns:
+        return element.attrib[attrname]
+
+    #
+    # Handle non-qnamed request and non-qnamed actual_attr
+    # attrname = name
+    # actual_attr = name
+    #
+    if attrname in element.attrib:
+        return element.attrib[attrname]
+
+    #
+    # Handle non-qnamed request but qnamed actualAttr
+    # attrname = name
+    # actual_attr = {uri}name
+    #
+    # Prefix the attribute with the element's own namespace and retry.
+    tag_uri = element.tag[0: element.tag.find('}') + 1]
+    return element.attrib[tag_uri + attrname]
--- /dev/null
+# this somehow is not used anymore - should it not be ?
+from django.template import RequestContext
+from django.shortcuts import render_to_response
+from django.shortcuts import render
+from django import forms
+
+from unfold.loginrequired import FreeAccessView
+from unfold.page import Page
+from sla.slaclient import restclient
+from sla.slaclient import wsag_model
+import wsag_helper
+from myslice.theme import ThemeView
+# from sla import SLAPlugin
+from django.core.urlresolvers import reverse
+from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
+
+import slaclient.service.fed4fire.fed4fireservice as fed4fireservice
+from rest_framework.views import APIView
+from django.http import HttpResponse
+
+import json
+import traceback
+
+
+class Rol:
+    # Constants naming the two parties of an agreement.
+    # NOTE(review): "Rol" looks like a typo for "Role"; name kept as-is
+    # because external code may reference it.
+    CONSUMER = "CONSUMER"
+    PROVIDER = "PROVIDER"
+
+
+class AgreementsFilter(object):
+    # NOTE(review): this class is defined twice in this module with
+    # identical bodies; the later definition shadows this one at import
+    # time.  One of the two should be removed.
+    def __init__(self, status=None, provider=None, consumer=None):
+        """Filter criteria for agreements; None/'' fields match anything."""
+        self.status = status
+        self.provider = provider
+        self.consumer = consumer
+
+    def __repr__(self):
+        return "<AgreementsFilter(status={}, provider={}, consumer={})>".format(
+            self.status, self.provider, self.consumer
+        )
+
+    @staticmethod
+    def _check(expectedvalue, actualvalue):
+        # An empty/None expected value acts as a wildcard.
+        if expectedvalue is None or expectedvalue == '':
+            return True
+        else:
+            return actualvalue == expectedvalue
+
+    def check(self, agreement):
+        """Check if this agreement satisfy the filter.
+
+        The agreement must be previously annotated
+        (guaranteestatus is set by wsag_helper.AgreementAnnotator).
+        """
+        guaranteestatus = agreement.guaranteestatus
+        provider = agreement.context.provider
+        consumer = agreement.context.consumer
+        return (
+            AgreementsFilter._check(self.status, guaranteestatus) and
+            AgreementsFilter._check(self.provider, provider) and
+            AgreementsFilter._check(self.consumer, consumer)
+        )
+
+
+class FilterForm(forms.Form):
+    """Form backing the agreements-list filter: status, provider, consumer.
+    All fields are optional; empty values mean "no filtering"."""
+    _attrs = {'class': 'form-control'}
+    exclude = ()
+    status = forms.ChoiceField(
+        choices=[
+            ('', 'All'),
+            (wsag_model.AgreementStatus.StatusEnum.FULFILLED, 'Fulfilled'),
+            (wsag_model.AgreementStatus.StatusEnum.VIOLATED, 'Violated'),
+            (wsag_model.AgreementStatus.StatusEnum.NON_DETERMINED, 'Non determined')],
+        widget=forms.Select(attrs=_attrs),
+        required=False
+    )
+    provider = forms.CharField(
+        widget=forms.TextInput(attrs=_attrs),
+        required=False
+    )
+    consumer = forms.CharField(
+        widget=forms.TextInput(attrs=_attrs),
+        required=False
+    )
+
+
+class SLAView (FreeAccessView, ThemeView):
+    # Slice tab listing the SLA agreements of the logged-in user.
+    template_name = 'slice-tab-sla.html'
+
+    def get (self, request, slicename, state=None):
+        """Render the SLA tab: the current user's agreements (optionally
+        filtered through FilterForm) plus their enforcement state."""
+
+        page=Page(request)
+
+        consumer_id = None
+        agreement_id = None
+        enforcements = {}
+
+        filter_ = None
+        form = FilterForm(request.GET)
+        if form.is_valid():
+            print "IS VALID"
+            filter_ = _get_filter_from_form(form)
+
+        consumer_id = _get_consumer_id(request)
+
+        agreements = _get_agreements(agreement_id, consumer_id=consumer_id, filter_=filter_)
+
+        # Map agreement id -> enforcement 'enabled' flag (string).
+        for agreement in agreements:
+            enf = _get_enforcement(agreement.agreement_id)
+            enforcements[agreement.agreement_id] = enf.enabled
+
+        for key, value in enforcements.items():
+            print key + ": " + value
+
+        template_env = {}
+        # write something of our own instead
+        # more general variables expected in the template
+        template_env['title'] = 'SLA Agreements'
+        template_env['agreements'] = agreements
+        template_env['username'] = request.user
+        template_env['slicename'] = slicename
+        template_env['enforcements'] = enforcements
+
+        # the prelude object in page contains a summary of the requirements() for all plugins
+        # define {js,css}_{files,chunks}
+        prelude_env = page.prelude_env()
+        template_env.update(prelude_env)
+
+        return render_to_response (self.template_name, template_env, context_instance=RequestContext(request))
+
+
+class AgreementsFilter(object):
+    # NOTE(review): identical duplicate of the AgreementsFilter defined
+    # earlier in this module; this definition wins at import time.
+    def __init__(self, status=None, provider=None, consumer=None):
+        """Filter criteria for agreements; None/'' fields match anything."""
+        self.status = status
+        self.provider = provider
+        self.consumer = consumer
+
+    def __repr__(self):
+        return "<AgreementsFilter(status={}, provider={}, consumer={})>".format(
+            self.status, self.provider, self.consumer
+        )
+
+    @staticmethod
+    def _check(expectedvalue, actualvalue):
+        # An empty/None expected value acts as a wildcard.
+        if expectedvalue is None or expectedvalue == '':
+            return True
+        else:
+            return actualvalue == expectedvalue
+
+    def check(self, agreement):
+        """Check if this agreement satisfy the filter.
+
+        The agreement must be previously annotated
+        (guaranteestatus is set by wsag_helper.AgreementAnnotator).
+        """
+        guaranteestatus = agreement.guaranteestatus
+        provider = agreement.context.provider
+        consumer = agreement.context.consumer
+        return (
+            AgreementsFilter._check(self.status, guaranteestatus) and
+            AgreementsFilter._check(self.provider, provider) and
+            AgreementsFilter._check(self.consumer, consumer)
+        )
+
+
+class ContactForm(forms.Form):
+    # NOTE(review): does not appear to be used anywhere in this module --
+    # confirm before removing.
+    subject = forms.CharField(max_length=100)
+    message = forms.CharField()
+    sender = forms.EmailField()
+    cc_myself = forms.BooleanField(required=False)
+
+
+def _get_agreements_client():
+    # Factory for the SLA Manager agreements REST client.
+    # NOTE(review): duplicated later in this module.
+    return restclient.Factory.agreements()
+
+
+def _get_violations_client():
+    # Factory for the SLA Manager violations REST client.
+    return restclient.Factory.violations()
+
+def _get_enforcements_client():
+    # Factory for the SLA Manager enforcements REST client.
+    return restclient.Factory.enforcements()
+
+def _get_consumer_id(request):
+    # Returns the Django user object itself; downstream code passes it
+    # where a consumer id is expected -- presumably relying on its string
+    # representation. TODO confirm.
+    return request.user
+
+
+def _get_agreement(agreement_id):
+    # Fetch a single agreement by id from the SLA Manager.
+    # NOTE(review): duplicated later in this module.
+
+    agreements_client = _get_agreements_client()
+    agreement, response = agreements_client.getbyid(agreement_id)
+    return agreement
+
+def _get_enforcement(agreement_id):
+
+ enforcements_client = _get_enforcements_client()
+ enforcement, response = enforcements_client.getbyagreement(agreement_id)
+ return enforcement
+
+def _get_filter_from_form(form):
+
+ data = form.cleaned_data
+ result = AgreementsFilter(
+ data["status"], data["provider"], data["consumer"])
+ return result
+
+def agreement_term_violations(request, agreement_id, guarantee_name):
+    """Render the paginated list of violations of one guarantee term.
+
+    :param agreement_id: id of the agreement to display.
+    :param guarantee_name: name of the guarantee term whose violations
+        are listed (must exist in the agreement).
+    """
+    page = Page(request)
+    prelude_env = page.prelude_env()
+
+    annotator = wsag_helper.AgreementAnnotator()
+    agreement = _get_agreement(agreement_id)
+    violations = _get_agreement_violations(agreement_id, guarantee_name)
+    annotator.annotate_agreement(agreement)
+
+    slicename = request.POST.get('slicename')
+
+    paginator = Paginator(violations, 25) # Show 25 violations per page
+    page_num = request.GET.get('page')
+
+    try:
+        violation_page = paginator.page(page_num)
+    except PageNotAnInteger:
+        # If page is not an integer, deliver first page.
+        violation_page = paginator.page(1)
+    except EmptyPage:
+        # If page is out of range (e.g. 9999), deliver first page.
+        violation_page = paginator.page(1)
+
+    context = {
+        'agreement_id': agreement_id,
+        'guarantee_term': agreement.guaranteeterms[guarantee_name],
+        'violations': violation_page,
+        'agreement': agreement,
+        'slicename': slicename,
+    }
+
+    context.update(prelude_env)
+
+    return render_to_response ('violations_template.html', context, context_instance=RequestContext(request))
+# return render(request, 'violations_template.html', context)
+
+def agreement_details(request, agreement_id):
+    """Render the detail view of one agreement (status + violations).
+
+    NOTE(review): renders 'violations_template.html' although the
+    commented-out line below suggests 'agreement_detail.html' was
+    intended -- confirm which template should be used.
+    """
+    page = Page(request)
+    prelude_env = page.prelude_env()
+
+    annotator = wsag_helper.AgreementAnnotator()
+    agreement = _get_agreement(agreement_id)
+    violations = _get_agreement_violations(agreement_id)
+    status = _get_agreement_status(agreement_id)
+    annotator.annotate_agreement(agreement, status, violations)
+
+    # Group the violations by day for the template.
+    violations_by_date = wsag_helper.get_violations_bydate(violations)
+    context = {
+        'agreement_id': agreement_id,
+        'agreement': agreement,
+        'status': status,
+        'violations_by_date': violations_by_date
+    }
+
+    context.update(prelude_env)
+
+    return render_to_response ('violations_template.html', context, context_instance=RequestContext(request))
+    #return render(request, 'agreement_detail.html', context)
+
+
+def _get_agreements_client():
+    # NOTE(review): identical duplicate of the definition earlier in this
+    # module; this one wins at import time.
+    return restclient.Factory.agreements()
+
+
+def _get_agreement(agreement_id):
+    # NOTE(review): identical duplicate of the earlier _get_agreement in
+    # this module; this definition wins at import time.
+
+    agreements_client = _get_agreements_client()
+    agreement, response = agreements_client.getbyid(agreement_id)
+    return agreement
+
+def _get_agreements(agreement_id, provider_id=None, consumer_id=None, filter_=None):
+
+ agreements_client = _get_agreements_client()
+ if agreement_id is None:
+ if consumer_id is not None:
+ agreements, response = agreements_client.getbyconsumer(consumer_id)
+ elif provider_id is not None:
+ agreements, response = agreements_client.getbyprovider(provider_id)
+ else:
+ raise ValueError(
+ "Invalid values: consumer_id and provider_id are None")
+ else:
+ agreement, response = agreements_client.getbyid(agreement_id)
+ agreements = [agreement]
+
+ annotator = wsag_helper.AgreementAnnotator()
+ for agreement in agreements:
+ id_ = agreement.agreement_id
+ status = _get_agreement_status(id_)
+ annotator.annotate_agreement(agreement, status)
+
+ if filter_ is not None:
+ print "FILTERING ", repr(filter_)
+ agreements = filter(filter_.check, agreements);
+ else:
+ print "NOT FILTERING"
+ return agreements
+
+
+def _get_agreements_by_consumer(consumer_id):
+
+ agreements_client = _get_agreements_client()
+ agreements, response = agreements_client.getbyconsumer(consumer_id)
+ return agreements
+
+def _get_agreement_status(agreement_id):
+
+ agreements_client = _get_agreements_client()
+ status, response = agreements_client.getstatus(agreement_id)
+ return status
+
+def _get_agreement_violations(agreement_id, term=None):
+
+ violations_client = _get_violations_client()
+ violations, response = violations_client.getbyagreement(agreement_id, term)
+ return violations
+
+
+class AgreementSimple(APIView):
+ def build_response(self, code, text):
+ response = HttpResponse(text, content_type="text/plain", status=code)
+ return response
+
+ def post( self, request, **kwargs):
+ #import pdb; pdb.set_trace()
+ print "------------------------------------------------1"
+ data = {}
+ for key, value in request.DATA.items():
+ new_key = key
+ data[new_key] = value
+
+ try:
+ template_id = data['template_id']
+ except:
+ return self.build_response(400, 'Invalid template_id')
+
+ try:
+ user = data['user']
+ except:
+ return self.build_response(400, 'Invalid user')
+
+ try:
+ print "Calling createagreementsimplified with template_id:",template_id,"and user:",user
+ result = fed4fireservice.createagreementsimplified(template_id, user)
+ print result
+ except Exception, e:
+ print traceback.format_exc()
+ print '%s (%s)' % (e, type(e))
+
+ return self.build_response(400, 'Problem creating agreement')
+
+ return self.build_response(200, result)
--- /dev/null
+.container{width:100%;}
+.left{float:left;width:100px;}
+.right{float:right;width:100px;}
+.center{margin:0 auto;width:100px;}
\ No newline at end of file
--- /dev/null
+/**
+ * MyPlugin: demonstration plugin
+ * Version: 0.1
+ * Description: Template for writing new plugins and illustrating the different
+ * possibilities of the plugin API.
+ * This file is part of the Manifold project
+ * Requires: js/plugin.js
+ * URL: http://www.myslice.info
+ * Author: Jordan Augé <jordan.auge@lip6.fr>
+ * Copyright: Copyright 2012-2013 UPMC Sorbonne Universités
+ * License: GPLv3
+ */
+
+(function($){
+
+ var MyPlugin = Plugin.extend({
+
+ /** XXX to check
+ * @brief Plugin constructor
+ * @param options : an associative array of setting values
+ * @param element :
+ * @return : a jQuery collection of objects on which the plugin is
+ * applied, which allows to maintain chainability of calls
+ */
+ init: function(options, element) {
+ // for debugging tools
+ this.classname="myplugin";
+ // Call the parent constructor, see FAQ when forgotten
+ this._super(options, element);
+
+ /* Member variables */
+
+ /* Plugin events */
+
+ /* Setup query and record handlers */
+
+ // Explain this will allow query events to be handled
+ // What happens when we don't define some events ?
+ // Some can be less efficient
+ this.listen_query(options.query_uuid);
+ this.listen_query(options.query_uuid, 'all');
+
+ /* GUI setup and event binding */
+ // call function
+
+ },
+
+ /* PLUGIN EVENTS */
+ // on_show like in querytable
+
+
+ /* GUI EVENTS */
+
+ // a function to bind events here: click change
+ // how to raise manifold events
+
+
+ /* GUI MANIPULATION */
+
+ // We advise you to write function to change behaviour of the GUI
+ // Will use naming helpers to access content _inside_ the plugin
+ // always refer to these functions in the remaining of the code
+
+
+
+ this.id('showEvaluations').click(function() {
+ alert("WARNING! The experiments are still running.
+ These SLA evaluations could be different at the end of the experiments." );
+ $(".status").css("display","");
+ });
+ });
+
+ show_hide_button: function()
+ {
+ // this.id, this.el, this.cl, this.elts
+ // same output as a jquery selector with some guarantees
+ },
+
+ /* TEMPLATES */
+
+ // see in the html template
+ // How to load a template, use of mustache
+
+ /* QUERY HANDLERS */
+
+ // How to make sure the plugin is not desynchronized
+ // He should manifest its interest in filters, fields or records
+ // functions triggered only if the proper listen is done
+
+ // no prefix
+
+ on_filter_added: function(filter)
+ {
+
+ },
+
+ // ... be sure to list all events here
+
+ /* RECORD HANDLERS */
+ on_all_new_record: function(record)
+ {
+ //
+ },
+
+ /* INTERNAL FUNCTIONS */
+ _dummy: function() {
+ // only convention, not strictly enforced at the moment
+ },
+
+ });
+
+ /* Plugin registration */
+ $.plugin('MyPlugin', MyPlugin);
+
+ // TODO Here use cases for instanciating plugins in different ways like in the pastie.
+
+})(jQuery);
+
+
+$(document).ready(function() {
+ $(".status-success").addClass("icon-ok-sign").attr("title", "Fulfilled")
+ $(".status-error").addClass("icon-remove-sign").attr("title", "Violated")
+ $(".status-non-determined").addClass("icon-exclamation-sign").attr("title", "Non determined")
+
+ $(".icon-plus, .icon-minus").click(function(){ $(this).toggleClass("icon-plus icon-minus")});
+ console.log("ready")
+});
+
+$(".agreement_detail").click(function (ev) { // for each edit contact url
+ ev.preventDefault(); // prevent navigation
+ var url = $(this).data("form"); // get the contact form url
+ $("#sla-modal-agreements-{{ a.agreement_id }}").load(url, function () { // load the url into the modal
+ $(this).modal('show'); // display the modal on url load
+ });
+ return false; // prevent the click propagation
+});
+
+$('.agreement-detail').live('submit', function () {
+ $.ajax({
+ type: $(this).attr('method'),
+ url: this.action,
+ data: $(this).serialize(),
+ context: this,
+ success: function (data, status) {
+ $('#sla-modal-agreements-{{ a.agreement_id }}').html(data);
+ }
+ });
+ return false;
+});
+
+$(document).ready(function() {
+ console.log("consumer_agreements ready");
+});
+
+$(".violation-detail").click(function(ev) { // for each edit contact url
+ ev.preventDefault(); // prevent navigation
+ var url = $(this).data("href");
+ $("#violation-modal").load(url, function() { // load the url into the modal
+ $(this).modal('show'); // display the modal on url load
+ });
+ return false; // prevent the click propagation
+});
+
+
+ this.elts('showEvaluations').click(function(){displayDate()};
+
+
--- /dev/null
+<h2>Agreement detail</h2>
+
+<div class="modal fade" id="myModal" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="myModalLabel">search result</h4>
+ </div>
+ <div class="modal-body">
+ <div id="context">
+ <dl class="dl-horizontal">
+ {# Sanity default: if dd is empty, the values are permutated #}
+ <dt>Agreement Id</dt>
+ <dd>{{ agreement.agreement_id|default:" " }}</dd>
+ <dt>Provider</dt>
+ <dd>{{ agreement.context.provider|default:" " }}</dd>
+ <dt>Consumer</dt>
+ <dd>{{ agreement.context.consumer|default:" " }}</dd>
+ <dt>Service</dt>
+ <dd>{{ agreement.context.service_formatted|default:" " }}</dd>
+ <dt>Expiration time</dt>
+ <dd>{{ agreement.context.expirationtime|default:" " }}</dd>
+ </dl>
+ </div>
+
+ <div id="properties_summary">
+
+ <table class="table">
+ <tr>
+ <th>#</th>
+ <th>Metric name</th>
+ <th>Bounds</th>
+ <th># violations</th>
+ </tr>
+
+ {% for tname, t in agreement.guaranteeterms.items %}
+ <tr class="{{ t.statusclass }}">
+ <td>{{ forloop.counter }}</td>
+ <td>{{ t.servicelevelobjective.kpiname }}</td>
+ <td>({{ t.servicelevelobjective.bounds.0 }}, {{ t.servicelevelobjective.bounds.1 }})
+ </td>
+ <td>
+                    <a href="{% url 'agreement_term_violations' agreement.agreement_id t.name %}">{{ t.nviolations }}</a>
+ </td>
+ </tr>
+ {% empty %}
+ {% endfor %}
+ </table>
+ </div>
+
+ <div id="violations_summary_by_date">
+ <table class="table">
+ <tr>
+ <th>#</th>
+ <th>Date</th>
+ <th># violations</th>
+ </tr>
+
+ {% for date, violations in violations_by_date %}
+ <tr>
+ <td>{{ forloop.counter }}</td>
+ <td>{{ date|date }}</td>
+ <td>{{ violations|length }}</td>
+ </tr>
+ {% endfor %}
+ </table>
+ </div>
+
+
+ <div class="modal-footer">
+ <a href="{{ backurl }}" class="btn btn-default btn-back">Back</a>
+ </div>
+ </div>
+ </div>
+ </div>
+</div>
\ No newline at end of file
--- /dev/null
+<div id="agreements">
+
+{% for a in agreements %}
+ <div>
+ <span class="status-{{a.statusclass}}">{{a.statusclass}}</span>
+ <a data-toggle="collapse" data-parent="#agreements" href="#agreement-{{a.agreementId}}">{{a.context.service}}</a>
+ -
+ <a href="#" data-agreementid="{{a.agreementId}}">Detail</a>
+ </div>
+ <div id="agreement-{{a.agreementId}}" class="collapse">
+ <div>
+ {% for tname,t in a.guaranteeterms.items %}
+ <div>
+ <span class="status-{{t.statusclass}}">{{t.statusclass}}</span>
+ {{t.servicelevelobjective.kpiname}}
+ -
+ <a href="/violations/{{a.agreementId}}/{{t.name}}">Detail</a>
+ </div>
+ {% endfor %}
+ </div>
+ </div>
+{% empty %}
+{% endfor %}
+</div>
+
+
--- /dev/null
+
+<div class="col-md-2">
+</div>
+
+ <div class="col-md-8">
+ <div class="row" id="agreements">
+ <table class="table dataTable" id="sla_table" >
+ <thead>
+ <tr class="header">
+ <th colspan="2">Provider</th>
+ <!-- <th>Testbed</th>
+ <th>Slice_Id</th>
+ <th>Agreement</th>
+ <th>Metric</th>
+ <th>Violations</th>
+ <th>Result</th> -->
+ </tr>
+ </thead>
+ <tbody>
+
+ <tr class="header">
+ <td><span class="glyphicon glyphicon-chevron-down"></span></td>
+ <td>iMinds</td>
+ </tr>
+
+
+ {% for a in agreements %}
+
+
+<!-- Modal - columns selector -->
+<div class="modal fade" id="agreementModal{{a.agreement_id}}" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
+
+<style type="text/css" scoped>
+ .modal-lg {
+ width: 50%;
+ }
+</style>
+
+ <div class="modal-dialog modal-lg">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="myModalAgreement">Agreement details</h4>
+ </div>
+ <div class="modal-body">
+
+ <dt>Agreement Id</dt>
+ <dd>{{ a.agreement_id|default:" " }}</dd>
+ <dt>Provider</dt>
+ <dd>{{ a.context.provider|default:" " }}</dd>
+ <dt>Consumer</dt>
+ <dd>{{ a.context.consumer|default:" " }}</dd>
+ <dt>Service</dt>
+ <dd>Testbed guarantees 0.99 Uptime rate for 0.99 rate of the resources during the sliver lifetime</dd>
+ <dt>Testbed</dt>
+ <dd>{{ a.context.testbed_formatted }}</dd>
+ <dt>Accepted on:</dt>
+ <dd>{{ a.context.expirationtime|default:" " }}</dd>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
+ </div>
+ </div>
+ </div>
+</div>
+
+ <tr>
+ {% if a.guaranteestatus == "VIOLATED" %}
+ <td class="glyphicon glyphicon-remove-sign" style="color:red;"></td>
+ {% else %}
+ <td class="glyphicon glyphicon-ok-sign" style="color:green;"></td>
+ {% endif %}
+ <td>{{ a.context.template_id }}</td>
+ <td>{{ a.context.expirationtime }}</td>
+ <td>
+                        {% with a.agreement_id as key %}
+                        {# NOTE(review): "enforcements.key" looks up the
+                           literal string "key", not the variable bound above —
+                           Django templates cannot do variable dict lookups
+                           without a custom filter. Confirm intended behavior. #}
+                        {% if enforcements.key == false %}
+                            Disabled
+                        {% else %}
+                            Enabled
+                        {% endif %}
+                        {% endwith %}
+ </td>
+ <!-- <td>{{slicename}}</td> -->
+ <td>
+ <!-- <a class="agreement-detail" href="{% url "agreement_details" a.agreement_id %}" data-toggle="modal" data-target="#agreementModal">View Agreement</a> -->
+ <!-- <a class="agreement-detail" href="#" data-agreement="{{ a.agreement_id }}">View Agreement</a> -->
+ <a class="agreement-detail" data-toggle="modal" data-target="#agreementModal{{a.agreement_id}}">View Agreement</a>
+ </td>
+
+ {% for tname,t in a.guaranteeterms.items %}
+ <td> {{ t.servicelevelobjective.kpiname }}</td>
+ <td>
+ {% if t.status == "VIOLATED" %}
+
+ <!-- <a class="violation-detail" href="{% url "agreement_term_violations" a.agreement_id t.name %}" data-toggle="modal" data-target="#violationModal">View Violations</a>
+ <a class="violation-detail" href="#"
+ data-agreement="{{ a.agreement_id }}"
+ data-violation="{{ t.name }}">View Violations</a> -->
+ <a class="violation-detail" href="#" data-agreement="{{ a.agreement_id }}" data-violation="{{ t.name }}">View Violations</a>
+
+ {% endif %}
+ </td>
+                        <td class="status" style="display:none;">
+ {{ a.statusclass }}
+ </td>
+ {% endfor %}
+ </tr>
+
+ {% empty %}
+ {% endfor %}
+ </tbody>
+
+</table>
+</div>
+</div>
+<!-- <div class="row" style="float:right;">
+ <button id="showEvaluations" type="button" class="btn btn-default" onclick="displayDate()"><span class="glyphicon"></span>Show Evaluations</button>
+</div> -->
+
+<script>
+$(document).ready(function() {
+ $('a.violation-detail').click(function () {
+ var a = $(this).data('agreement');
+ var v = $(this).data('violation');
+ $("#sla").load('/sla/agreements/' + a + '/guarantees/' + v + '/violations', {'slicename': '{{ slicename }}'});
+ });
+
+ // $('a.agreement-detail').click(function () {
+ // var a = $(this).data('agreement');
+ // $("#sla").load('/sla/agreements/' + a + '/detail');
+ // });
+
+ $('tr.header').click(function(){
+ $(this).nextUntil('tr.header').toggle('fast');
+ $('.header .glyphicon').toggleClass('glyphicon-chevron-down glyphicon-chevron-right');
+ });
+});
+
+</script>
\ No newline at end of file
--- /dev/null
+
+<div class="col-md-2">
+</div>
+
+
+<!-- Modal - columns selector -->
+<div class="modal fade" id="myModal" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="myModalLabel">Agreement details</h4>
+ </div>
+ <div class="modal-body">
+
+
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
+ </div>
+ </div>
+ </div>
+</div>
+
+ <!-- Modal - columns selector -->
+<div class="modal fade" id="violationModal" tabindex="-1" role="dialog" aria-labelledby="myModalViolations" aria-hidden="true">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="myModalViolations">Violations</h4>
+ </div>
+ <div class="modal-body">
+
+
+
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
+ </div>
+ </div>
+ </div>
+</div>
+
+ <div class="col-md-8">
+ <div class="row" id="agreements">
+ <div class="panel-group" id="accordion">
+ <div class="panel panel-default">
+ <div class="panel-heading">
+ <h4 class="panel-title">
+ <dt>
+ <span class="glyphicon glyphicon-chevron-down"></span>
+ <a data-toggle="collapse" data-parent="#accordion" href="#collapseOne">Provider</a>
+ </dt>
+ <dd>{% with agreements|first as a %}{{ a.context.provider }}{% endwith %}</dd>
+ </h4>
+ </div>
+
+
+ {% for a in agreements %}
+
+ <div id="collapseOne" class="panel-collapse collapse in">
+ <div class="panel-body">
+ <table class="table">
+ <tr>
+ {% if a.guaranteestatus == "VIOLATED" %}
+ <td class="glyphicon glyphicon-remove-sign" style="color:red;"></td>
+ {% else %}
+ <td class="glyphicon glyphicon-ok-sign" style="color:green;"></td>
+ {% endif %}
+ <td>{{ a.context.template_id }}</td>
+ <!-- <td>{{slicename}}</td> -->
+ <td>
+ <!-- <a class="agreement-detail" href="{% url "agreement_details" a.agreement_id %}" data-toggle="modal" data-target="#agreementModal">View Agreement</a> -->
+ <!-- <a class="agreement-detail" href="#" data-agreement="{{ a.agreement_id }}">View Agreement</a> -->
+ <a class="agreement-detail" data-toggle="modal" data-target="#agreementModal{{a.agreement_id}}">View Agreement</a>
+ </td>
+
+ {% for tname,t in a.guaranteeterms.items %}
+ <td> {{ t.servicelevelobjective.kpiname }}</td>
+ <td>
+ {% if t.status == "VIOLATED" %}
+
+ <!-- <a class="violation-detail" href="{% url "agreement_term_violations" a.agreement_id t.name %}" data-toggle="modal" data-target="#violationModal">View Violations</a>
+ <a class="violation-detail" href="#"
+ data-agreement="{{ a.agreement_id }}"
+ data-violation="{{ t.name }}">View Violations</a> -->
+ <a class="violation-detail" href="#" data-agreement="{{ a.agreement_id }}" data-violation="{{ t.name }}">View Violations</a>
+
+ {% endif %}
+ </td>
+                                <td class="status" style="display:none;">
+ {{ a.statusclass }}
+ </td>
+ {% endfor %}
+ </tr>
+ </table>
+ </div>
+ </div>
+
+ <!-- Modal - columns selector -->
+ <div class="modal fade" id="agreementModal{{a.agreement_id}}" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
+
+ <style type="text/css" scoped>
+ .modal-lg {
+ width: 50%;
+ }
+ </style>
+
+ <div class="modal-dialog modal-lg">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="myModalAgreement">Agreement details</h4>
+ </div>
+ <div class="modal-body">
+
+ <dt>Agreement Id</dt>
+ <dd>{{ a.agreement_id|default:" " }}</dd>
+ <dt>Provider</dt>
+ <dd>{{ a.context.provider|default:" " }}</dd>
+ <dt>Consumer</dt>
+ <dd>{{ a.context.consumer|default:" " }}</dd>
+ <dt>Service</dt>
+ <dd>Testbed guarantees 0.99 Uptime rate for 0.99 rate of the resources during the sliver lifetime</dd>
+                    <dt>Template identifier</dt>
+ <dd>{{ a.context.template_id }}</dd>
+ <dt>Expiration time</dt>
+ <dd>{{ a.context.expirationtime|default:" " }}</dd>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
+ </div>
+ </div>
+ </div>
+ </div>
+
+ {% empty %}
+ {% endfor %}
+ </div>
+ </div>
+ </div>
+</div>
+<!-- <div class="row" style="float:right;">
+ <button id="showEvaluations" type="button" class="btn btn-default" onclick="displayDate()"><span class="glyphicon"></span>Show Evaluations</button>
+</div> -->
+
+<script>
+$(document).ready(function() {
+ $('a.violation-detail').click(function () {
+ var a = $(this).data('agreement');
+ var v = $(this).data('violation');
+ $("#sla").load('/sla/agreements/' + a + '/guarantees/' + v + '/violations', {'slicename': '{{ slicename }}'});
+ });
+
+ // $('a.agreement-detail').click(function () {
+ // var a = $(this).data('agreement');
+ // $("#sla").load('/sla/agreements/' + a + '/detail');
+ // });
+
+ // $('tr.header').click(function(){
+ // $(this).nextUntil('tr.header').toggle('fast');
+ // $('.header .glyphicon').toggleClass('glyphicon-chevron-down glyphicon-chevron-right');
+ // });
+});
+
+</script>
\ No newline at end of file
--- /dev/null
+<div class="col-md-6" id="violations" style="align:right;">
+
+ <table class="table table-hover">
+ <tr>
+ <th>#</th>
+ <th>Date</th>
+ <th>Actual value</th>
+ </tr>
+
+ {% for v in violations %}
+ <tr>
+ <td>{{forloop.counter}}</td>
+ <td>{{v.datetime}}</td>
+ <td>{{ v.actual_value|floatformat:"0" }}</td>
+ </tr>
+ {% empty %}
+ <tr><td colspan="3">No violations</td></tr>
+ {% endfor %}
+ </table>
+</div>
+<div>
+<a class="btn btn-default btn-back">Close</a>
+</div>
--- /dev/null
+<div class="col-md-2">
+</div>
+
+<div class="col-md-8">
+ <h2>
+ Violations
+ </h2>
+
+ <div id="context">
+ <dl class="dl-horizontal">
+ {# Sanity default: if dd is empty, the values are permutated #}
+ <dt>Agreement Id</dt>
+ <dd>{{agreement.agreement_id|default:" "}}</dd>
+ <dt>Service</dt>
+ <dd>{{agreement.context.service_formatted|default:" "}}</dd>
+ <dt>Metric name</dt>
+ <dd>{{guarantee_term.servicelevelobjective.kpiname|default:" "}}</dd>
+ {% with guarantee_term.servicelevelobjective.bounds as bounds %}
+ <dt>Threshold</dt>
+ <dd>{{bounds.0|default:" "}}</dd>
+ {% endwith %}
+
+ <dd ><button class="btn btn-default back" style="float:right;">Back</button></dd>
+
+ </dl>
+
+
+ </div>
+
+ <div id="violations">
+
+ <table class="table table-hover">
+ <tr>
+ <th>#</th>
+ <th>Date</th>
+ <th>Actual value</th>
+ </tr>
+
+ {% for v in violations %}
+ <tr>
+ <td>{{forloop.counter}}</td>
+ <td>{{v.datetime}}</td>
+ <td>{{v.actual_value}}</td>
+ </tr>
+ {% empty %}
+ <tr><td colspan="3">No violations</td></tr>
+ {% endfor %}
+ </table>
+ </div>
+
+ <ul class="pagination">
+
+ {% if violations.has_previous %}
+ <li><a class="navigate" data-pagenum="{{ 1 }}" href="#"><<First </a></li>
+ <li><a class="navigate" data-pagenum="{{ violations.previous_page_number }}" href="#"><Previous</a></li>
+ {% endif %}
+
+ <li>
+ <span class="current">
+ Page {{ violations.number }} of {{ violations.paginator.num_pages }}
+ </span>
+ </li>
+
+ {% if violations.has_next %}
+ <li><a class="navigate" data-pagenum="{{ violations.next_page_number }}" href="#">Next> </a></li>
+ <li><a class="navigate" data-pagenum="{{ violations.paginator.num_pages }}" href="#">Last>></a></li>
+ {% endif %}
+
+ </ul>
+</div>
+
+<script>
+$(document).ready(function() {
+ $('a.navigate').click(function () {
+ var page = $(this).data('pagenum');
+ $("#sla").load('{% url "agreement_term_violations" agreement_id guarantee_term.name %}?page=' + page);
+ });
+
+ $('button.back').click(function () {
+ $("#sla").load('{% url "agreements_summary" slicename %}');
+ });
+});
+</script>
\ No newline at end of file
--- /dev/null
+{
+ "auto_complete":
+ {
+ "selected_items":
+ [
+ ]
+ },
+ "buffers":
+ [
+ {
+ "file": "slice-tab-sla.html",
+ "settings":
+ {
+ "buffer_size": 6021,
+ "line_ending": "Unix"
+ }
+ }
+ ],
+ "build_system": "",
+ "command_palette":
+ {
+ "height": 0.0,
+ "selected_items":
+ [
+ ],
+ "width": 0.0
+ },
+ "console":
+ {
+ "height": 0.0,
+ "history":
+ [
+ ]
+ },
+ "distraction_free":
+ {
+ "menu_visible": true,
+ "show_minimap": false,
+ "show_open_files": false,
+ "show_tabs": false,
+ "side_bar_visible": false,
+ "status_bar_visible": false
+ },
+ "file_history":
+ [
+ ],
+ "find":
+ {
+ "height": 0.0
+ },
+ "find_in_files":
+ {
+ "height": 0.0,
+ "where_history":
+ [
+ ]
+ },
+ "find_state":
+ {
+ "case_sensitive": false,
+ "find_history":
+ [
+ ],
+ "highlight": true,
+ "in_selection": false,
+ "preserve_case": false,
+ "regex": false,
+ "replace_history":
+ [
+ ],
+ "reverse": false,
+ "show_context": true,
+ "use_buffer2": true,
+ "whole_word": false,
+ "wrap": true
+ },
+ "groups":
+ [
+ {
+ "selected": 0,
+ "sheets":
+ [
+ {
+ "buffer": 0,
+ "file": "slice-tab-sla.html",
+ "semi_transient": false,
+ "settings":
+ {
+ "buffer_size": 6021,
+ "regions":
+ {
+ },
+ "selection":
+ [
+ [
+ 0,
+ 0
+ ]
+ ],
+ "settings":
+ {
+ "syntax": "Packages/HTML/HTML.tmLanguage"
+ },
+ "translation.x": 0.0,
+ "translation.y": 0.0,
+ "zoom_level": 1.0
+ },
+ "stack_index": 0,
+ "type": "text"
+ }
+ ]
+ }
+ ],
+ "incremental_find":
+ {
+ "height": 0.0
+ },
+ "input":
+ {
+ "height": 0.0
+ },
+ "layout":
+ {
+ "cells":
+ [
+ [
+ 0,
+ 0,
+ 1,
+ 1
+ ]
+ ],
+ "cols":
+ [
+ 0.0,
+ 1.0
+ ],
+ "rows":
+ [
+ 0.0,
+ 1.0
+ ]
+ },
+ "menu_visible": true,
+ "output.find_results":
+ {
+ "height": 0.0
+ },
+ "project": "violations_template.sublime-project",
+ "replace":
+ {
+ "height": 0.0
+ },
+ "save_all_on_build": true,
+ "select_file":
+ {
+ "height": 0.0,
+ "selected_items":
+ [
+ ],
+ "width": 0.0
+ },
+ "select_project":
+ {
+ "height": 0.0,
+ "selected_items":
+ [
+ ],
+ "width": 0.0
+ },
+ "select_symbol":
+ {
+ "height": 0.0,
+ "selected_items":
+ [
+ ],
+ "width": 0.0
+ },
+ "settings":
+ {
+ },
+ "show_minimap": true,
+ "show_open_files": false,
+ "show_tabs": true,
+ "side_bar_visible": true,
+ "side_bar_width": 150.0,
+ "status_bar_visible": true,
+ "template_settings":
+ {
+ }
+}
--- /dev/null
+from django.conf.urls import patterns, url, include
+
+from sla import slicetabsla
+
+urlpatterns = patterns('',
+ url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
+ url(r'^(?P<slicename>[^/]+)/?$', slicetabsla.SLAView.as_view(), name="agreements_summary"),
+ url(r'^agreements/(?P<agreement_id>[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})/detail$', slicetabsla.agreement_details, name='agreement_details'),
+ url(r'^agreements/(?P<agreement_id>[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})/guarantees/(?P<guarantee_name>\w+)/violations$', slicetabsla.agreement_term_violations, name='agreement_term_violations'),
+ url(r'^agreements/simplecreate/?$', slicetabsla.AgreementSimple.as_view(), name="agreementsimple"),
+)
+
--- /dev/null
+import re\r
+import datetime\r
+\r
+from slaclient import wsag_model\r
+from slaclient.wsag_model import AgreementStatus\r
+from slaclient.wsag_model import Violation\r
+\r
+\r
+VIOLATED = AgreementStatus.StatusEnum.VIOLATED\r
+NON_DETERMINED = AgreementStatus.StatusEnum.NON_DETERMINED\r
+FULFILLED = AgreementStatus.StatusEnum.FULFILLED\r
+\r
+\r
+def get_violations_bydate(violations):\r
+ """Returns a list of violations per date, from a list of violations\r
+\r
+ :param violations list[Violation]:\r
+ :rtype: list\r
+ """\r
+ d = dict()\r
+ for v in violations:\r
+ assert isinstance(v, Violation)\r
+ date = v.datetime.date()\r
+ if not date in d:\r
+ d[date] = []\r
+ d[date].append(v)\r
+\r
+ result = [(key, d[key]) for key in sorted(d.keys(), reverse=True)]\r
+ return result\r
+\r
+\r
+class AgreementAnnotator(object):\r
+ """Annotates an agreement with the following attributes:\r
+\r
+ agreement.guaranteestatus\r
+ agreement.statusclass\r
+ agreement.guaranteeterms[*].status\r
+ agreement.guaranteeterms[*].statusclass\r
+ agreement.guaranteeterms[*].nviolations\r
+ agreement.guaranteeterms[*].servicelevelobjetive.bounds\r
+\r
+ """\r
+ def __init__(self):\r
+ pass\r
+\r
+ @staticmethod\r
+ def _get_statusclass(status):\r
+ if status is None or status == "" or status == NON_DETERMINED:\r
+ return "non-determined"\r
+ return "success" if status == FULFILLED else "error"\r
+\r
+ @staticmethod\r
+ def _parse_bounds(servicelevel):\r
+# pattern = re.compile(".*BETWEEN *[(]?(.*), *([^)]*)[)]?")\r
+ pattern = re.compile(".*GT *([+-]?\\d*\\.\\d+)(?![-+0-9\\.])")\r
+ constraint = eval(servicelevel.strip(' \t\n\r'))\r
+ m = pattern.match(constraint['constraint'])\r
+ return m.groups()\r
+\r
+ def _annotate_guaranteeterm(self, term, violations):\r
+ #\r
+ # Annotate a guarantee term: set bounds and violations\r
+ #\r
+ level = term.servicelevelobjective.customservicelevel\r
+ bounds = AgreementAnnotator._parse_bounds(level)\r
+ term.servicelevelobjective.bounds = bounds\r
+\r
+ #\r
+ # set status attribute if not set before\r
+ #\r
+ if not hasattr(term, 'status'):\r
+ term.status = wsag_model.AgreementStatus.StatusEnum.NON_DETERMINED\r
+ #\r
+ # TODO: efficiency\r
+ #\r
+ n = 0\r
+ for violation in violations:\r
+ if violation.metric_name == term.servicelevelobjective.kpiname:\r
+ n += 1\r
+ term.nviolations = n\r
+\r
+ def _annotate_guaranteeterm_by_status(\r
+ self, agreement, termstatus, violations):\r
+ #\r
+ # Annotate a guarantee term: it is different from the previous\r
+ # one in that this takes the status into account.\r
+ #\r
+ name = termstatus.name\r
+ status = termstatus.status\r
+\r
+ term = agreement.guaranteeterms[name]\r
+ term.status = status\r
+ term.statusclass = AgreementAnnotator._get_statusclass(status)\r
+ self._annotate_guaranteeterm(term, violations)\r
+\r
+ def annotate_agreement(self, agreement, status=None, violations=()):\r
+\r
+ """Annotate an agreement with certain values needed in the templates\r
+\r
+ :param wsag_model.Agreement agreement: agreement to annotate\r
+ :param wsag_model.AgreementStatus status: status of the agreement.\r
+ :param violations: list of agreement's violations\r
+ (wsag_model.Violation[])\r
+ """\r
+ a = agreement\r
+\r
+ if status is not None:\r
+ a.guaranteestatus = status.guaranteestatus\r
+ a.statusclass = self._get_statusclass(status.guaranteestatus)\r
+ for termstatus in status.guaranteeterms:\r
+ self._annotate_guaranteeterm_by_status(\r
+ agreement, termstatus, violations)\r
+ else:\r
+ a.guaranteestatus = NON_DETERMINED\r
+ for termname, term in agreement.guaranteeterms.items():\r
+ self._annotate_guaranteeterm(term, violations)\r