From: Jordan Augé
Date: Tue, 25 Feb 2014 15:40:54 +0000 (+0100)
Subject: IMPORTANT: myslice/manifold folder REMOVED due to conflicts with Manifold Server...
X-Git-Tag: myslice-1.1~308^2~1
X-Git-Url: http://git.onelab.eu/?p=myslice.git;a=commitdiff_plain;h=8cd242571082562afa089d7da255c8234055f685

IMPORTANT: myslice/manifold folder REMOVED due to conflicts with the Manifold Server package!
CREATED myslice/manifoldapi folder; MySlice now requires Manifold to be installed in order to benefit from libraries such as core, query and util.
---

diff --git a/auth/manifoldbackend.py b/auth/manifoldbackend.py
index ab224a35..eb87ab87 100644
--- a/auth/manifoldbackend.py
+++ b/auth/manifoldbackend.py
@@ -2,7 +2,7 @@ import time
 
 from django.contrib.auth.models import User
 
-from manifold.manifoldapi import ManifoldAPI, ManifoldException, ManifoldResult
+from manifoldapi.manifoldapi import ManifoldAPI, ManifoldException, ManifoldResult
 from manifold.core.query import Query
 
 # Name my backend 'ManifoldBackend'
diff --git a/manifold/core/__init__.py b/manifold/core/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/manifold/core/filter.py b/manifold/core/filter.py
deleted file mode 100644
index 3a213483..00000000
--- a/manifold/core/filter.py
+++ /dev/null
@@ -1,407 +0,0 @@
-from types import StringTypes
-try:
-    set
-except NameError:
-    from sets import Set
-    set = Set
-
-import time
-import datetime # Jordan
-#from manifold.util.parameter import Parameter, Mixed, python_type
-from manifold.util.predicate import Predicate, eq
-from itertools import ifilter
-
-class Filter(set):
-    """
-    A filter is a set of predicates
-    """
-
-    #def __init__(self, s=()):
-    #    super(Filter, self).__init__(s)
-
-    @staticmethod
-    def from_list(l):
-        f = Filter()
-        try:
-            for element in l:
-                f.add(Predicate(*element))
-        except Exception, e:
-            print "Error in setting Filter from list", e
-            return None
-        return f
-
-    @staticmethod
-    def from_dict(d):
-        f = Filter()
-        for key, value in d.items():
-            if key[0] in Predicate.operators.keys():
-                f.add(Predicate(key[1:], key[0], value))
-            else:
-                f.add(Predicate(key, '=', value))
-        return f
-
-    def to_list(self):
-        ret = []
-        for predicate in self:
-            ret.append(predicate.to_list())
-        return ret
-
-
-    @staticmethod
-    def from_clause(clause):
-        """
-        NOTE: We can only handle simple clauses formed of AND fields.
- """ - raise Exception, "Not implemented" - - def filter_by(self, predicate): - self.add(predicate) - return self - - def __str__(self): - return ' AND '.join([str(pred) for pred in self]) - - def __repr__(self): - return '' % ' AND '.join([str(pred) for pred in self]) - - def __key(self): - return tuple([hash(pred) for pred in self]) - - def __hash__(self): - return hash(self.__key()) - - def __additem__(self, value): - if value.__class__ != Predicate: - raise TypeError("Element of class Predicate expected, received %s" % value.__class__.__name__) - set.__additem__(self, value) - - def keys(self): - return set([x.key for x in self]) - - # XXX THESE FUNCTIONS SHOULD ACCEPT MULTIPLE FIELD NAMES - - def has(self, key): - for x in self: - if x.key == key: - return True - return False - - def has_op(self, key, op): - for x in self: - if x.key == key and x.op == op: - return True - return False - - def has_eq(self, key): - return self.has_op(key, eq) - - def get(self, key): - ret = [] - for x in self: - if x.key == key: - ret.append(x) - return ret - - def delete(self, key): - to_del = [] - for x in self: - if x.key == key: - to_del.append(x) - for x in to_del: - self.remove(x) - - #self = filter(lambda x: x.key != key, self) - - def get_op(self, key, op): - if isinstance(op, (list, tuple, set)): - for x in self: - if x.key == key and x.op in op: - return x.value - else: - for x in self: - if x.key == key and x.op == op: - return x.value - return None - - def get_eq(self, key): - return self.get_op(key, eq) - - def set_op(self, key, op, value): - for x in self: - if x.key == key and x.op == op: - x.value = value - return - raise KeyError, key - - def set_eq(self, key, value): - return self.set_op(key, eq, value) - - def get_predicates(self, key): - # XXX Would deserve returning a filter (cf usage in SFA gateway) - ret = [] - for x in self: - if x.key == key: - ret.append(x) - return ret - -# def filter(self, dic): -# # We go through every filter sequentially -# for predicate in self: -# print "predicate", predicate -# dic = predicate.filter(dic) -# return dic - - def match(self, dic, ignore_missing=True): - for predicate in self: - if not predicate.match(dic, ignore_missing): - return False - return True - - def filter(self, l): - output = [] - for x in l: - if self.match(x): - output.append(x) - return output - - def get_field_names(self): - field_names = set() - for predicate in self: - field_names |= predicate.get_field_names() - return field_names - -#class OldFilter(Parameter, dict): -# """ -# A type of parameter that represents a filter on one or more -# columns of a database table. -# Special features provide support for negation, upper and lower bounds, -# as well as sorting and clipping. -# -# -# fields should be a dictionary of field names and types. -# As of PLCAPI-4.3-26, we provide support for filtering on -# sequence types as well, with the special '&' and '|' modifiers. -# example : fields = {'node_id': Parameter(int, "Node identifier"), -# 'hostname': Parameter(int, "Fully qualified hostname", max = 255), -# ...} -# -# -# filter should be a dictionary of field names and values -# representing the criteria for filtering. 
-# example : filter = { 'hostname' : '*.edu' , site_id : [34,54] } -# Whether the filter represents an intersection (AND) or a union (OR) -# of these criteria is determined by the join_with argument -# provided to the sql method below -# -# Special features: -# -# * a field starting with '&' or '|' should refer to a sequence type -# the semantic is then that the object value (expected to be a list) -# should contain all (&) or any (|) value specified in the corresponding -# filter value. See other examples below. -# example : filter = { '|role_ids' : [ 20, 40 ] } -# example : filter = { '|roles' : ['tech', 'pi'] } -# example : filter = { '&roles' : ['admin', 'tech'] } -# example : filter = { '&roles' : 'tech' } -# -# * a field starting with the ~ character means negation. -# example : filter = { '~peer_id' : None } -# -# * a field starting with < [ ] or > means lower than or greater than -# < > uses strict comparison -# [ ] is for using <= or >= instead -# example : filter = { ']event_id' : 2305 } -# example : filter = { '>time' : 1178531418 } -# in this example the integer value denotes a unix timestamp -# -# * if a value is a sequence type, then it should represent -# a list of possible values for that field -# example : filter = { 'node_id' : [12,34,56] } -# -# * a (string) value containing either a * or a % character is -# treated as a (sql) pattern; * are replaced with % that is the -# SQL wildcard character. -# example : filter = { 'hostname' : '*.jp' } -# -# * the filter's keys starting with '-' are special and relate to sorting and clipping -# * '-SORT' : a field name, or an ordered list of field names that are used for sorting -# these fields may start with + (default) or - for denoting increasing or decreasing order -# example : filter = { '-SORT' : [ '+node_id', '-hostname' ] } -# * '-OFFSET' : the number of first rows to be ommitted -# * '-LIMIT' : the amount of rows to be returned -# example : filter = { '-OFFSET' : 100, '-LIMIT':25} -# -# Here are a few realistic examples -# -# GetNodes ( { 'node_type' : 'regular' , 'hostname' : '*.edu' , '-SORT' : 'hostname' , '-OFFSET' : 30 , '-LIMIT' : 25 } ) -# would return regular (usual) nodes matching '*.edu' in alphabetical order from 31th to 55th -# -# GetPersons ( { '|role_ids' : [ 20 , 40] } ) -# would return all persons that have either pi (20) or tech (40) roles -# -# GetPersons ( { '&role_ids' : 10 } ) -# GetPersons ( { '&role_ids' : 10 } ) -# GetPersons ( { '|role_ids' : [ 10 ] } ) -# GetPersons ( { '|role_ids' : [ 10 ] } ) -# all 4 forms are equivalent and would return all admin users in the system -# """ -# -# def __init__(self, fields = {}, filter = {}, doc = "Attribute filter"): -# # Store the filter in our dict instance -# dict.__init__(self, filter) -# -# # Declare ourselves as a type of parameter that can take -# # either a value or a list of values for each of the specified -# # fields. -# self.fields = dict ( [ ( field, Mixed (expected, [expected])) -# for (field,expected) in fields.iteritems() ] ) -# -# # Null filter means no filter -# Parameter.__init__(self, self.fields, doc = doc, nullok = True) -# -# def sql(self, api, join_with = "AND"): -# """ -# Returns a SQL conditional that represents this filter. 
-# """ -# -# # So that we always return something -# if join_with == "AND": -# conditionals = ["True"] -# elif join_with == "OR": -# conditionals = ["False"] -# else: -# assert join_with in ("AND", "OR") -# -# # init -# sorts = [] -# clips = [] -# -# for field, value in self.iteritems(): -# # handle negation, numeric comparisons -# # simple, 1-depth only mechanism -# -# modifiers={'~' : False, -# '<' : False, '>' : False, -# '[' : False, ']' : False, -# '-' : False, -# '&' : False, '|' : False, -# '{': False , -# } -# def check_modifiers(field): -# if field[0] in modifiers.keys(): -# modifiers[field[0]] = True -# field = field[1:] -# return check_modifiers(field) -# return field -# field = check_modifiers(field) -# -# # filter on fields -# if not modifiers['-']: -# if field not in self.fields: -# raise PLCInvalidArgument, "Invalid filter field '%s'" % field -# -# # handling array fileds always as compound values -# if modifiers['&'] or modifiers['|']: -# if not isinstance(value, (list, tuple, set)): -# value = [value,] -# -# if isinstance(value, (list, tuple, set)): -# # handling filters like '~slice_id':[] -# # this should return true, as it's the opposite of 'slice_id':[] which is false -# # prior to this fix, 'slice_id':[] would have returned ``slice_id IN (NULL) '' which is unknown -# # so it worked by coincidence, but the negation '~slice_ids':[] would return false too -# if not value: -# if modifiers['&'] or modifiers['|']: -# operator = "=" -# value = "'{}'" -# else: -# field="" -# operator="" -# value = "FALSE" -# else: -# value = map(str, map(api.db.quote, value)) -# if modifiers['&']: -# operator = "@>" -# value = "ARRAY[%s]" % ", ".join(value) -# elif modifiers['|']: -# operator = "&&" -# value = "ARRAY[%s]" % ", ".join(value) -# else: -# operator = "IN" -# value = "(%s)" % ", ".join(value) -# else: -# if value is None: -# operator = "IS" -# value = "NULL" -# elif isinstance(value, StringTypes) and \ -# (value.find("*") > -1 or value.find("%") > -1): -# operator = "LIKE" -# # insert *** in pattern instead of either * or % -# # we dont use % as requests are likely to %-expansion later on -# # actual replacement to % done in PostgreSQL.py -# value = value.replace ('*','***') -# value = value.replace ('%','***') -# value = str(api.db.quote(value)) -# else: -# operator = "=" -# if modifiers['<']: -# operator='<' -# if modifiers['>']: -# operator='>' -# if modifiers['[']: -# operator='<=' -# if modifiers[']']: -# operator='>=' -# #else: -# # value = str(api.db.quote(value)) -# # jordan -# if isinstance(value, StringTypes) and value[-2:] != "()": # XXX -# value = str(api.db.quote(value)) -# if isinstance(value, datetime.datetime): -# value = str(api.db.quote(str(value))) -# -# #if prefix: -# # field = "%s.%s" % (prefix,field) -# if field: -# clause = "\"%s\" %s %s" % (field, operator, value) -# else: -# clause = "%s %s %s" % (field, operator, value) -# -# if modifiers['~']: -# clause = " ( NOT %s ) " % (clause) -# -# conditionals.append(clause) -# # sorting and clipping -# else: -# if field not in ('SORT','OFFSET','LIMIT'): -# raise PLCInvalidArgument, "Invalid filter, unknown sort and clip field %r"%field -# # sorting -# if field == 'SORT': -# if not isinstance(value,(list,tuple,set)): -# value=[value] -# for field in value: -# order = 'ASC' -# if field[0] == '+': -# field = field[1:] -# elif field[0] == '-': -# field = field[1:] -# order = 'DESC' -# if field not in self.fields: -# raise PLCInvalidArgument, "Invalid field %r in SORT filter"%field -# sorts.append("%s 
%s"%(field,order)) -# # clipping -# elif field == 'OFFSET': -# clips.append("OFFSET %d"%value) -# # clipping continued -# elif field == 'LIMIT' : -# clips.append("LIMIT %d"%value) -# -# where_part = (" %s " % join_with).join(conditionals) -# clip_part = "" -# if sorts: -# clip_part += " ORDER BY " + ",".join(sorts) -# if clips: -# clip_part += " " + " ".join(clips) -## print 'where_part=',where_part,'clip_part',clip_part -# return (where_part,clip_part) -# diff --git a/manifold/core/query.py b/manifold/core/query.py deleted file mode 100644 index 976f4978..00000000 --- a/manifold/core/query.py +++ /dev/null @@ -1,585 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Query representation -# -# Copyright (C) UPMC Paris Universitas -# Authors: -# Jordan Augé -# Marc-Olivier Buob -# Thierry Parmentelat - -from types import StringTypes -from manifold.core.filter import Filter, Predicate -from manifold.util.frozendict import frozendict -from manifold.util.type import returns, accepts -from manifold.util.clause import Clause -import copy - -import json -import uuid - -def uniqid (): - return uuid.uuid4().hex - -debug=False -#debug=True - -class ParameterError(StandardError): pass - -class Query(object): - """ - Implements a TopHat query. - - We assume this is a correct DAG specification. - - 1/ A field designates several tables = OR specification. - 2/ The set of fields specifies a AND between OR clauses. - """ - - #--------------------------------------------------------------------------- - # Constructor - #--------------------------------------------------------------------------- - - def __init__(self, *args, **kwargs): - - self.query_uuid = uniqid() - - # Initialize optional parameters - self.clear() - - #l = len(kwargs.keys()) - len_args = len(args) - - if len(args) == 1: - if isinstance(args[0], dict): - kwargs = args[0] - args = [] - - # Initialization from a tuple - - if len_args in range(2, 7) and type(args) == tuple: - # Note: range(x,y) <=> [x, y[ - - # XXX UGLY - if len_args == 3: - self.action = 'get' - self.params = {} - self.timestamp = 'now' - self.object, self.filters, self.fields = args - elif len_args == 4: - self.object, self.filters, self.params, self.fields = args - self.action = 'get' - self.timestamp = 'now' - else: - self.action, self.object, self.filters, self.params, self.fields, self.timestamp = args - - # Initialization from a dict - elif "object" in kwargs: - if "action" in kwargs: - self.action = kwargs["action"] - del kwargs["action"] - else: - print "W: defaulting to get action" - self.action = "get" - - - self.object = kwargs["object"] - del kwargs["object"] - - if "filters" in kwargs: - self.filters = kwargs["filters"] - del kwargs["filters"] - else: - self.filters = Filter() - - if "fields" in kwargs: - self.fields = set(kwargs["fields"]) - del kwargs["fields"] - else: - self.fields = set() - - # "update table set x = 3" => params == set - if "params" in kwargs: - self.params = kwargs["params"] - del kwargs["params"] - else: - self.params = {} - - if "timestamp" in kwargs: - self.timestamp = kwargs["timestamp"] - del kwargs["timestamp"] - else: - self.timestamp = "now" - - if kwargs: - raise ParameterError, "Invalid parameter(s) : %r" % kwargs.keys() - #else: - # raise ParameterError, "No valid constructor found for %s : args = %r" % (self.__class__.__name__, args) - - self.sanitize() - - def sanitize(self): - if not self.filters: self.filters = Filter() - if not self.params: self.params = {} - if not self.fields: self.fields = set() - if not 
self.timestamp: self.timestamp = "now" - - if isinstance(self.filters, list): - f = self.filters - self.filters = Filter() - for x in f: - pred = Predicate(x) - self.filters.add(pred) - elif isinstance(self.filters, Clause): - self.filters = Filter.from_clause(self.filters) - - if isinstance(self.fields, list): - self.fields = set(self.fields) - - for field in self.fields: - if not isinstance(field, StringTypes): - raise TypeError("Invalid field name %s (string expected, got %s)" % (field, type(field))) - - #--------------------------------------------------------------------------- - # Helpers - #--------------------------------------------------------------------------- - - def copy(self): - return copy.deepcopy(self) - - def clear(self): - self.action = 'get' - self.object = None - self.filters = Filter() - self.params = {} - self.fields = set() - self.timestamp = 'now' # ignored for now - - def to_sql(self, platform='', multiline=False): - get_params_str = lambda : ', '.join(['%s = %r' % (k, v) for k, v in self.get_params().items()]) - get_select_str = lambda : ', '.join(self.get_select()) - - table = self.get_from() - select = 'SELECT %s' % (get_select_str() if self.get_select() else '*') - where = 'WHERE %s' % self.get_where() if self.get_where() else '' - at = 'AT %s' % self.get_timestamp() if self.get_timestamp() else '' - params = 'SET %s' % get_params_str() if self.get_params() else '' - - sep = ' ' if not multiline else '\n ' - if platform: platform = "%s:" % platform - strmap = { - 'get' : '%(select)s%(sep)s%(at)s%(sep)sFROM %(platform)s%(table)s%(sep)s%(where)s%(sep)s', - 'update': 'UPDATE %(platform)s%(table)s%(sep)s%(params)s%(sep)s%(where)s%(sep)s%(select)s', - 'create': 'INSERT INTO %(platform)s%(table)s%(sep)s%(params)s%(sep)s%(select)s', - 'delete': 'DELETE FROM %(platform)s%(table)s%(sep)s%(where)s' - } - - return strmap[self.action] % locals() - - @returns(StringTypes) - def __str__(self): - return self.to_sql(multiline=True) - - @returns(StringTypes) - def __repr__(self): - return self.to_sql() - - def __key(self): - return (self.action, self.object, self.filters, frozendict(self.params), frozenset(self.fields)) - - def __hash__(self): - return hash(self.__key()) - - #--------------------------------------------------------------------------- - # Conversion - #--------------------------------------------------------------------------- - - def to_dict(self): - return { - 'action': self.action, - 'object': self.object, - 'timestamp': self.timestamp, - 'filters': self.filters.to_list(), - 'params': self.params, - 'fields': list(self.fields) - } - - def to_json (self, analyzed_query=None): - query_uuid=self.query_uuid - a=self.action - o=self.object - t=self.timestamp - f=json.dumps (self.filters.to_list()) - p=json.dumps (self.params) - c=json.dumps (list(self.fields)) - # xxx unique can be removed, but for now we pad the js structure - unique=0 - - if not analyzed_query: - aq = 'null' - else: - aq = analyzed_query.to_json() - sq="{}" - - result= """ new ManifoldQuery('%(a)s', '%(o)s', '%(t)s', %(f)s, %(p)s, %(c)s, %(unique)s, '%(query_uuid)s', %(aq)s, %(sq)s)"""%locals() - if debug: print 'ManifoldQuery.to_json:',result - return result - - # this builds a ManifoldQuery object from a dict as received from javascript through its ajax request - # we use a json-encoded string - see manifold.js for the sender part - # e.g. 
here's what I captured from the server's output - # manifoldproxy.proxy: request.POST - def fill_from_POST (self, POST_dict): - try: - json_string=POST_dict['json'] - dict=json.loads(json_string) - for (k,v) in dict.iteritems(): - setattr(self,k,v) - except: - print "Could not decode incoming ajax request as a Query, POST=",POST_dict - if (debug): - import traceback - traceback.print_exc() - self.sanitize() - - #--------------------------------------------------------------------------- - # Accessors - #--------------------------------------------------------------------------- - - @returns(StringTypes) - def get_action(self): - return self.action - - @returns(frozenset) - def get_select(self): - return frozenset(self.fields) - - @returns(StringTypes) - def get_from(self): - return self.object - - @returns(Filter) - def get_where(self): - return self.filters - - @returns(dict) - def get_params(self): - return self.params - - @returns(StringTypes) - def get_timestamp(self): - return self.timestamp - -#DEPRECATED# -#DEPRECATED# def make_filters(self, filters): -#DEPRECATED# return Filter(filters) -#DEPRECATED# -#DEPRECATED# def make_fields(self, fields): -#DEPRECATED# if isinstance(fields, (list, tuple)): -#DEPRECATED# return set(fields) -#DEPRECATED# else: -#DEPRECATED# raise Exception, "Invalid field specification" - - #--------------------------------------------------------------------------- - # LINQ-like syntax - #--------------------------------------------------------------------------- - - @classmethod - #@returns(Query) - def action(self, action, object): - """ - (Internal usage). Craft a Query according to an action name - See methods: get, update, delete, execute. - Args: - action: A String among {"get", "update", "delete", "execute"} - object: The name of the queried object (String) - Returns: - The corresponding Query instance - """ - query = Query() - query.action = action - query.object = object - return query - - @classmethod - #@returns(Query) - def get(self, object): - """ - Craft the Query which fetches the records related to a given object - Args: - object: The name of the queried object (String) - Returns: - The corresponding Query instance - """ - return self.action("get", object) - - @classmethod - #@returns(Query) - def update(self, object): - """ - Craft the Query which updates the records related to a given object - Args: - object: The name of the queried object (String) - Returns: - The corresponding Query instance - """ - return self.action("update", object) - - @classmethod - #@returns(Query) - def create(self, object): - """ - Craft the Query which create the records related to a given object - Args: - object: The name of the queried object (String) - Returns: - The corresponding Query instance - """ - return self.action("create", object) - - @classmethod - #@returns(Query) - def delete(self, object): - """ - Craft the Query which delete the records related to a given object - Args: - object: The name of the queried object (String) - Returns: - The corresponding Query instance - """ - return self.action("delete", object) - - @classmethod - #@returns(Query) - def execute(self, object): - """ - Craft the Query which execute a processing related to a given object - Args: - object: The name of the queried object (String) - Returns: - The corresponding Query instance - """ - return self.action("execute", object) - - #@returns(Query) - def at(self, timestamp): - """ - Set the timestamp carried by the query - Args: - timestamp: The timestamp (it may be a python 
timestamp, a string - respecting the "%Y-%m-%d %H:%M:%S" python format, or "now") - Returns: - The self Query instance - """ - self.timestamp = timestamp - return self - - def filter_by(self, *args): - """ - Args: - args: It may be: - - the parts of a Predicate (key, op, value) - - None - - a Filter instance - - a set/list/tuple of Predicate instances - """ - if len(args) == 1: - filters = args[0] - if filters == None: - self.filters = Filter() - return self - if not isinstance(filters, (set, list, tuple, Filter)): - filters = [filters] - for predicate in filters: - self.filters.add(predicate) - elif len(args) == 3: - predicate = Predicate(*args) - self.filters.add(predicate) - else: - raise Exception, 'Invalid expression for filter' - return self - - def select(self, *fields): - - # Accept passing iterables - if len(fields) == 1: - tmp, = fields - if not tmp: - fields = None - elif isinstance(tmp, (list, tuple, set, frozenset)): - fields = tuple(tmp) - - if not fields: - # Delete all fields - self.fields = set() - return self - - for field in fields: - self.fields.add(field) - return self - - def set(self, params): - self.params.update(params) - return self - - def __or__(self, query): - assert self.action == query.action - assert self.object == query.object - assert self.timestamp == query.timestamp # XXX - filter = self.filters | query.filters - # fast dict union - # http://my.safaribooksonline.com/book/programming/python/0596007973/python-shortcuts/pythoncook2-chp-4-sect-17 - params = dict(self.params, **query.params) - fields = self.fields | query.fields - return Query.action(self.action, self.object).filter_by(filter).select(fields) - - def __and__(self, query): - assert self.action == query.action - assert self.object == query.object - assert self.timestamp == query.timestamp # XXX - filter = self.filters & query.filters - # fast dict intersection - # http://my.safaribooksonline.com/book/programming/python/0596007973/python-shortcuts/pythoncook2-chp-4-sect-17 - params = dict.fromkeys([x for x in self.params if x in query.params]) - fields = self.fields & query.fields - return Query.action(self.action, self.object).filter_by(filter).select(fields) - - def __le__(self, query): - return ( self == self & query ) or ( query == self | query ) - -class AnalyzedQuery(Query): - - # XXX we might need to propagate special parameters sur as DEBUG, etc. 
- - def __init__(self, query=None, metadata=None): - self.clear() - self.metadata = metadata - if query: - self.query_uuid = query.query_uuid - self.analyze(query) - else: - self.query_uuid = uniqid() - - @returns(StringTypes) - def __str__(self): - out = [] - fields = self.get_select() - fields = ", ".join(fields) if fields else '*' - out.append("SELECT %s FROM %s WHERE %s" % ( - fields, - self.get_from(), - self.get_where() - )) - cpt = 1 - for method, subquery in self.subqueries(): - out.append(' [SQ #%d : %s] %s' % (cpt, method, str(subquery))) - cpt += 1 - - return "\n".join(out) - - def clear(self): - super(AnalyzedQuery, self).clear() - self._subqueries = {} - - def subquery(self, method): - # Allows for the construction of a subquery - if not method in self._subqueries: - analyzed_query = AnalyzedQuery(metadata=self.metadata) - analyzed_query.action = self.action - try: - type = self.metadata.get_field_type(self.object, method) - except ValueError ,e: # backwards 1..N - type = method - analyzed_query.object = type - self._subqueries[method] = analyzed_query - return self._subqueries[method] - - def get_subquery(self, method): - return self._subqueries.get(method, None) - - def remove_subquery(self, method): - del self._subqueries[method] - - def get_subquery_names(self): - return set(self._subqueries.keys()) - - def get_subqueries(self): - return self._subqueries - - def subqueries(self): - for method, subquery in self._subqueries.iteritems(): - yield (method, subquery) - - def filter_by(self, filters): - if not isinstance(filters, (set, list, tuple, Filter)): - filters = [filters] - for predicate in filters: - if predicate and '.' in predicate.key: - method, subkey = predicate.key.split('.', 1) - # Method contains the name of the subquery, we need the type - # XXX type = self.metadata.get_field_type(self.object, method) - sub_pred = Predicate(subkey, predicate.op, predicate.value) - self.subquery(method).filter_by(sub_pred) - else: - super(AnalyzedQuery, self).filter_by(predicate) - return self - - def select(self, *fields): - - # XXX passing None should reset fields in all subqueries - - # Accept passing iterables - if len(fields) == 1: - tmp, = fields - if isinstance(tmp, (list, tuple, set, frozenset)): - fields = tuple(tmp) - - for field in fields: - if field and '.' in field: - method, subfield = field.split('.', 1) - # Method contains the name of the subquery, we need the type - # XXX type = self.metadata.get_field_type(self.object, method) - self.subquery(method).select(subfield) - else: - super(AnalyzedQuery, self).select(field) - return self - - def set(self, params): - for param, value in self.params.items(): - if '.' 
in param: - method, subparam = param.split('.', 1) - # Method contains the name of the subquery, we need the type - # XXX type = self.metadata.get_field_type(self.object, method) - self.subquery(method).set({subparam: value}) - else: - super(AnalyzedQuery, self).set({param: value}) - return self - - def analyze(self, query): - self.clear() - self.action = query.action - self.object = query.object - self.filter_by(query.filters) - self.set(query.params) - self.select(query.fields) - - def to_json (self): - query_uuid=self.query_uuid - a=self.action - o=self.object - t=self.timestamp - f=json.dumps (self.filters.to_list()) - p=json.dumps (self.params) - c=json.dumps (list(self.fields)) - # xxx unique can be removed, but for now we pad the js structure - unique=0 - - aq = 'null' - sq=", ".join ( [ "'%s':%s" % (object, subquery.to_json()) - for (object, subquery) in self._subqueries.iteritems()]) - sq="{%s}"%sq - - result= """ new ManifoldQuery('%(a)s', '%(o)s', '%(t)s', %(f)s, %(p)s, %(c)s, %(unique)s, '%(query_uuid)s', %(aq)s, %(sq)s)"""%locals() - if debug: print 'ManifoldQuery.to_json:',result - return result diff --git a/manifold/core/result_value.py b/manifold/core/result_value.py deleted file mode 100644 index 4fe505f8..00000000 --- a/manifold/core/result_value.py +++ /dev/null @@ -1,254 +0,0 @@ -# Inspired from GENI error codes - -import time -import pprint - -class ResultValue(dict): - - # type - SUCCESS = 0 - WARNING = 1 - ERROR = 2 - - # origin - CORE = 0 - GATEWAY = 1 - - # code - SUCCESS = 0 - SERVERBUSY = 32001 - BADARGS = 1 - ERROR = 2 - FORBIDDEN = 3 - BADVERSION = 4 - SERVERERROR = 5 - TOOBIG = 6 - REFUSED = 7 - TIMEDOUT = 8 - DBERROR = 9 - RPCERROR = 10 - - # description - ERRSTR = { - SUCCESS : 'Success', - SERVERBUSY : 'Server is (temporarily) too busy; try again later', - BADARGS : 'Bad Arguments: malformed', - ERROR : 'Error (other)', - FORBIDDEN : 'Operation Forbidden: eg supplied credentials do not provide sufficient privileges (on the given slice)', - BADVERSION : 'Bad Version (eg of RSpec)', - SERVERERROR : 'Server Error', - TOOBIG : 'Too Big (eg request RSpec)', - REFUSED : 'Operation Refused', - TIMEDOUT : 'Operation Timed Out', - DBERROR : 'Database Error', - RPCERROR : '' - } - - ALLOWED_FIELDS = set(['origin', 'type', 'code', 'value', 'description', 'traceback', 'ts']) - - def __init__(self, **kwargs): - - # Checks - given = set(kwargs.keys()) - cstr_success = set(['code', 'origin', 'value']) <= given - cstr_error = set(['code', 'type', 'origin', 'description']) <= given - assert given <= self.ALLOWED_FIELDS, "Wrong fields in ResultValue constructor: %r" % (given - self.ALLOWED_FIELDS) - assert cstr_success or cstr_error, 'Incomplete set of fields in ResultValue constructor: %r' % given - - dict.__init__(self, **kwargs) - - # Set missing fields to None - for field in self.ALLOWED_FIELDS - given: - self[field] = None - if not 'ts' in self: - self['ts'] = time.time() - - - # Internal MySlice errors : return ERROR - # Internal MySlice warnings : return RESULT WITH WARNINGS - # Debug : add DEBUG INFORMATION - # Gateway errors : return RESULT WITH WARNING - # all Gateways errors : return ERROR - - @classmethod - def get_result_value(self, results, result_value_array): - # let's analyze the results of the query plan - # XXX we should inspect all errors to determine whether to return a - # result or not - if not result_value_array: - # No error - return ResultValue(code=self.SUCCESS, origin=[self.CORE, 0], value=results) - else: - # Handle errors - return 
ResultValue(code=self.WARNING, origin=[self.CORE, 0], description=result_value_array, value=results) - - @classmethod - def get_error(self, error): - return ResultValue(code=error, origin=[self.CORE, 0], value=self.ERRSTR[error]) - - @classmethod - def get_success(self, result): - return ResultValue(code=self.SUCCESS, origin=[self.CORE, 0], value=result) - - def ok_value(self): - return self['value'] - - def error(self): - err = "%r" % self['description'] - - @staticmethod - def to_html (raw_dict): - return pprint.pformat (raw_dict).replace("\\n","
") - -# 67 -# 68 9 -# 69 -# 70 Database Error -# 71 -# 72 -# 73 10 -# 74 -# 75 RPC Error -# 76 -# 77 -# 78 11 -# 79 -# 80 Unavailable (eg server in lockdown) -# 81 -# 82 -# 83 12 -# 84 -# 85 Search Failed (eg for slice) -# 86 -# 87 -# 88 13 -# 89 -# 90 Operation Unsupported -# 91 -# 92 -# 93 14 -# 94 -# 95 Busy (resource, slice, or server); try again -# later -# 96 -# 97 -# 98 15 -# 99 -# 100 Expired (eg slice) -# 101 -# 102 -# 103 16 -# 104 -# 105 In Progress -# 106 -# 107 -# 108 17 -# 109 -# 110 Already Exists (eg slice) -# 111 -# 112 -# 114 -# 115 18 -# 116 -# 117 Required argument(s) missing -# 118 -# 119 -# 120 19 -# 121 -# 122 Input Argument outside of legal range -# 123 -# 124 -# 125 20 -# 126 -# 127 Not authorized: Supplied credential is -# invalid -# 128 -# 129 -# 130 21 -# 131 -# 132 Not authorized: Supplied credential expired -# 133 -# 134 -# 135 22 -# 136 -# 137 Not authorized: Supplied credential does not match client -# certificate or does not match the given slice URN -# 138 -# 139 -# 140 23 -# 141 -# 142 Not authorized: Supplied credential not signed by a trusted -# authority -# 143 -# 144 -# 145 24 -# 146 -# 147 VLAN tag(s) requested not available (likely stitching -# failure) -# 148 -# 149 -# 150 -# diff --git a/manifold/util/__init__.py b/manifold/util/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/manifold/util/autolog.py b/manifold/util/autolog.py deleted file mode 100644 index e88b6f33..00000000 --- a/manifold/util/autolog.py +++ /dev/null @@ -1,422 +0,0 @@ -# Written by Brendan O'Connor, brenocon@gmail.com, www.anyall.org -# * Originally written Aug. 2005 -# * Posted to gist.github.com/16173 on Oct. 2008 - -# Copyright (c) 2003-2006 Open Source Applications Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re, sys, types - -""" -Have all your function & method calls automatically logged, in indented outline -form - unlike the stack snapshots in an interactive debugger, it tracks call -structure & stack depths across time! - -It hooks into all function calls that you specify, and logs each time they're -called. I find it especially useful when I don't know what's getting called -when, or need to continuously test for state changes. (by hacking this file) - -Originally inspired from the python cookbook: -http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/198078 - -Currently you can - - tag functions or individual methods to be autologged - - tag an entire class's methods to be autologged - - tag an entire module's classes and functions to be autologged - -TODO: - - allow tagging of ALL modules in the program on startup? - -CAVEATS: - - certain classes barf when you logclass() them -- most notably, - SWIG-generated wrappers, and perhaps others. - -USAGE: see examples on the bottom of this file. - - -Viewing tips -============ - -If your terminal can't keep up, try xterm or putty, they seem to be highest -performance. xterm is available for all platforms through X11... 
- -Also try: (RunChandler > log &); tail -f log - -Also, you can "less -R log" afterward and get the colors correct. - -If you have long lines, less -RS kills wrapping, enhancing readability. Also -can chop at formatAllArgs(). - -If you want long lines to be chopped realtime, try piping through less:: - - RunChandler | less -RS - -but then you have to hit 'space' lots to prevent chandler from freezing. -less's 'F' command is supposed to do this correctly but doesn't work for me. -""" - - -#@@@ should use the standard python logging system? -log = sys.stdout - -# Globally incremented across function calls, so tracks stack depth -indent = 0 -indStr = ' ' - - -# ANSI escape codes for terminals. -# X11 xterm: always works, all platforms -# cygwin dosbox: run through |cat and then colors work -# linux: works on console & gnome-terminal -# mac: untested - - -BLACK = "\033[0;30m" -BLUE = "\033[0;34m" -GREEN = "\033[0;32m" -CYAN = "\033[0;36m" -RED = "\033[0;31m" -PURPLE = "\033[0;35m" -BROWN = "\033[0;33m" -GRAY = "\033[0;37m" -BOLDGRAY = "\033[1;30m" -BOLDBLUE = "\033[1;34m" -BOLDGREEN = "\033[1;32m" -BOLDCYAN = "\033[1;36m" -BOLDRED = "\033[1;31m" -BOLDPURPLE = "\033[1;35m" -BOLDYELLOW = "\033[1;33m" -WHITE = "\033[1;37m" - -NORMAL = "\033[0m" - - -def indentlog(message): - global log, indStr, indent - print >>log, "%s%s" %(indStr*indent, message) - log.flush() - -def shortstr(obj): - """ - Where to put gritty heuristics to make an object appear in most useful - form. defaults to __str__. - """ - if "wx." in str(obj.__class__) or obj.__class__.__name__.startswith("wx"): - shortclassname = obj.__class__.__name__ - ##shortclassname = str(obj.__class__).split('.')[-1] - if hasattr(obj, "blockItem") and hasattr(obj.blockItem, "blockName"): - moreInfo = "block:'%s'" %obj.blockItem.blockName - else: - moreInfo = "at %d" %id(obj) - return "<%s %s>" % (shortclassname, moreInfo) - else: - return str(obj) - -def formatAllArgs(args, kwds): - """ - makes a nice string representation of all the arguments - """ - allargs = [] - for item in args: - allargs.append('%s' % shortstr(item)) - for key,item in kwds.items(): - allargs.append('%s=%s' % (key,shortstr(item))) - formattedArgs = ', '.join(allargs) - if len(formattedArgs) > 150: - return formattedArgs[:146] + " ..." - return formattedArgs - - -def logmodules(listOfModules): - for m in listOfModules: - bindmodule(m) - -def logmodule(module, logMatch=".*", logNotMatch="nomatchasfdasdf"): - """ - WARNING: this seems to break if you import SWIG wrapper classes - directly into the module namespace ... logclass() creates weirdness when - used on them, for some reason. - - @param module: could be either an actual module object, or the string - you can import (which seems to be the same thing as its - __name__). So you can say logmodule(__name__) at the end - of a module definition, to log all of it. 
- """ - - allow = lambda s: re.match(logMatch, s) and not re.match(logNotMatch, s) - - if isinstance(module, str): - d = {} - exec "import %s" % module in d - import sys - module = sys.modules[module] - - names = module.__dict__.keys() - for name in names: - if not allow(name): continue - - value = getattr(module, name) - if isinstance(value, type): - setattr(module, name, logclass(value)) - print>>log,"autolog.logmodule(): bound %s" %name - elif isinstance(value, types.FunctionType): - setattr(module, name, logfunction(value)) - print>>log,"autolog.logmodule(): bound %s" %name - -def logfunction(theFunction, displayName=None): - """a decorator.""" - if not displayName: displayName = theFunction.__name__ - - def _wrapper(*args, **kwds): - global indent - argstr = formatAllArgs(args, kwds) - - # Log the entry into the function - indentlog("%s%s%s (%s) " % (BOLDRED,displayName,NORMAL, argstr)) - log.flush() - - indent += 1 - returnval = theFunction(*args,**kwds) - indent -= 1 - - # Log return - ##indentlog("return: %s"% shortstr(returnval) - return returnval - return _wrapper - -def logmethod(theMethod, displayName=None): - """use this for class or instance methods, it formats with the object out front.""" - if not displayName: displayName = theMethod.__name__ - def _methodWrapper(self, *args, **kwds): - "Use this one for instance or class methods" - global indent - - argstr = formatAllArgs(args, kwds) - selfstr = shortstr(self) - - #print >> log,"%s%s. %s (%s) " % (indStr*indent,selfstr,methodname,argstr) - indentlog("%s.%s%s%s (%s) " % (selfstr, BOLDRED,theMethod.__name__,NORMAL, argstr)) - log.flush() - - indent += 1 - - if theMethod.__name__ == 'OnSize': - indentlog("position, size = %s%s %s%s" %(BOLDBLUE, self.GetPosition(), self.GetSize(), NORMAL)) - - returnval = theMethod(self, *args,**kwds) - - indent -= 1 - - return returnval - return _methodWrapper - - -def logclass(cls, methodsAsFunctions=False, - logMatch=".*", logNotMatch="asdfnomatch"): - """ - A class "decorator". But python doesn't support decorator syntax for - classes, so do it manually:: - - class C(object): - ... - C = logclass(C) - - @param methodsAsFunctions: set to True if you always want methodname first - in the display. Probably breaks if you're using class/staticmethods? - """ - - allow = lambda s: re.match(logMatch, s) and not re.match(logNotMatch, s) and \ - s not in ('__str__','__repr__') - - namesToCheck = cls.__dict__.keys() - - for name in namesToCheck: - if not allow(name): continue - # unbound methods show up as mere functions in the values of - # cls.__dict__,so we have to go through getattr - value = getattr(cls, name) - - if methodsAsFunctions and callable(value): - setattr(cls, name, logfunction(value)) - elif isinstance(value, types.MethodType): - #a normal instance method - if value.im_self == None: - setattr(cls, name, logmethod(value)) - - #class & static method are more complex. - #a class method - elif value.im_self == cls: - w = logmethod(value.im_func, - displayName="%s.%s" %(cls.__name__, value.__name__)) - setattr(cls, name, classmethod(w)) - else: assert False - - #a static method - elif isinstance(value, types.FunctionType): - w = logfunction(value, - displayName="%s.%s" %(cls.__name__, value.__name__)) - setattr(cls, name, staticmethod(w)) - return cls - -class LogMetaClass(type): - """ - Alternative to logclass(), you set this as a class's __metaclass__. - - It will not work if the metaclass has already been overridden (e.g. 
- schema.Item or zope.interface (used in Twisted) - - Also, it should fail for class/staticmethods, that hasnt been added here - yet. - """ - - def __new__(cls,classname,bases,classdict): - logmatch = re.compile(classdict.get('logMatch','.*')) - lognotmatch = re.compile(classdict.get('logNotMatch', 'nevermatchthisstringasdfasdf')) - - for attr,item in classdict.items(): - if callable(item) and logmatch.match(attr) and not lognotmatch.match(attr): - classdict['_H_%s'%attr] = item # rebind the method - classdict[attr] = logmethod(item) # replace method by wrapper - - return type.__new__(cls,classname,bases,classdict) - - - -# ---------------------------- Tests and examples ---------------------------- - -if __name__=='__main__': - print; print "------------------- single function logging ---------------" - @logfunction - def test(): - return 42 - - test() - - print; print "------------------- single method logging -----------------" - class Test1(object): - def __init__(self): - self.a = 10 - - @logmethod - def add(self,a,b): return a+b - - @logmethod - def fac(self,val): - if val == 1: - return 1 - else: - return val * self.fac(val-1) - - @logfunction - def fac2(self, val): - if val == 1: - return 1 - else: - return val * self.fac2(val-1) - - t = Test1() - t.add(5,6) - t.fac(4) - print "--- tagged as @logfunction, doesn't understand 'self' is special:" - t.fac2(4) - - - print; print """-------------------- class "decorator" usage ------------------""" - class Test2(object): - #will be ignored - def __init__(self): - self.a = 10 - def ignoreThis(self): pass - - - def add(self,a,b):return a+b - def fac(self,val): - if val == 1: - return 1 - else: - return val * self.fac(val-1) - - Test2 = logclass(Test2, logMatch='fac|add') - - t2 = Test2() - t2.add(5,6) - t2.fac(4) - t2.ignoreThis() - - - print; print "-------------------- metaclass usage ------------------" - class Test3(object): - __metaclass__ = LogMetaClass - logNotMatch = 'ignoreThis' - - def __init__(self): pass - - def fac(self,val): - if val == 1: - return 1 - else: - return val * self.fac(val-1) - def ignoreThis(self): pass - t3 = Test3() - t3.fac(4) - t3.ignoreThis() - - print; print "-------------- testing static & classmethods --------------" - class Test4(object): - @classmethod - def cm(cls, a, b): - print cls - return a+b - - def im(self, a, b): - print self - return a+b - - @staticmethod - def sm(a,b): return a+b - - Test4 = logclass(Test4) - - Test4.cm(4,3) - Test4.sm(4,3) - - t4 = Test4() - t4.im(4,3) - t4.sm(4,3) - t4.cm(4,3) - - #print; print "-------------- static & classmethods: where to put decorators? 
--------------" - #class Test5(object): - #@classmethod - #@logmethod - #def cm(cls, a, b): - #print cls - #return a+b - #@logmethod - #def im(self, a, b): - #print self - #return a+b - - #@staticmethod - #@logfunction - #def sm(a,b): return a+b - - - #Test5.cm(4,3) - #Test5.sm(4,3) - - #t5 = Test5() - #t5.im(4,3) - #t5.sm(4,3) - #t5.cm(4,3) diff --git a/manifold/util/callback.py b/manifold/util/callback.py deleted file mode 100644 index 03e82806..00000000 --- a/manifold/util/callback.py +++ /dev/null @@ -1,49 +0,0 @@ -from manifold.operators import LAST_RECORD -import threading - -#------------------------------------------------------------------ -# Class callback -#------------------------------------------------------------------ - -class Callback: - def __init__(self, deferred=None, router=None, cache_id=None): - #def __init__(self, deferred=None, event=None, router=None, cache_id=None): - self.results = [] - self._deferred = deferred - - #if not self.event: - self.event = threading.Event() - #else: - # self.event = event - - # Used for caching... - self.router = router - self.cache_id = cache_id - - def __call__(self, value): - # End of the list of records sent by Gateway - if value == LAST_RECORD: - if self.cache_id: - # Add query results to cache (expires in 30min) - #print "Result added to cached under id", self.cache_id - self.router.cache[self.cache_id] = (self.results, time.time() + CACHE_LIFETIME) - - if self._deferred: - # Send results back using deferred object - self._deferred.callback(self.results) - else: - # Not using deferred, trigger the event to return results - self.event.set() - return self.event - - # Not LAST_RECORD add the value to the results - self.results.append(value) - - def wait(self): - self.event.wait() - self.event.clear() - - def get_results(self): - self.wait() - return self.results - diff --git a/manifold/util/clause.py b/manifold/util/clause.py deleted file mode 100644 index 670a689a..00000000 --- a/manifold/util/clause.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Implements a clause -# - a "tree" (more precisely a predecessor map, typically computed thanks to a DFS) -# - a set of needed fields (those queried by the user) -# -# Copyright (C) UPMC Paris Universitas -# Authors: -# Jordan Augé -# Marc-Olivier Buob - -import pyparsing as pp -import operator, re - -from manifold.util.predicate import Predicate -from types import StringTypes - -# XXX When to use Keyword vs. Regex vs. CaselessLiteral -# XXX capitalization ? - -# Instead of CaselessLiteral, try using CaselessKeyword. Keywords are better -# choice for grammar keywords, since they inherently avoid mistaking the leading -# 'in' of 'inside' as the keyword 'in' in your grammar. 
- - -class Clause(object): - - def __new__(cls, *args, **kwargs): - if len(args) == 1 and isinstance(args[0], StringTypes): - return ClauseStringParser().parse(args[0]) - return super(Clause, cls).__new__(cls, *args, **kwargs) - - def __init__(self, *args, **kwargs): - if len(args) == 2: - # unary - self.operator = Predicate.operators[args[0]] - self.operands = [args[1]] - elif len(args) == 3: - self.operator = Predicate.operators[args[1]] - self.operands = [args[0], args[2]] - else: - raise Exception, "Clause can only be unary or binary" - - def opstr(self, operator): - ops = [string for string, op in Predicate.operators.items() if op == operator] - return ops[0] if ops else '' - - def __repr__(self): - if len(self.operands) == 1: - return "%s(%s)" % (self.operator, self.operands[0]) - else: - return "(%s %s %s)" % (self.operands[0], self.opstr(self.operator), self.operands[1]) - -class ClauseStringParser(object): - - def __init__(self): - """ - BNF HERE - """ - - #integer = pp.Word(nums) - #floatNumber = pp.Regex(r'\d+(\.\d*)?([eE]\d+)?') - point = pp.Literal( "." ) - e = pp.CaselessLiteral( "E" ) - - # Regex string representing the set of possible operators - # Example : ">=|<=|!=|>|<|=" - OPERATOR_RX = '|'.join([re.sub('\|', '\|', o) for o in Predicate.operators.keys()]) - - # predicate - field = pp.Word(pp.alphanums + '_') - operator = pp.Regex(OPERATOR_RX).setName("operator") - value = pp.QuotedString('"') #| pp.Combine( pp.Word( "+-"+ pp.nums, pp.nums) + pp.Optional( point + pp.Optional( pp.Word( pp.nums ) ) ) + pp.Optional( e + pp.Word( "+-"+pp.nums, pp.nums ) ) ) - - predicate = (field + operator + value).setParseAction(self.handlePredicate) - - # clause of predicates - and_op = pp.CaselessLiteral("and") | pp.Keyword("&&") - or_op = pp.CaselessLiteral("or") | pp.Keyword("||") - not_op = pp.Keyword("!") - - predicate_precedence_list = [ - (not_op, 1, pp.opAssoc.RIGHT, lambda x: self.handleClause(*x)), - (and_op, 2, pp.opAssoc.LEFT, lambda x: self.handleClause(*x)), - (or_op, 2, pp.opAssoc.LEFT, lambda x: self.handleClause(*x)) - ] - clause = pp.operatorPrecedence(predicate, predicate_precedence_list) - - self.bnf = clause - - def handlePredicate(self, args): - return Predicate(*args) - - def handleClause(self, args): - return Clause(*args) - - def parse(self, string): - return self.bnf.parseString(string,parseAll=True) - -if __name__ == "__main__": - print ClauseStringParser().parse('country == "Europe" || ts > "01-01-2007" && country == "France"') - print Clause('country == "Europe" || ts > "01-01-2007" && country == "France"') diff --git a/manifold/util/colors.py b/manifold/util/colors.py deleted file mode 100644 index 82639bbf..00000000 --- a/manifold/util/colors.py +++ /dev/null @@ -1,38 +0,0 @@ -# ANSI escape codes for terminals. 
-# X11 xterm: always works, all platforms -# cygwin dosbox: run through |cat and then colors work -# linux: works on console & gnome-terminal -# mac: untested - -BLACK = "\033[0;30m" -BLUE = "\033[0;34m" -GREEN = "\033[0;32m" -CYAN = "\033[0;36m" -RED = "\033[0;31m" -PURPLE = "\033[0;35m" -BROWN = "\033[0;33m" -GRAY = "\033[0;37m" -BOLDGRAY = "\033[1;30m" -BOLDBLUE = "\033[1;34m" -BOLDGREEN = "\033[1;32m" -BOLDCYAN = "\033[1;36m" -BOLDRED = "\033[1;31m" -BOLDPURPLE = "\033[1;35m" -BOLDYELLOW = "\033[1;33m" -WHITE = "\033[1;37m" - -MYGREEN = '\033[92m' -MYBLUE = '\033[94m' -MYWARNING = '\033[93m' -MYRED = '\033[91m' -MYHEADER = '\033[95m' -MYEND = '\033[0m' - -NORMAL = "\033[0m" - -if __name__ == '__main__': - # Display color names in their color - for name, color in locals().items(): - if name.startswith('__'): continue - print color, name, MYEND - diff --git a/manifold/util/daemon.py b/manifold/util/daemon.py deleted file mode 100644 index 2e5d760e..00000000 --- a/manifold/util/daemon.py +++ /dev/null @@ -1,343 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Daemon: superclass used to implement a daemon easily -# -# Copyright (C)2009-2012, UPMC Paris Universitas -# Authors: -# Marc-Olivier Buob - -# see also: http://www.jejik.com/files/examples/daemon3x.py - -# This is used to import the daemon package instead of the local module which is -# named identically... -from __future__ import absolute_import - -from manifold.util.singleton import Singleton -from manifold.util.log import Log -from manifold.util.options import Options - -import atexit, os, signal, lockfile, logging, sys - -class Daemon(object): - __metaclass__ = Singleton - - DEFAULTS = { - # Running - "uid" : os.getuid(), - "gid" : os.getgid(), - "working_directory" : "/", - "debugmode" : False, - "no_daemon" : False, - "pid_filename" : "/var/run/%s.pid" % Options().get_name() - } - - #------------------------------------------------------------------------- - # Checks - #------------------------------------------------------------------------- - - def check_python_daemon(self): - """ - \brief Check whether python-daemon is properly installed - \return True if everything is file, False otherwise - """ - # http://www.python.org/dev/peps/pep-3143/ - ret = False - try: - import daemon - getattr(daemon, "DaemonContext") - ret = True - except AttributeError, e: - print e - # daemon and python-daemon conflict with each other - Log.critical("Please install python-daemon instead of daemon. 
Remove daemon first.") - except ImportError: - Log.critical("Please install python-daemon - easy_install python-daemon.") - return ret - - #------------------------------------------------------------------------ - # Initialization - #------------------------------------------------------------------------ - - def make_handler_rsyslog(self, rsyslog_host, rsyslog_port, log_level): - """ - \brief (Internal usage) Prepare logging via rsyslog - \param rsyslog_host The hostname of the rsyslog server - \param rsyslog_port The port of the rsyslog server - \param log_level Log level - """ - # Prepare the handler - shandler = handlers.SysLogHandler( - (rsyslog_host, rsyslog_port), - facility = handlers.SysLogHandler.LOG_DAEMON - ) - - # The log file must remain open while daemonizing - self.files_to_keep.append(shandler.socket) - self.prepare_handler(shandler, log_level) - return shandler - - def make_handler_locallog(self, log_filename, log_level): - """ - \brief (Internal usage) Prepare local logging - \param log_filename The file in which we write the logs - \param log_level Log level - """ - # Create directory in which we store the log file - log_dir = os.path.dirname(log_filename) - if not os.path.exists(log_dir): - try: - os.makedirs(log_dir) - except OSError, why: - log_error("OS error: %s" % why) - - # Prepare the handler - shandler = logging.handlers.RotatingFileHandler( - log_filename, - backupCount = 0 - ) - - # The log file must remain open while daemonizing - self.files_to_keep.append(shandler.stream) - self.prepare_handler(shandler, log_level) - return shandler - - def prepare_handler(self, shandler, log_level): - """ - \brief (Internal usage) - \param shandler Handler used to log information - \param log_level Log level - """ - shandler.setLevel(log_level) - formatter = logging.Formatter("%(asctime)s: %(name)s: %(levelname)s %(message)s") - shandler.setFormatter(formatter) - self.log.addHandler(shandler) - self.log.setLevel(getattr(logging, log_level, logging.INFO)) - - def __init__( - self, - #daemon_name, - terminate_callback = None - #uid = os.getuid(), - #gid = os.getgid(), - #working_directory = "/", - #pid_filename = None, - #no_daemon = False, - #debug = False, - #log = None, # logging.getLogger("plop") - #rsyslog_host = "localhost", # Pass None if no rsyslog server - #rsyslog_port = 514, - #log_file = None, - #log_level = logging.INFO - ): - """ - \brief Constructor - \param daemon_name The name of the daemon - \param uid UID used to run the daemon - \param gid GID used to run the daemon - \param working_directory Working directory used to run the daemon. - Example: /var/lib/foo/ - \param pid_filename Absolute path of the PID file - Example: /var/run/foo.pid - (ignored if no_daemon == True) - \param no_daemon Do not detach the daemon from the terminal - \param debug Run daemon in debug mode - \param log The logger, pass None if unused - Example: logging.getLogger('foo')) - \param rsyslog_host Rsyslog hostname, pass None if unused. - If rsyslog_host is set to None, log are stored locally - \param rsyslog_port Rsyslog port - \param log_file Absolute path of the local log file. 
- Example: /var/log/foo.log) - \param log_level Log level - Example: logging.INFO - """ - - # Daemon parameters - #self.daemon_name = daemon_name - self.terminate_callback = terminate_callback - #Options().uid = uid - #Options().gid = gid - #Options().working_directory = working_directory - #self.pid_filename = None if no_daemon else pid_filename - #Options().no_daemon = no_daemon - #Options().lock_file = None - #Options().debug = debug - #self.log = log - #self.rsyslog_host = rsyslog_host - #self.rsyslog_port = rsyslog_port - #self.log_file = log_file - #self.log_level = log_level - - # Reference which file descriptors must remain opened while - # daemonizing (for instance the file descriptor related to - # the logger) - self.files_to_keep = [] - - # Initialize self.log (require self.files_to_keep) - #if self.log: # for debugging by using stdout, log may be equal to None - # if rsyslog_host: - # shandler = self.make_handler_rsyslog( - # rsyslog_host, - # rsyslog_port, - # log_level - # ) - # elif log_file: - # shandler = self.make_handler_locallog( - # log_file, - # log_level - # ) - - @classmethod - def init_options(self): - opt = Options() - - opt.add_option( - "--uid", dest = "uid", - help = "UID used to run the dispatcher.", - default = self.DEFAULTS['uid'] - ) - opt.add_option( - "--gid", dest = "gid", - help = "GID used to run the dispatcher.", - default = self.DEFAULTS['gid'] - ) - opt.add_option( - "-w", "--working-directory", dest = "working_directory", - help = "Working directory.", - default = self.DEFAULTS['working_directory'] - ) - opt.add_option( - "-D", "--debugmode", action = "store_false", dest = "debugmode", - help = "Daemon debug mode (useful for developers).", - default = self.DEFAULTS['debugmode'] - ) - opt.add_option( - "-n", "--no-daemon", action = "store_true", dest = "no_daemon", - help = "Run as daemon (detach from terminal).", - default = self.DEFAULTS["no_daemon"] - ) - opt.add_option( - "-i", "--pid-file", dest = "pid_filename", - help = "Absolute path to the pid-file to use when running as daemon.", - default = self.DEFAULTS['pid_filename'] - ) - - - - #------------------------------------------------------------------------ - # Daemon stuff - #------------------------------------------------------------------------ - - def remove_pid_file(self): - """ - \brief Remove the pid file (internal usage) - """ - # The lock file is implicitely released while removing the pid file - Log.debug("Removing %s" % Options().pid_filename) - if os.path.exists(Options().pid_filename) == True: - os.remove(Options().pid_filename) - - def make_pid_file(self): - """ - \brief Create a pid file in which we store the PID of the daemon if needed - """ - if Options().pid_filename and Options().no_daemon == False: - atexit.register(self.remove_pid_file) - file(Options().pid_filename, "w+").write("%s\n" % str(os.getpid())) - - def get_pid_from_pid_file(self): - """ - \brief Retrieve the PID of the daemon thanks to the pid file. 
- \return None if the pid file is not readable or does not exists - """ - pid = None - if Options().pid_filename: - try: - f_pid = file(Options().pid_filename, "r") - pid = int(f_pid.read().strip()) - f_pid.close() - except IOError: - pid = None - return pid - - def make_lock_file(self): - """ - \brief Prepare the lock file required to manage the pid file - Initialize Options().lock_file - """ - if Options().pid_filename and Options().no_daemon == False: - Log.debug("Daemonizing using pid file '%s'" % Options().pid_filename) - Options().lock_file = lockfile.FileLock(Options().pid_filename) - if Options().lock_file.is_locked() == True: - log_error("'%s' is already running ('%s' is locked)." % (Options().get_name(), Options().pid_filename)) - self.terminate() - Options().lock_file.acquire() - else: - Options().lock_file = None - - def start(self): - """ - \brief Start the daemon - """ - # Check whether daemon module is properly installed - if self.check_python_daemon() == False: - self.terminate() - import daemon - - # Prepare Options().lock_file - self.make_lock_file() - - # Prepare the daemon context - dcontext = daemon.DaemonContext( - detach_process = (not Options().no_daemon), - working_directory = Options().working_directory, - pidfile = Options().lock_file if not Options().no_daemon else None, - stdin = sys.stdin, - stdout = sys.stdout, - stderr = sys.stderr, - uid = Options().uid, - gid = Options().gid, - files_preserve = Log().files_to_keep - ) - - # Prepare signal handling to stop properly if the daemon is killed - # Note that signal.SIGKILL can't be handled: - # http://crunchtools.com/unixlinux-signals-101/ - dcontext.signal_map = { - signal.SIGTERM : self.signal_handler, - signal.SIGQUIT : self.signal_handler, - signal.SIGINT : self.signal_handler - } - - if Options().debugmode == True: - self.main() - else: - with dcontext: - self.make_pid_file() - try: - self.main() - except Exception, why: - Log.error("Unhandled exception in start: %s" % why) - - def signal_handler(self, signal_id, frame): - """ - \brief Stop the daemon (signal handler) - The lockfile is implicitly released by the daemon package - \param signal_id The integer identifying the signal - (see also "man 7 signal") - Example: 15 if the received signal is signal.SIGTERM - \param frame - """ - self.terminate() - - def stop(self): - Log.debug("Stopping '%s'" % self.daemon_name) - - def terminate(self): - if self.terminate_callback: - self.terminate_callback() - else: - sys.exit(0) - -Daemon.init_options() diff --git a/manifold/util/dfs.py b/manifold/util/dfs.py deleted file mode 100644 index 019645d6..00000000 --- a/manifold/util/dfs.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Depth first search algorithm -# Based on http://www.boost.org/doc/libs/1_52_0/libs/graph/doc/depth_first_search.html -# -# Copyright (C) UPMC Paris Universitas -# Authors: -# Marc-Olivier Buob -# Jordan Augé - -class dfs_color: - WHITE = 1 # not yet visited - GRAY = 2 # currently visited - BLACK = 3 # visited - -#DFS(G) -# for each vertex u in V -# color[u] := WHITE -# p[u] = u -# end for -# time := 0 -# if there is a starting vertex s -# call DFS-VISIT(G, s) -# for each vertex u in V -# if color[u] = WHITE -# call DFS-VISIT(G, u) -# end for -# return (p,d_time,f_time) - -def dfs(graph, root, exclude_uv=None): - """ - \brief Run the DFS algorithm - \param graph The graph we explore - \param root The starting vertex - \return A dictionnary which maps each vertex of the tree - to its predecessor, None 
otherwise. - Only the root node as a predecessor equal to None. - Nodes not referenced in this dictionnary do not - belong to the tree. - """ - # Initialization - map_vertex_color = {} - map_vertex_pred = {} - for u in graph.nodes(): - map_vertex_color[u] = dfs_color.WHITE - map_vertex_pred[u] = None - - # Recursive calls - if not exclude_uv: - exclude_uv = lambda u,v: False - dfs_visit(graph, root, map_vertex_color, map_vertex_pred, exclude_uv) - - # Remove from map_vertex_pred the vertices having no - # predecessor but the root node. - for v, u in map_vertex_pred.items(): - if u == None and v != root: - del map_vertex_pred[v] - - return map_vertex_pred - -#DFS-VISIT(G, u) -# color[u] := GRAY -# d_time[u] := time := time + 1 -# for each v in Adj[u] -# if (color[v] = WHITE) -# p[v] = u -# call DFS-VISIT(G, v) -# else if (color[v] = GRAY) -# ... -# else if (color[v] = BLACK) -# ... -# end for -# color[u] := BLACK -# f_time[u] := time := time + 1 - -def dfs_visit(graph, u, map_vertex_color, map_vertex_pred, exclude_uv): - """ - \brief Internal usage (DFS implementation) - \param graph The graph we explore - \param u The current node - \param map_vertex_color: maps each vertex to a color - - dfs_color.WHITE: iif the vertex is not reachable from the root node - - dfs_color.BLACK: otherwise - \param map_vertex_pred: maps each vertex to its predecessor (if any) visited - during the DFS exploration, None otherwise - """ - map_vertex_color[u] = dfs_color.GRAY - for v in graph.successors(u): - color_v = map_vertex_color[v] - if color_v == dfs_color.WHITE and not exclude_uv(u, v): - map_vertex_pred[v] = u - dfs_visit(graph, v, map_vertex_color, map_vertex_pred, exclude_uv) - map_vertex_color[u] = dfs_color.BLACK - diff --git a/manifold/util/enum.py b/manifold/util/enum.py deleted file mode 100644 index 4f3c577b..00000000 --- a/manifold/util/enum.py +++ /dev/null @@ -1,7 +0,0 @@ -class Enum(object): - def __init__(self, *keys): - self.__dict__.update(zip(keys, range(len(keys)))) - self.invmap = {v:k for k, v in self.__dict__.items()} - - def get_str(self, value): - return self.invmap[value] diff --git a/manifold/util/frozendict.py b/manifold/util/frozendict.py deleted file mode 100644 index 32902cb7..00000000 --- a/manifold/util/frozendict.py +++ /dev/null @@ -1,47 +0,0 @@ -import copy - -class frozendict(dict): - def _blocked_attribute(obj): - raise AttributeError, "A frozendict cannot be modified." 
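For reference, the dfs() helper removed here returns a vertex-to-predecessor map of the DFS tree rooted at the given vertex; the graph object only needs nodes() and successors(). A minimal sketch, assuming the installed Manifold package keeps the same manifold.util.dfs API, with networkx used as one possible graph backend:

    # Hypothetical sketch (Python 2 era); networkx is an assumption, any
    # object exposing nodes() and successors() would do.
    import networkx as nx
    from manifold.util.dfs import dfs

    g = nx.DiGraph()
    g.add_edges_from([("a", "b"), ("b", "c"), ("a", "c")])

    # The root maps to None; vertices not reached from the root are absent.
    preds = dfs(g, "a")
    print(sorted(preds.items()))   # e.g. [('a', None), ('b', 'a'), ('c', 'b')]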
- _blocked_attribute = property(_blocked_attribute) - - __delitem__ = __setitem__ = clear = _blocked_attribute - pop = popitem = setdefault = update = _blocked_attribute - - def __new__(cls, *args, **kw): - new = dict.__new__(cls) - - args_ = [] - for arg in args: - if isinstance(arg, dict): - arg = copy.copy(arg) - for k, v in arg.items(): - if isinstance(v, dict): - arg[k] = frozendict(v) - elif isinstance(v, list): - v_ = list() - for elm in v: - if isinstance(elm, dict): - v_.append( frozendict(elm) ) - else: - v_.append( elm ) - arg[k] = tuple(v_) - args_.append( arg ) - else: - args_.append( arg ) - - dict.__init__(new, *args_, **kw) - return new - - def __init__(self, *args, **kw): - pass - - def __hash__(self): - try: - return self._cached_hash - except AttributeError: - h = self._cached_hash = hash(tuple(sorted(self.items()))) - return h - - def __repr__(self): - return "frozendict(%s)" % dict.__repr__(self) diff --git a/manifold/util/functional.py b/manifold/util/functional.py deleted file mode 100644 index a4119398..00000000 --- a/manifold/util/functional.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Borrowed from Django.""" - -from threading import Lock - -class LazyObject(object): - """ - A wrapper for another class that can be used to delay instantiation of the - wrapped class. - - By subclassing, you have the opportunity to intercept and alter the - instantiation. If you don't need to do that, use SimpleLazyObject. - """ - def __init__(self): - self._wrapped = None - self._lock = Lock() - - def __getattr__(self, name): - self._lock.acquire() - if self._wrapped is None: - self._setup() - self._lock.release() - return getattr(self._wrapped, name) - - def __setattr__(self, name, value): - if name in ["_wrapped", "_lock"]: - # Assign to __dict__ to avoid infinite __setattr__ loops. - self.__dict__[name] = value - else: - if self._wrapped is None: - self._setup() - setattr(self._wrapped, name, value) - - def __delattr__(self, name): - if name == "_wrapped": - raise TypeError("can't delete _wrapped.") - if self._wrapped is None: - self._setup() - delattr(self._wrapped, name) - - def _setup(self): - """ - Must be implemented by subclasses to initialise the wrapped object. - """ - raise NotImplementedError - - # introspection support: - __members__ = property(lambda self: self.__dir__()) - - def __dir__(self): - if self._wrapped is None: - self._setup() - return dir(self._wrapped) - diff --git a/manifold/util/ipaddr.py b/manifold/util/ipaddr.py deleted file mode 100644 index ad27ae9d..00000000 --- a/manifold/util/ipaddr.py +++ /dev/null @@ -1,1897 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2007 Google Inc. -# Licensed to PSF under a Contributor Agreement. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. See the License for the specific language governing -# permissions and limitations under the License. - -"""A fast, lightweight IPv4/IPv6 manipulation library in Python. - -This library is used to create/poke/manipulate IPv4 and IPv6 addresses -and networks. 
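For reference, the frozendict class removed here is a hashable, immutable dict, useful as a cache key or set member. A small sketch of its behaviour, assuming the installed Manifold package still exposes manifold.util.frozendict; the field values are illustrative:

    # Hypothetical sketch (Python 2 era).
    from manifold.util.frozendict import frozendict

    fd = frozendict({"key": "hostname", "op": "=", "value": "node1.example.org"})
    cache = {fd: "cached result"}       # hashable, so usable as a dict key
    try:
        fd["value"] = "other"           # all mutating methods are blocked
    except AttributeError as e:
        print(e)                        # "A frozendict cannot be modified."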
- -""" - -__version__ = '2.1.10' - -import struct - -IPV4LENGTH = 32 -IPV6LENGTH = 128 - - -class AddressValueError(ValueError): - """A Value Error related to the address.""" - - -class NetmaskValueError(ValueError): - """A Value Error related to the netmask.""" - - -def IPAddress(address, version=None): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - version: An Integer, 4 or 6. If set, don't try to automatically - determine what the IP address type is. important for things - like IPAddress(1), which could be IPv4, '0.0.0.1', or IPv6, - '::1'. - - Returns: - An IPv4Address or IPv6Address object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. - - """ - if version: - if version == 4: - return IPv4Address(address) - elif version == 6: - return IPv6Address(address) - - try: - return IPv4Address(address) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Address(address) - except (AddressValueError, NetmaskValueError): - pass - - raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % - address) - - -def IPNetwork(address, version=None, strict=False): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - version: An Integer, if set, don't try to automatically - determine what the IP address type is. important for things - like IPNetwork(1), which could be IPv4, '0.0.0.1/32', or IPv6, - '::1/128'. - - Returns: - An IPv4Network or IPv6Network object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. Or if a strict network was requested and a strict - network wasn't given. - - """ - if version: - if version == 4: - return IPv4Network(address, strict) - elif version == 6: - return IPv6Network(address, strict) - - try: - return IPv4Network(address, strict) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Network(address, strict) - except (AddressValueError, NetmaskValueError): - pass - - raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % - address) - - -def v4_int_to_packed(address): - """The binary representation of this address. - - Args: - address: An integer representation of an IPv4 IP address. - - Returns: - The binary representation of this address. - - Raises: - ValueError: If the integer is too large to be an IPv4 IP - address. - """ - if address > _BaseV4._ALL_ONES: - raise ValueError('Address too large for IPv4') - return Bytes(struct.pack('!I', address)) - - -def v6_int_to_packed(address): - """The binary representation of this address. - - Args: - address: An integer representation of an IPv4 IP address. - - Returns: - The binary representation of this address. - """ - return Bytes(struct.pack('!QQ', address >> 64, address & (2**64 - 1))) - - -def _find_address_range(addresses): - """Find a sequence of addresses. - - Args: - addresses: a list of IPv4 or IPv6 addresses. - - Returns: - A tuple containing the first and last IP addresses in the sequence. 
- - """ - first = last = addresses[0] - for ip in addresses[1:]: - if ip._ip == last._ip + 1: - last = ip - else: - break - return (first, last) - -def _get_prefix_length(number1, number2, bits): - """Get the number of leading bits that are same for two numbers. - - Args: - number1: an integer. - number2: another integer. - bits: the maximum number of bits to compare. - - Returns: - The number of leading bits that are the same for two numbers. - - """ - for i in range(bits): - if number1 >> i == number2 >> i: - return bits - i - return 0 - -def _count_righthand_zero_bits(number, bits): - """Count the number of zero bits on the right hand side. - - Args: - number: an integer. - bits: maximum number of bits to count. - - Returns: - The number of zero bits on the right hand side of the number. - - """ - if number == 0: - return bits - for i in range(bits): - if (number >> i) % 2: - return i - -def summarize_address_range(first, last): - """Summarize a network range given the first and last IP addresses. - - Example: - >>> summarize_address_range(IPv4Address('1.1.1.0'), - IPv4Address('1.1.1.130')) - [IPv4Network('1.1.1.0/25'), IPv4Network('1.1.1.128/31'), - IPv4Network('1.1.1.130/32')] - - Args: - first: the first IPv4Address or IPv6Address in the range. - last: the last IPv4Address or IPv6Address in the range. - - Returns: - The address range collapsed to a list of IPv4Network's or - IPv6Network's. - - Raise: - TypeError: - If the first and last objects are not IP addresses. - If the first and last objects are not the same version. - ValueError: - If the last object is not greater than the first. - If the version is not 4 or 6. - - """ - if not (isinstance(first, _BaseIP) and isinstance(last, _BaseIP)): - raise TypeError('first and last must be IP addresses, not networks') - if first.version != last.version: - raise TypeError("%s and %s are not of the same version" % ( - str(first), str(last))) - if first > last: - raise ValueError('last IP address must be greater than first') - - networks = [] - - if first.version == 4: - ip = IPv4Network - elif first.version == 6: - ip = IPv6Network - else: - raise ValueError('unknown IP version') - - ip_bits = first._max_prefixlen - first_int = first._ip - last_int = last._ip - while first_int <= last_int: - nbits = _count_righthand_zero_bits(first_int, ip_bits) - current = None - while nbits >= 0: - addend = 2**nbits - 1 - current = first_int + addend - nbits -= 1 - if current <= last_int: - break - prefix = _get_prefix_length(first_int, current, ip_bits) - net = ip('%s/%d' % (str(first), prefix)) - networks.append(net) - if current == ip._ALL_ONES: - break - first_int = current + 1 - first = IPAddress(first_int, version=first._version) - return networks - -def _collapse_address_list_recursive(addresses): - """Loops through the addresses, collapsing concurrent netblocks. - - Example: - - ip1 = IPv4Network('1.1.0.0/24') - ip2 = IPv4Network('1.1.1.0/24') - ip3 = IPv4Network('1.1.2.0/24') - ip4 = IPv4Network('1.1.3.0/24') - ip5 = IPv4Network('1.1.4.0/24') - ip6 = IPv4Network('1.1.0.1/22') - - _collapse_address_list_recursive([ip1, ip2, ip3, ip4, ip5, ip6]) -> - [IPv4Network('1.1.0.0/22'), IPv4Network('1.1.4.0/24')] - - This shouldn't be called directly; it is called via - collapse_address_list([]). - - Args: - addresses: A list of IPv4Network's or IPv6Network's - - Returns: - A list of IPv4Network's or IPv6Network's depending on what we were - passed. 
- - """ - ret_array = [] - optimized = False - - for cur_addr in addresses: - if not ret_array: - ret_array.append(cur_addr) - continue - if cur_addr in ret_array[-1]: - optimized = True - elif cur_addr == ret_array[-1].supernet().subnet()[1]: - ret_array.append(ret_array.pop().supernet()) - optimized = True - else: - ret_array.append(cur_addr) - - if optimized: - return _collapse_address_list_recursive(ret_array) - - return ret_array - - -def collapse_address_list(addresses): - """Collapse a list of IP objects. - - Example: - collapse_address_list([IPv4('1.1.0.0/24'), IPv4('1.1.1.0/24')]) -> - [IPv4('1.1.0.0/23')] - - Args: - addresses: A list of IPv4Network or IPv6Network objects. - - Returns: - A list of IPv4Network or IPv6Network objects depending on what we - were passed. - - Raises: - TypeError: If passed a list of mixed version objects. - - """ - i = 0 - addrs = [] - ips = [] - nets = [] - - # split IP addresses and networks - for ip in addresses: - if isinstance(ip, _BaseIP): - if ips and ips[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) - ips.append(ip) - elif ip._prefixlen == ip._max_prefixlen: - if ips and ips[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) - ips.append(ip.ip) - else: - if nets and nets[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) - nets.append(ip) - - # sort and dedup - ips = sorted(set(ips)) - nets = sorted(set(nets)) - - while i < len(ips): - (first, last) = _find_address_range(ips[i:]) - i = ips.index(last) + 1 - addrs.extend(summarize_address_range(first, last)) - - return _collapse_address_list_recursive(sorted( - addrs + nets, key=_BaseNet._get_networks_key)) - -# backwards compatibility -CollapseAddrList = collapse_address_list - -# We need to distinguish between the string and packed-bytes representations -# of an IP address. For example, b'0::1' is the IPv4 address 48.58.58.49, -# while '0::1' is an IPv6 address. -# -# In Python 3, the native 'bytes' type already provides this functionality, -# so we use it directly. For earlier implementations where bytes is not a -# distinct type, we create a subclass of str to serve as a tag. -# -# Usage example (Python 2): -# ip = ipaddr.IPAddress(ipaddr.Bytes('xxxx')) -# -# Usage example (Python 3): -# ip = ipaddr.IPAddress(b'xxxx') -try: - if bytes is str: - raise TypeError("bytes is not a distinct type") - Bytes = bytes -except (NameError, TypeError): - class Bytes(str): - def __repr__(self): - return 'Bytes(%s)' % str.__repr__(self) - -def get_mixed_type_key(obj): - """Return a key suitable for sorting between networks and addresses. - - Address and Network objects are not sortable by default; they're - fundamentally different so the expression - - IPv4Address('1.1.1.1') <= IPv4Network('1.1.1.1/24') - - doesn't make any sense. There are some times however, where you may wish - to have ipaddr sort these for you anyway. If you need to do this, you - can use this function as the key= argument to sorted(). - - Args: - obj: either a Network or Address object. - Returns: - appropriate key. 
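For reference, the module-level helpers of the vendored ipaddr 2.1.10 shown above (IPAddress, IPNetwork, collapse_address_list) are the usual entry points. A short sketch, assuming the installed Manifold package keeps the module importable as manifold.util.ipaddr; the expected results come from the docstrings above:

    # Hypothetical sketch (Python 2 era).
    from manifold.util import ipaddr

    net  = ipaddr.IPNetwork("192.168.1.0/24")
    addr = ipaddr.IPAddress("192.168.1.42")
    print(addr in net)                  # True: address/network containment

    # Adjacent netblocks are merged into the enclosing supernet.
    print(ipaddr.collapse_address_list(
        [ipaddr.IPv4Network("1.1.0.0/24"), ipaddr.IPv4Network("1.1.1.0/24")]))
    # -> [IPv4Network('1.1.0.0/23')]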
- - """ - if isinstance(obj, _BaseNet): - return obj._get_networks_key() - elif isinstance(obj, _BaseIP): - return obj._get_address_key() - return NotImplemented - -class _IPAddrBase(object): - - """The mother class.""" - - def __index__(self): - return self._ip - - def __int__(self): - return self._ip - - def __hex__(self): - return hex(self._ip) - - @property - def exploded(self): - """Return the longhand version of the IP address as a string.""" - return self._explode_shorthand_ip_string() - - @property - def compressed(self): - """Return the shorthand version of the IP address as a string.""" - return str(self) - - -class _BaseIP(_IPAddrBase): - - """A generic IP object. - - This IP class contains the version independent methods which are - used by single IP addresses. - - """ - - def __eq__(self, other): - try: - return (self._ip == other._ip - and self._version == other._version) - except AttributeError: - return NotImplemented - - def __ne__(self, other): - eq = self.__eq__(other) - if eq is NotImplemented: - return NotImplemented - return not eq - - def __le__(self, other): - gt = self.__gt__(other) - if gt is NotImplemented: - return NotImplemented - return not gt - - def __ge__(self, other): - lt = self.__lt__(other) - if lt is NotImplemented: - return NotImplemented - return not lt - - def __lt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseIP): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self._ip != other._ip: - return self._ip < other._ip - return False - - def __gt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseIP): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self._ip != other._ip: - return self._ip > other._ip - return False - - # Shorthand for Integer addition and subtraction. This is not - # meant to ever support addition/subtraction of addresses. - def __add__(self, other): - if not isinstance(other, int): - return NotImplemented - return IPAddress(int(self) + other, version=self._version) - - def __sub__(self, other): - if not isinstance(other, int): - return NotImplemented - return IPAddress(int(self) - other, version=self._version) - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, str(self)) - - def __str__(self): - return '%s' % self._string_from_ip_int(self._ip) - - def __hash__(self): - return hash(hex(long(self._ip))) - - def _get_address_key(self): - return (self._version, self) - - @property - def version(self): - raise NotImplementedError('BaseIP has no version') - - -class _BaseNet(_IPAddrBase): - - """A generic IP object. - - This IP class contains the version independent methods which are - used by networks. - - """ - - def __init__(self, address): - self._cache = {} - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, str(self)) - - def iterhosts(self): - """Generate Iterator over usable hosts in a network. - - This is like __iter__ except it doesn't return the network - or broadcast addresses. 
- - """ - cur = int(self.network) + 1 - bcast = int(self.broadcast) - 1 - while cur <= bcast: - cur += 1 - yield IPAddress(cur - 1, version=self._version) - - def __iter__(self): - cur = int(self.network) - bcast = int(self.broadcast) - while cur <= bcast: - cur += 1 - yield IPAddress(cur - 1, version=self._version) - - def __getitem__(self, n): - network = int(self.network) - broadcast = int(self.broadcast) - if n >= 0: - if network + n > broadcast: - raise IndexError - return IPAddress(network + n, version=self._version) - else: - n += 1 - if broadcast + n < network: - raise IndexError - return IPAddress(broadcast + n, version=self._version) - - def __lt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseNet): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self.network != other.network: - return self.network < other.network - if self.netmask != other.netmask: - return self.netmask < other.netmask - return False - - def __gt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseNet): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self.network != other.network: - return self.network > other.network - if self.netmask != other.netmask: - return self.netmask > other.netmask - return False - - def __le__(self, other): - gt = self.__gt__(other) - if gt is NotImplemented: - return NotImplemented - return not gt - - def __ge__(self, other): - lt = self.__lt__(other) - if lt is NotImplemented: - return NotImplemented - return not lt - - def __eq__(self, other): - try: - return (self._version == other._version - and self.network == other.network - and int(self.netmask) == int(other.netmask)) - except AttributeError: - if isinstance(other, _BaseIP): - return (self._version == other._version - and self._ip == other._ip) - - def __ne__(self, other): - eq = self.__eq__(other) - if eq is NotImplemented: - return NotImplemented - return not eq - - def __str__(self): - return '%s/%s' % (str(self.ip), - str(self._prefixlen)) - - def __hash__(self): - return hash(int(self.network) ^ int(self.netmask)) - - def __contains__(self, other): - # always false if one is v4 and the other is v6. - if self._version != other._version: - return False - # dealing with another network. 
- if isinstance(other, _BaseNet): - return (self.network <= other.network and - self.broadcast >= other.broadcast) - # dealing with another address - else: - return (int(self.network) <= int(other._ip) <= - int(self.broadcast)) - - def overlaps(self, other): - """Tell if self is partly contained in other.""" - return self.network in other or self.broadcast in other or ( - other.network in self or other.broadcast in self) - - @property - def network(self): - x = self._cache.get('network') - if x is None: - x = IPAddress(self._ip & int(self.netmask), version=self._version) - self._cache['network'] = x - return x - - @property - def broadcast(self): - x = self._cache.get('broadcast') - if x is None: - x = IPAddress(self._ip | int(self.hostmask), version=self._version) - self._cache['broadcast'] = x - return x - - @property - def hostmask(self): - x = self._cache.get('hostmask') - if x is None: - x = IPAddress(int(self.netmask) ^ self._ALL_ONES, - version=self._version) - self._cache['hostmask'] = x - return x - - @property - def with_prefixlen(self): - return '%s/%d' % (str(self.ip), self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (str(self.ip), str(self.netmask)) - - @property - def with_hostmask(self): - return '%s/%s' % (str(self.ip), str(self.hostmask)) - - @property - def numhosts(self): - """Number of hosts in the current subnet.""" - return int(self.broadcast) - int(self.network) + 1 - - @property - def version(self): - raise NotImplementedError('BaseNet has no version') - - @property - def prefixlen(self): - return self._prefixlen - - def address_exclude(self, other): - """Remove an address from a larger block. - - For example: - - addr1 = IPNetwork('10.1.1.0/24') - addr2 = IPNetwork('10.1.1.0/26') - addr1.address_exclude(addr2) = - [IPNetwork('10.1.1.64/26'), IPNetwork('10.1.1.128/25')] - - or IPv6: - - addr1 = IPNetwork('::1/32') - addr2 = IPNetwork('::1/128') - addr1.address_exclude(addr2) = [IPNetwork('::0/128'), - IPNetwork('::2/127'), - IPNetwork('::4/126'), - IPNetwork('::8/125'), - ... - IPNetwork('0:0:8000::/33')] - - Args: - other: An IPvXNetwork object of the same type. - - Returns: - A sorted list of IPvXNetwork objects addresses which is self - minus other. - - Raises: - TypeError: If self and other are of difffering address - versions, or if other is not a network object. - ValueError: If other is not completely contained by self. - - """ - if not self._version == other._version: - raise TypeError("%s and %s are not of the same version" % ( - str(self), str(other))) - - if not isinstance(other, _BaseNet): - raise TypeError("%s is not a network object" % str(other)) - - if other not in self: - raise ValueError('%s not contained in %s' % (str(other), - str(self))) - if other == self: - return [] - - ret_addrs = [] - - # Make sure we're comparing the network of other. - other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)), - version=other._version) - - s1, s2 = self.subnet() - while s1 != other and s2 != other: - if other in s1: - ret_addrs.append(s2) - s1, s2 = s1.subnet() - elif other in s2: - ret_addrs.append(s1) - s1, s2 = s2.subnet() - else: - # If we got here, there's a bug somewhere. - assert True == False, ('Error performing exclusion: ' - 's1: %s s2: %s other: %s' % - (str(s1), str(s2), str(other))) - if s1 == other: - ret_addrs.append(s2) - elif s2 == other: - ret_addrs.append(s1) - else: - # If we got here, there's a bug somewhere. 
- assert True == False, ('Error performing exclusion: ' - 's1: %s s2: %s other: %s' % - (str(s1), str(s2), str(other))) - - return sorted(ret_addrs, key=_BaseNet._get_networks_key) - - def compare_networks(self, other): - """Compare two IP objects. - - This is only concerned about the comparison of the integer - representation of the network addresses. This means that the - host bits aren't considered at all in this method. If you want - to compare host bits, you can easily enough do a - 'HostA._ip < HostB._ip' - - Args: - other: An IP object. - - Returns: - If the IP versions of self and other are the same, returns: - - -1 if self < other: - eg: IPv4('1.1.1.0/24') < IPv4('1.1.2.0/24') - IPv6('1080::200C:417A') < IPv6('1080::200B:417B') - 0 if self == other - eg: IPv4('1.1.1.1/24') == IPv4('1.1.1.2/24') - IPv6('1080::200C:417A/96') == IPv6('1080::200C:417B/96') - 1 if self > other - eg: IPv4('1.1.1.0/24') > IPv4('1.1.0.0/24') - IPv6('1080::1:200C:417A/112') > - IPv6('1080::0:200C:417A/112') - - If the IP versions of self and other are different, returns: - - -1 if self._version < other._version - eg: IPv4('10.0.0.1/24') < IPv6('::1/128') - 1 if self._version > other._version - eg: IPv6('::1/128') > IPv4('255.255.255.0/24') - - """ - if self._version < other._version: - return -1 - if self._version > other._version: - return 1 - # self._version == other._version below here: - if self.network < other.network: - return -1 - if self.network > other.network: - return 1 - # self.network == other.network below here: - if self.netmask < other.netmask: - return -1 - if self.netmask > other.netmask: - return 1 - # self.network == other.network and self.netmask == other.netmask - return 0 - - def _get_networks_key(self): - """Network-only key function. - - Returns an object that identifies this address' network and - netmask. This function is a suitable "key" argument for sorted() - and list.sort(). - - """ - return (self._version, self.network, self.netmask) - - def _ip_int_from_prefix(self, prefixlen=None): - """Turn the prefix length netmask into a int for comparison. - - Args: - prefixlen: An integer, the prefix length. - - Returns: - An integer. - - """ - if not prefixlen and prefixlen != 0: - prefixlen = self._prefixlen - return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen) - - def _prefix_from_ip_int(self, ip_int, mask=32): - """Return prefix length from the decimal netmask. - - Args: - ip_int: An integer, the IP address. - mask: The netmask. Defaults to 32. - - Returns: - An integer, the prefix length. - - """ - while mask: - if ip_int & 1 == 1: - break - ip_int >>= 1 - mask -= 1 - - return mask - - def _ip_string_from_prefix(self, prefixlen=None): - """Turn a prefix length into a dotted decimal string. - - Args: - prefixlen: An integer, the netmask prefix length. - - Returns: - A string, the dotted decimal netmask string. - - """ - if not prefixlen: - prefixlen = self._prefixlen - return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen)) - - def iter_subnets(self, prefixlen_diff=1, new_prefix=None): - """The subnets which join to make the current subnet. - - In the case that self contains only one IP - (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 - for IPv6), return a list with just ourself. - - Args: - prefixlen_diff: An integer, the amount the prefix length - should be increased by. This should not be set if - new_prefix is also set. - new_prefix: The desired new prefix length. This must be a - larger number (smaller prefix) than the existing prefix. 
- This should not be set if prefixlen_diff is also set. - - Returns: - An iterator of IPv(4|6) objects. - - Raises: - ValueError: The prefixlen_diff is too small or too large. - OR - prefixlen_diff and new_prefix are both set or new_prefix - is a smaller number than the current prefix (smaller - number means a larger network) - - """ - if self._prefixlen == self._max_prefixlen: - yield self - return - - if new_prefix is not None: - if new_prefix < self._prefixlen: - raise ValueError('new prefix must be longer') - if prefixlen_diff != 1: - raise ValueError('cannot set prefixlen_diff and new_prefix') - prefixlen_diff = new_prefix - self._prefixlen - - if prefixlen_diff < 0: - raise ValueError('prefix length diff must be > 0') - new_prefixlen = self._prefixlen + prefixlen_diff - - if not self._is_valid_netmask(str(new_prefixlen)): - raise ValueError( - 'prefix length diff %d is invalid for netblock %s' % ( - new_prefixlen, str(self))) - - first = IPNetwork('%s/%s' % (str(self.network), - str(self._prefixlen + prefixlen_diff)), - version=self._version) - - yield first - current = first - while True: - broadcast = current.broadcast - if broadcast == self.broadcast: - return - new_addr = IPAddress(int(broadcast) + 1, version=self._version) - current = IPNetwork('%s/%s' % (str(new_addr), str(new_prefixlen)), - version=self._version) - - yield current - - def masked(self): - """Return the network object with the host bits masked out.""" - return IPNetwork('%s/%d' % (self.network, self._prefixlen), - version=self._version) - - def subnet(self, prefixlen_diff=1, new_prefix=None): - """Return a list of subnets, rather than an iterator.""" - return list(self.iter_subnets(prefixlen_diff, new_prefix)) - - def supernet(self, prefixlen_diff=1, new_prefix=None): - """The supernet containing the current network. - - Args: - prefixlen_diff: An integer, the amount the prefix length of - the network should be decreased by. For example, given a - /24 network and a prefixlen_diff of 3, a supernet with a - /21 netmask is returned. - - Returns: - An IPv4 network object. - - Raises: - ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have a - negative prefix length. - OR - If prefixlen_diff and new_prefix are both set or new_prefix is a - larger number than the current prefix (larger number means a - smaller network) - - """ - if self._prefixlen == 0: - return self - - if new_prefix is not None: - if new_prefix > self._prefixlen: - raise ValueError('new prefix must be shorter') - if prefixlen_diff != 1: - raise ValueError('cannot set prefixlen_diff and new_prefix') - prefixlen_diff = self._prefixlen - new_prefix - - - if self.prefixlen - prefixlen_diff < 0: - raise ValueError( - 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % - (self.prefixlen, prefixlen_diff)) - return IPNetwork('%s/%s' % (str(self.network), - str(self.prefixlen - prefixlen_diff)), - version=self._version) - - # backwards compatibility - Subnet = subnet - Supernet = supernet - AddressExclude = address_exclude - CompareNetworks = compare_networks - Contains = __contains__ - - -class _BaseV4(object): - - """Base IPv4 object. - - The following methods are used by IPv4 objects in both single IP - addresses and networks. - - """ - - # Equivalent to 255.255.255.255 or 32 bits of 1's. 
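For reference, the _BaseNet methods defined above (subnet, supernet, address_exclude) are reached through IPNetwork objects. A sketch, with the expected values taken from the docstrings above and the import path assumed as before:

    # Hypothetical sketch (Python 2 era).
    from manifold.util.ipaddr import IPNetwork

    net = IPNetwork("10.1.1.0/24")
    print(net.subnet())                    # the two /25 halves of the /24
    print(net.supernet(prefixlen_diff=3))  # the enclosing /21
    print(net.address_exclude(IPNetwork("10.1.1.0/26")))
    # -> the sibling /26 and the upper /25 of the original /24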
- _ALL_ONES = (2**IPV4LENGTH) - 1 - _DECIMAL_DIGITS = frozenset('0123456789') - - def __init__(self, address): - self._version = 4 - self._max_prefixlen = IPV4LENGTH - - def _explode_shorthand_ip_string(self): - return str(self) - - def _ip_int_from_string(self, ip_str): - """Turn the given IP string into an integer for comparison. - - Args: - ip_str: A string, the IP ip_str. - - Returns: - The IP ip_str as an integer. - - Raises: - AddressValueError: if ip_str isn't a valid IPv4 Address. - - """ - octets = ip_str.split('.') - if len(octets) != 4: - raise AddressValueError(ip_str) - - packed_ip = 0 - for oc in octets: - try: - packed_ip = (packed_ip << 8) | self._parse_octet(oc) - except ValueError: - raise AddressValueError(ip_str) - return packed_ip - - def _parse_octet(self, octet_str): - """Convert a decimal octet into an integer. - - Args: - octet_str: A string, the number to parse. - - Returns: - The octet as an integer. - - Raises: - ValueError: if the octet isn't strictly a decimal from [0..255]. - - """ - # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not self._DECIMAL_DIGITS.issuperset(octet_str): - raise ValueError - octet_int = int(octet_str, 10) - # Disallow leading zeroes, because no clear standard exists on - # whether these should be interpreted as decimal or octal. - if octet_int > 255 or (octet_str[0] == '0' and len(octet_str) > 1): - raise ValueError - return octet_int - - def _string_from_ip_int(self, ip_int): - """Turns a 32-bit integer into dotted decimal notation. - - Args: - ip_int: An integer, the IP address. - - Returns: - The IP address as a string in dotted decimal notation. - - """ - octets = [] - for _ in xrange(4): - octets.insert(0, str(ip_int & 0xFF)) - ip_int >>= 8 - return '.'.join(octets) - - @property - def max_prefixlen(self): - return self._max_prefixlen - - @property - def packed(self): - """The binary representation of this address.""" - return v4_int_to_packed(self._ip) - - @property - def version(self): - return self._version - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within the - reserved IPv4 Network range. - - """ - return self in IPv4Network('240.0.0.0/4') - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per RFC 1918. - - """ - return (self in IPv4Network('10.0.0.0/8') or - self in IPv4Network('172.16.0.0/12') or - self in IPv4Network('192.168.0.0/16')) - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is multicast. - See RFC 3171 for details. - - """ - return self in IPv4Network('224.0.0.0/4') - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 5735 3. - - """ - return self in IPv4Network('0.0.0.0') - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback per RFC 3330. - - """ - return self in IPv4Network('127.0.0.0/8') - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is link-local per RFC 3927. 
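For reference, the _BaseV4 classification properties above are plain containment tests against well-known ranges; RFC references as in the docstrings above. A sketch:

    # Hypothetical sketch (Python 2 era).
    from manifold.util.ipaddr import IPv4Address

    ip = IPv4Address("192.168.1.1")
    print(ip.is_private)     # True  (within 192.168.0.0/16, RFC 1918)
    print(ip.is_loopback)    # False (only 127.0.0.0/8)
    print(ip.is_multicast)   # False (only 224.0.0.0/4)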
- - """ - return self in IPv4Network('169.254.0.0/16') - - -class IPv4Address(_BaseV4, _BaseIP): - - """Represent and manipulate single IPv4 Addresses.""" - - def __init__(self, address): - - """ - Args: - address: A string or integer representing the IP - '192.168.1.1' - - Additionally, an integer can be passed, so - IPv4Address('192.168.1.1') == IPv4Address(3232235777). - or, more generally - IPv4Address(int(IPv4Address('192.168.1.1'))) == - IPv4Address('192.168.1.1') - - Raises: - AddressValueError: If ipaddr isn't a valid IPv4 address. - - """ - _BaseV4.__init__(self, address) - - # Efficient constructor from integer. - if isinstance(address, (int, long)): - self._ip = address - if address < 0 or address > self._ALL_ONES: - raise AddressValueError(address) - return - - # Constructing from a packed address - if isinstance(address, Bytes): - try: - self._ip, = struct.unpack('!I', address) - except struct.error: - raise AddressValueError(address) # Wrong length. - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP string. - addr_str = str(address) - self._ip = self._ip_int_from_string(addr_str) - - -class IPv4Network(_BaseV4, _BaseNet): - - """This class represents and manipulates 32-bit IPv4 networks. - - Attributes: [examples for IPv4Network('1.2.3.4/27')] - ._ip: 16909060 - .ip: IPv4Address('1.2.3.4') - .network: IPv4Address('1.2.3.0') - .hostmask: IPv4Address('0.0.0.31') - .broadcast: IPv4Address('1.2.3.31') - .netmask: IPv4Address('255.255.255.224') - .prefixlen: 27 - - """ - - # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) - - def __init__(self, address, strict=False): - """Instantiate a new IPv4 network object. - - Args: - address: A string or integer representing the IP [& network]. - '192.168.1.1/24' - '192.168.1.1/255.255.255.0' - '192.168.1.1/0.0.0.255' - are all functionally the same in IPv4. Similarly, - '192.168.1.1' - '192.168.1.1/255.255.255.255' - '192.168.1.1/32' - are also functionaly equivalent. That is to say, failing to - provide a subnetmask will create an object with a mask of /32. - - If the mask (portion after the / in the argument) is given in - dotted quad form, it is treated as a netmask if it starts with a - non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it - starts with a zero field (e.g. 0.255.255.255 == /8), with the - single exception of an all-zero mask which is treated as a - netmask == /0. If no mask is given, a default of /32 is used. - - Additionally, an integer can be passed, so - IPv4Network('192.168.1.1') == IPv4Network(3232235777). - or, more generally - IPv4Network(int(IPv4Network('192.168.1.1'))) == - IPv4Network('192.168.1.1') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 192.168.1.0/24 and not an - IP address on a network, eg, 192.168.1.1/24. - - Raises: - AddressValueError: If ipaddr isn't a valid IPv4 address. - NetmaskValueError: If the netmask isn't valid for - an IPv4 address. - ValueError: If strict was True and a network address was not - supplied. - - """ - _BaseNet.__init__(self, address) - _BaseV4.__init__(self, address) - - # Constructing from an integer or packed bytes. 
- if isinstance(address, (int, long, Bytes)): - self.ip = IPv4Address(address) - self._ip = self.ip._ip - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ALL_ONES) - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. - addr = str(address).split('/') - - if len(addr) > 2: - raise AddressValueError(address) - - self._ip = self._ip_int_from_string(addr[0]) - self.ip = IPv4Address(self._ip) - - if len(addr) == 2: - mask = addr[1].split('.') - if len(mask) == 4: - # We have dotted decimal netmask. - if self._is_valid_netmask(addr[1]): - self.netmask = IPv4Address(self._ip_int_from_string( - addr[1])) - elif self._is_hostmask(addr[1]): - self.netmask = IPv4Address( - self._ip_int_from_string(addr[1]) ^ self._ALL_ONES) - else: - raise NetmaskValueError('%s is not a valid netmask' - % addr[1]) - - self._prefixlen = self._prefix_from_ip_int(int(self.netmask)) - else: - # We have a netmask in prefix length form. - if not self._is_valid_netmask(addr[1]): - raise NetmaskValueError(addr[1]) - self._prefixlen = int(addr[1]) - self.netmask = IPv4Address(self._ip_int_from_prefix( - self._prefixlen)) - else: - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ip_int_from_prefix( - self._prefixlen)) - if strict: - if self.ip != self.network: - raise ValueError('%s has host bits set' % - self.ip) - if self._prefixlen == (self._max_prefixlen - 1): - self.iterhosts = self.__iter__ - - def _is_hostmask(self, ip_str): - """Test if the IP string is a hostmask (rather than a netmask). - - Args: - ip_str: A string, the potential hostmask. - - Returns: - A boolean, True if the IP string is a hostmask. - - """ - bits = ip_str.split('.') - try: - parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] - except ValueError: - return False - if len(parts) != len(bits): - return False - if parts[0] < parts[-1]: - return True - return False - - def _is_valid_netmask(self, netmask): - """Verify that the netmask is valid. - - Args: - netmask: A string, either a prefix or dotted decimal - netmask. - - Returns: - A boolean, True if the prefix represents a valid IPv4 - netmask. - - """ - mask = netmask.split('.') - if len(mask) == 4: - if [x for x in mask if int(x) not in self._valid_mask_octets]: - return False - if [y for idx, y in enumerate(mask) if idx > 0 and - y > mask[idx - 1]]: - return False - return True - try: - netmask = int(netmask) - except ValueError: - return False - return 0 <= netmask <= self._max_prefixlen - - # backwards compatibility - IsRFC1918 = lambda self: self.is_private - IsMulticast = lambda self: self.is_multicast - IsLoopback = lambda self: self.is_loopback - IsLinkLocal = lambda self: self.is_link_local - - -class _BaseV6(object): - - """Base IPv6 object. - - The following methods are used by IPv6 objects in both single IP - addresses and networks. - - """ - - _ALL_ONES = (2**IPV6LENGTH) - 1 - _HEXTET_COUNT = 8 - _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') - - def __init__(self, address): - self._version = 6 - self._max_prefixlen = IPV6LENGTH - - def _ip_int_from_string(self, ip_str): - """Turn an IPv6 ip_str into an integer. - - Args: - ip_str: A string, the IPv6 ip_str. - - Returns: - A long, the IPv6 ip_str. - - Raises: - AddressValueError: if ip_str isn't a valid IPv6 Address. - - """ - parts = ip_str.split(':') - - # An IPv6 address needs at least 2 colons (3 parts). 
- if len(parts) < 3: - raise AddressValueError(ip_str) - - # If the address has an IPv4-style suffix, convert it to hexadecimal. - if '.' in parts[-1]: - ipv4_int = IPv4Address(parts.pop())._ip - parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) - parts.append('%x' % (ipv4_int & 0xFFFF)) - - # An IPv6 address can't have more than 8 colons (9 parts). - if len(parts) > self._HEXTET_COUNT + 1: - raise AddressValueError(ip_str) - - # Disregarding the endpoints, find '::' with nothing in between. - # This indicates that a run of zeroes has been skipped. - try: - skip_index, = ( - [i for i in xrange(1, len(parts) - 1) if not parts[i]] or - [None]) - except ValueError: - # Can't have more than one '::' - raise AddressValueError(ip_str) - - # parts_hi is the number of parts to copy from above/before the '::' - # parts_lo is the number of parts to copy from below/after the '::' - if skip_index is not None: - # If we found a '::', then check if it also covers the endpoints. - parts_hi = skip_index - parts_lo = len(parts) - skip_index - 1 - if not parts[0]: - parts_hi -= 1 - if parts_hi: - raise AddressValueError(ip_str) # ^: requires ^:: - if not parts[-1]: - parts_lo -= 1 - if parts_lo: - raise AddressValueError(ip_str) # :$ requires ::$ - parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo) - if parts_skipped < 1: - raise AddressValueError(ip_str) - else: - # Otherwise, allocate the entire address to parts_hi. The endpoints - # could still be empty, but _parse_hextet() will check for that. - if len(parts) != self._HEXTET_COUNT: - raise AddressValueError(ip_str) - parts_hi = len(parts) - parts_lo = 0 - parts_skipped = 0 - - try: - # Now, parse the hextets into a 128-bit integer. - ip_int = 0L - for i in xrange(parts_hi): - ip_int <<= 16 - ip_int |= self._parse_hextet(parts[i]) - ip_int <<= 16 * parts_skipped - for i in xrange(-parts_lo, 0): - ip_int <<= 16 - ip_int |= self._parse_hextet(parts[i]) - return ip_int - except ValueError: - raise AddressValueError(ip_str) - - def _parse_hextet(self, hextet_str): - """Convert an IPv6 hextet string into an integer. - - Args: - hextet_str: A string, the number to parse. - - Returns: - The hextet as an integer. - - Raises: - ValueError: if the input isn't strictly a hex number from [0..FFFF]. - - """ - # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not self._HEX_DIGITS.issuperset(hextet_str): - raise ValueError - hextet_int = int(hextet_str, 16) - if hextet_int > 0xFFFF: - raise ValueError - return hextet_int - - def _compress_hextets(self, hextets): - """Compresses a list of hextets. - - Compresses a list of strings, replacing the longest continuous - sequence of "0" in the list with "" and adding empty strings at - the beginning or at the end of the string such that subsequently - calling ":".join(hextets) will produce the compressed version of - the IPv6 address. - - Args: - hextets: A list of strings, the hextets to compress. - - Returns: - A list of strings. - - """ - best_doublecolon_start = -1 - best_doublecolon_len = 0 - doublecolon_start = -1 - doublecolon_len = 0 - for index in range(len(hextets)): - if hextets[index] == '0': - doublecolon_len += 1 - if doublecolon_start == -1: - # Start of a sequence of zeros. - doublecolon_start = index - if doublecolon_len > best_doublecolon_len: - # This is the longest sequence of zeros so far. 
- best_doublecolon_len = doublecolon_len - best_doublecolon_start = doublecolon_start - else: - doublecolon_len = 0 - doublecolon_start = -1 - - if best_doublecolon_len > 1: - best_doublecolon_end = (best_doublecolon_start + - best_doublecolon_len) - # For zeros at the end of the address. - if best_doublecolon_end == len(hextets): - hextets += [''] - hextets[best_doublecolon_start:best_doublecolon_end] = [''] - # For zeros at the beginning of the address. - if best_doublecolon_start == 0: - hextets = [''] + hextets - - return hextets - - def _string_from_ip_int(self, ip_int=None): - """Turns a 128-bit integer into hexadecimal notation. - - Args: - ip_int: An integer, the IP address. - - Returns: - A string, the hexadecimal representation of the address. - - Raises: - ValueError: The address is bigger than 128 bits of all ones. - - """ - if not ip_int and ip_int != 0: - ip_int = int(self._ip) - - if ip_int > self._ALL_ONES: - raise ValueError('IPv6 address is too large') - - hex_str = '%032x' % ip_int - hextets = [] - for x in range(0, 32, 4): - hextets.append('%x' % int(hex_str[x:x+4], 16)) - - hextets = self._compress_hextets(hextets) - return ':'.join(hextets) - - def _explode_shorthand_ip_string(self): - """Expand a shortened IPv6 address. - - Args: - ip_str: A string, the IPv6 address. - - Returns: - A string, the expanded IPv6 address. - - """ - if isinstance(self, _BaseNet): - ip_str = str(self.ip) - else: - ip_str = str(self) - - ip_int = self._ip_int_from_string(ip_str) - parts = [] - for i in xrange(self._HEXTET_COUNT): - parts.append('%04x' % (ip_int & 0xFFFF)) - ip_int >>= 16 - parts.reverse() - if isinstance(self, _BaseNet): - return '%s/%d' % (':'.join(parts), self.prefixlen) - return ':'.join(parts) - - @property - def max_prefixlen(self): - return self._max_prefixlen - - @property - def packed(self): - """The binary representation of this address.""" - return v6_int_to_packed(self._ip) - - @property - def version(self): - return self._version - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. - - """ - return self in IPv6Network('ff00::/8') - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. - - """ - return (self in IPv6Network('::/8') or - self in IPv6Network('100::/8') or - self in IPv6Network('200::/7') or - self in IPv6Network('400::/6') or - self in IPv6Network('800::/5') or - self in IPv6Network('1000::/4') or - self in IPv6Network('4000::/3') or - self in IPv6Network('6000::/3') or - self in IPv6Network('8000::/3') or - self in IPv6Network('A000::/3') or - self in IPv6Network('C000::/3') or - self in IPv6Network('E000::/4') or - self in IPv6Network('F000::/5') or - self in IPv6Network('F800::/6') or - self in IPv6Network('FE00::/9')) - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. - - """ - return self._ip == 0 and getattr(self, '_prefixlen', 128) == 128 - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. 
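For reference, _compress_hextets() and _explode_shorthand_ip_string() above produce the short and long textual IPv6 forms exposed as str()/compressed and exploded. A sketch:

    # Hypothetical sketch (Python 2 era).
    from manifold.util.ipaddr import IPv6Address

    addr = IPv6Address("2001:0658:022a:cafe:0000:0000:0000:0001")
    print(str(addr))        # 2001:658:22a:cafe::1  (longest zero run collapsed)
    print(addr.exploded)    # 2001:0658:022a:cafe:0000:0000:0000:0001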
- - """ - return self._ip == 1 and getattr(self, '_prefixlen', 128) == 128 - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is reserved per RFC 4291. - - """ - return self in IPv6Network('fe80::/10') - - @property - def is_site_local(self): - """Test if the address is reserved for site-local. - - Note that the site-local address space has been deprecated by RFC 3879. - Use is_private to test if this address is in the space of unique local - addresses as defined by RFC 4193. - - Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. - - """ - return self in IPv6Network('fec0::/10') - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per RFC 4193. - - """ - return self in IPv6Network('fc00::/7') - - @property - def ipv4_mapped(self): - """Return the IPv4 mapped address. - - Returns: - If the IPv6 address is a v4 mapped address, return the - IPv4 mapped address. Return None otherwise. - - """ - if (self._ip >> 32) != 0xFFFF: - return None - return IPv4Address(self._ip & 0xFFFFFFFF) - - @property - def teredo(self): - """Tuple of embedded teredo IPs. - - Returns: - Tuple of the (server, client) IPs or None if the address - doesn't appear to be a teredo address (doesn't start with - 2001::/32) - - """ - if (self._ip >> 96) != 0x20010000: - return None - return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), - IPv4Address(~self._ip & 0xFFFFFFFF)) - - @property - def sixtofour(self): - """Return the IPv4 6to4 embedded address. - - Returns: - The IPv4 6to4-embedded address if present or None if the - address doesn't appear to contain a 6to4 embedded address. - - """ - if (self._ip >> 112) != 0x2002: - return None - return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) - - -class IPv6Address(_BaseV6, _BaseIP): - - """Represent and manipulate single IPv6 Addresses. - """ - - def __init__(self, address): - """Instantiate a new IPv6 address object. - - Args: - address: A string or integer representing the IP - - Additionally, an integer can be passed, so - IPv6Address('2001:4860::') == - IPv6Address(42541956101370907050197289607612071936L). - or, more generally - IPv6Address(IPv6Address('2001:4860::')._ip) == - IPv6Address('2001:4860::') - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - - """ - _BaseV6.__init__(self, address) - - # Efficient constructor from integer. - if isinstance(address, (int, long)): - self._ip = address - if address < 0 or address > self._ALL_ONES: - raise AddressValueError(address) - return - - # Constructing from a packed address - if isinstance(address, Bytes): - try: - hi, lo = struct.unpack('!QQ', address) - except struct.error: - raise AddressValueError(address) # Wrong length. - self._ip = (hi << 64) | lo - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP string. - addr_str = str(address) - if not addr_str: - raise AddressValueError('') - - self._ip = self._ip_int_from_string(addr_str) - - -class IPv6Network(_BaseV6, _BaseNet): - - """This class represents and manipulates 128-bit IPv6 networks. 
- - Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')] - .ip: IPv6Address('2001:658:22a:cafe:200::1') - .network: IPv6Address('2001:658:22a:cafe::') - .hostmask: IPv6Address('::ffff:ffff:ffff:ffff') - .broadcast: IPv6Address('2001:658:22a:cafe:ffff:ffff:ffff:ffff') - .netmask: IPv6Address('ffff:ffff:ffff:ffff::') - .prefixlen: 64 - - """ - - - def __init__(self, address, strict=False): - """Instantiate a new IPv6 Network object. - - Args: - address: A string or integer representing the IPv6 network or the IP - and prefix/netmask. - '2001:4860::/128' - '2001:4860:0000:0000:0000:0000:0000:0000/128' - '2001:4860::' - are all functionally the same in IPv6. That is to say, - failing to provide a subnetmask will create an object with - a mask of /128. - - Additionally, an integer can be passed, so - IPv6Network('2001:4860::') == - IPv6Network(42541956101370907050197289607612071936L). - or, more generally - IPv6Network(IPv6Network('2001:4860::')._ip) == - IPv6Network('2001:4860::') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 192.168.1.0/24 and not an - IP address on a network, eg, 192.168.1.1/24. - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - NetmaskValueError: If the netmask isn't valid for - an IPv6 address. - ValueError: If strict was True and a network address was not - supplied. - - """ - _BaseNet.__init__(self, address) - _BaseV6.__init__(self, address) - - # Constructing from an integer or packed bytes. - if isinstance(address, (int, long, Bytes)): - self.ip = IPv6Address(address) - self._ip = self.ip._ip - self._prefixlen = self._max_prefixlen - self.netmask = IPv6Address(self._ALL_ONES) - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. - addr = str(address).split('/') - - if len(addr) > 2: - raise AddressValueError(address) - - self._ip = self._ip_int_from_string(addr[0]) - self.ip = IPv6Address(self._ip) - - if len(addr) == 2: - if self._is_valid_netmask(addr[1]): - self._prefixlen = int(addr[1]) - else: - raise NetmaskValueError(addr[1]) - else: - self._prefixlen = self._max_prefixlen - - self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen)) - - if strict: - if self.ip != self.network: - raise ValueError('%s has host bits set' % - self.ip) - if self._prefixlen == (self._max_prefixlen - 1): - self.iterhosts = self.__iter__ - - def _is_valid_netmask(self, prefixlen): - """Verify that the netmask/prefixlen is valid. - - Args: - prefixlen: A string, the netmask in prefix length format. - - Returns: - A boolean, True if the prefix represents a valid IPv6 - netmask. 
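For reference, IPv6Network mirrors IPv4Network; the values below are the ones listed in the class docstring above for 2001:658:22A:CAFE:200::1/64:

    # Hypothetical sketch (Python 2 era).
    from manifold.util.ipaddr import IPv6Network

    net = IPv6Network("2001:658:22A:CAFE:200::1/64")
    print(net.network)      # 2001:658:22a:cafe::
    print(net.broadcast)    # 2001:658:22a:cafe:ffff:ffff:ffff:ffff
    print(net.netmask)      # ffff:ffff:ffff:ffff::
    print(net.prefixlen)    # 64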
- - """ - try: - prefixlen = int(prefixlen) - except ValueError: - return False - return 0 <= prefixlen <= self._max_prefixlen - - @property - def with_netmask(self): - return self.with_prefixlen diff --git a/manifold/util/log.py b/manifold/util/log.py deleted file mode 100644 index cdb187f1..00000000 --- a/manifold/util/log.py +++ /dev/null @@ -1,288 +0,0 @@ -import sys, logging, traceback, inspect, os.path -from logging import handlers -from manifold.util.singleton import Singleton -from manifold.util.options import Options -from manifold.util.misc import caller_name, make_list -from manifold.util import colors - -# TODO Log should take separately message strings and arguments to be able to -# remember which messages are seen several times, and also to allow for -# translation -# TODO How to log to stdout without putting None in self.log - -class Log(object): - __metaclass__ = Singleton - - DEFAULTS = { - # Logging - "rsyslog_enable" : False, - "rsyslog_host" : None, #"log.top-hat.info", - "rsyslog_port" : None, #28514, - "log_file" : "/var/log/manifold.log", - "log_level" : "DEBUG", - "debug" : "default", - "log_duplicates" : False - } - - # COLORS - color_ansi = { - 'DEBUG' : colors.MYGREEN, - 'INFO' : colors.MYBLUE, - 'WARNING': colors.MYWARNING, - 'ERROR' : colors.MYRED, - 'HEADER' : colors.MYHEADER, - 'END' : colors.MYEND, - 'RECORD' : colors.MYBLUE, - 'TMP' : colors.MYRED, - } - - @classmethod - def color(cls, color): - return cls.color_ansi[color] if color else '' - - # To remove duplicate messages - seen = {} - - def __init__(self, name='(default)'): - self.log = None # logging.getLogger(name) - self.files_to_keep = [] - self.init_log() - self.color = True - - - @classmethod - def init_options(self): - opt = Options() - - opt.add_option( - "--rsyslog-enable", action = "store_false", dest = "rsyslog_enable", - help = "Specify if log have to be written to a rsyslog server.", - default = self.DEFAULTS["rsyslog_enable"] - ) - opt.add_option( - "--rsyslog-host", dest = "rsyslog_host", - help = "Rsyslog hostname.", - default = self.DEFAULTS["rsyslog_host"] - ) - opt.add_option( - "--rsyslog-port", type = "int", dest = "rsyslog_port", - help = "Rsyslog port.", - default = self.DEFAULTS["rsyslog_port"] - ) - opt.add_option( - "-o", "--log-file", dest = "log_file", - help = "Log filename.", - default = self.DEFAULTS["log_file"] - ) - opt.add_option( - "-L", "--log-level", dest = "log_level", - choices = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], - help = "Log level", - default = self.DEFAULTS["log_level"] - ) - opt.add_option( - "-d", "--debug", dest = "debug", - help = "Debug paths (a list of coma-separated python path: path.to.module.function).", - default = self.DEFAULTS["debug"] - ) - opt.add_option( - "", "--log_duplicates", action = "store_true", dest = "log_duplicates", - help = "Remove duplicate messages in logs", - default = self.DEFAULTS["log_duplicates"] - ) - - def init_log(self, options=object()): - # Initialize self.log (require self.files_to_keep) - if self.log: # for debugging by using stdout, log may be equal to None - if Options().rsyslog_host: - shandler = self.make_handler_rsyslog( - Options().rsyslog_host, - Options().rsyslog_port, - Options().log_level - ) - elif Options().log_file: - shandler = self.make_handler_locallog( - Options().log_file, - Options().log_level - ) - - #------------------------------------------------------------------------ - # Log - #------------------------------------------------------------------------ - - def 
make_handler_rsyslog(self, rsyslog_host, rsyslog_port, log_level): - """ - \brief (Internal usage) Prepare logging via rsyslog - \param rsyslog_host The hostname of the rsyslog server - \param rsyslog_port The port of the rsyslog server - \param log_level Log level - """ - # Prepare the handler - shandler = handlers.SysLogHandler( - (rsyslog_host, rsyslog_port), - facility = handlers.SysLogHandler.LOG_DAEMON - ) - - # The log file must remain open while daemonizing - self.prepare_handler(shandler, log_level) - return shandler - - def make_handler_locallog(self, log_filename, log_level): - """ - \brief (Internal usage) Prepare local logging - \param log_filename The file in which we write the logs - \param log_level Log level - """ - # Create directory in which we store the log file - log_dir = os.path.dirname(log_filename) - if log_dir and not os.path.exists(log_dir): - try: - os.makedirs(log_dir) - except OSError, why: - # XXX here we don't log since log is not initialized yet - print "OS error: %s" % why - - # Prepare the handler - shandler = logging.handlers.RotatingFileHandler( - log_filename, - backupCount = 0 - ) - - # The log file must remain open while daemonizing - self.files_to_keep.append(shandler.stream) - self.prepare_handler(shandler, log_level) - return shandler - - def prepare_handler(self, shandler, log_level): - """ - \brief (Internal usage) - \param shandler Handler used to log information - \param log_level Log level - """ - shandler.setLevel(log_level) - formatter = logging.Formatter("%(asctime)s: %(name)s: %(levelname)s %(message)s") - shandler.setFormatter(formatter) - self.log.addHandler(shandler) - self.log.setLevel(getattr(logging, log_level, logging.INFO)) - - def get_logger(self): - return self.log - - @classmethod - def print_msg(cls, msg, level=None, caller=None): - sys.stdout.write(cls.color(level)) - if level: - print "%s" % level, - if caller: - print "[%30s]" % caller, - print msg, - print cls.color('END') - - #--------------------------------------------------------------------- - # Log: logger abstraction - #--------------------------------------------------------------------- - - @classmethod - def build_message_string(cls, msg, ctx): - if ctx: - msg = [m % ctx for m in msg] - if isinstance(msg, (tuple, list)): - msg = map(lambda s : "%s" % s, msg) - msg = " ".join(msg) - else: - msg = "%s" % msg - return msg - - @classmethod - def log_message(cls, level, msg, ctx): - """ - \brief Logs an message - \param level (string) Log level - \param msg (string / list of strings) Message string, or List of message strings - \param ctx (dict) Context for the message strings - """ - caller = None - - if not Options().log_duplicates: - try: - count = cls.seen.get(msg, 0) - cls.seen[msg] = count + 1 - except TypeError, e: - # Unhashable types in msg - count = 0 - - if count == 1: - msg += (" -- REPEATED -- Future similar messages will be silently ignored. 
Please use the --log_duplicates option to allow for duplicates",) - elif count > 1: - return - - if level == 'DEBUG': - caller = caller_name(skip=3) - # Eventually remove "" added to the configuration file - try: - paths = tuple(s.strip(' \t\n\r') for s in Options().debug.split(',')) - except: - paths = None - if not paths or not caller.startswith(paths): - return - - logger = Log().get_logger() - msg_str = cls.build_message_string(msg, ctx) - - if logger: - logger_fct = getattr(logger, level.lower()) - logger_fct("%s(): %s" % (inspect.stack()[2][3], msg_str)) - else: - cls.print_msg(msg_str, level, caller) - - - @classmethod - def critical(cls, *msg, **ctx): - if not Options().log_level in ['CRITICAL']: - return - cls.log_message('CRITICAL', msg, ctx) - sys.exit(0) - - @classmethod - def error(cls, *msg, **ctx): - if not Options().log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']: - return - cls.log_message('ERROR', msg, ctx) - logger = Log().get_logger() - if not Log().get_logger(): - traceback.print_exc() - sys.exit(0) - - @classmethod - def warning(cls, *msg, **ctx): - if not Options().log_level in ['DEBUG', 'INFO', 'WARNING']: - return - cls.log_message('WARNING', msg, ctx) - - @classmethod - def info(cls, *msg, **ctx): - if not Options().log_level in ['DEBUG', 'INFO']: - return - cls.log_message('INFO', msg, ctx) - - @classmethod - def debug(cls, *msg, **ctx): - if not Options().log_level in ['DEBUG']: - return - cls.log_message('DEBUG', msg, ctx) - - @classmethod - def tmp(cls, *msg): - cls.print_msg(' '.join(map(lambda x: "%r"%x, make_list(msg))), 'TMP', caller_name()) - - @classmethod - def record(cls, *msg): - #cls.print_msg(' '.join(map(lambda x: "%r"%x, make_list(msg))), 'RECORD', caller_name()) - pass - - @classmethod - def deprecated(cls, new): - #cls.print_msg("Function %s is deprecated, please use %s" % (caller_name(skip=3), new)) - pass - -Log.init_options() diff --git a/manifold/util/misc.py b/manifold/util/misc.py deleted file mode 100644 index 2ec3d029..00000000 --- a/manifold/util/misc.py +++ /dev/null @@ -1,66 +0,0 @@ -import os, glob, inspect -from types import StringTypes - -def find_local_modules(filepath): - modules = [] - for f in glob.glob(os.path.dirname(filepath)+"/*.py"): - name = os.path.basename(f)[:-3] - if name != '__init__': - modules.append(name) - return modules - -def make_list(elt): - if not elt or isinstance(elt, list): - return elt - if isinstance(elt, StringTypes): - return [elt] - if isinstance(elt, (tuple, set, frozenset)): - return list(elt) - - -# FROM: https://gist.github.com/techtonik/2151727 -# Public Domain, i.e. feel free to copy/paste -# Considered a hack in Python 2 - -import inspect - -def caller_name(skip=2): - """Get a name of a caller in the format module.class.method - - `skip` specifies how many levels of stack to skip while getting caller - name. skip=1 means "who calls me", skip=2 "who calls my caller" etc. 
- - An empty string is returned if skipped levels exceed stack height - """ - stack = inspect.stack() - start = 0 + skip - if len(stack) < start + 1: - return '' - parentframe = stack[start][0] - - name = [] - module = inspect.getmodule(parentframe) - # `modname` can be None when frame is executed directly in console - # TODO(techtonik): consider using __main__ - if module: - name.append(module.__name__) - # detect classname - if 'self' in parentframe.f_locals: - # I don't know any way to detect call from the object method - # XXX: there seems to be no way to detect static method call - it will - # be just a function call - name.append(parentframe.f_locals['self'].__class__.__name__) - codename = parentframe.f_code.co_name - if codename != '': # top level usually - name.append( codename ) # function or a method - del parentframe - return ".".join(name) - -def is_sublist(x, y, shortcut=None): - if not shortcut: shortcut = [] - if x == []: return (True, shortcut) - if y == []: return (False, None) - if x[0] == y[0]: - return is_sublist(x[1:],y[1:], shortcut) - else: - return is_sublist(x, y[1:], shortcut + [y[0]]) diff --git a/manifold/util/options.py b/manifold/util/options.py deleted file mode 100644 index e09ad335..00000000 --- a/manifold/util/options.py +++ /dev/null @@ -1,95 +0,0 @@ -import sys -import os.path -import optparse -# xxx warning : this is not taken care of by the debian packaging -# cfgparse seems to be available by pip only (on debian, that is) -# there seems to be another package that might be used to do similar stuff -# python-configglue - Glues together optparse.OptionParser and ConfigParser.ConfigParser -# additionally argumentparser would probably be the way to go, notwithstanding -# xxx Moving this into the parse method so this module can at least be imported -#import cfgparse - -from manifold.util.singleton import Singleton - -# http://docs.python.org/dev/library/argparse.html#upgrading-optparse-code - -class Options(object): - - __metaclass__ = Singleton - - # We should be able to use another default conf file - CONF_FILE = '/etc/manifold.conf' - - def __init__(self, name = None): - self._opt = optparse.OptionParser() - self._defaults = {} - self._name = name - self.clear() - - def clear(self): - self.options = {} - self.add_option( - "-c", "--config", dest = "cfg_file", - help = "Config file to use.", - default = self.CONF_FILE - ) - self.uptodate = True - - def parse(self): - """ - \brief Parse options passed from command-line - """ - # add options here - - # if we have a logger singleton, add its options here too - # get defaults too - - # Initialize options to default values - import cfgparse - cfg = cfgparse.ConfigParser() - cfg.add_optparse_help_option(self._opt) - - # Load configuration file - try: - cfg_filename = sys.argv[sys.argv.index("-c") + 1] - try: - with open(cfg_filename): cfg.add_file(cfg_filename) - except IOError: - raise Exception, "Cannot open specified configuration file: %s" % cfg_filename - except ValueError: - try: - with open(self.CONF_FILE): cfg.add_file(self.CONF_FILE) - except IOError: pass - - for option_name in self._defaults: - cfg.add_option(option_name, default = self._defaults[option_name]) - - # Load/override options from configuration file and command-line - (options, args) = cfg.parse(self._opt) - self.options.update(vars(options)) - self.uptodate = True - - - def add_option(self, *args, **kwargs): - default = kwargs.get('default', None) - self._defaults[kwargs['dest']] = default - if 'default' in kwargs: - # This is very 
important otherwise file content is not taken into account - del kwargs['default'] - kwargs['help'] += " Defaults to %r." % default - self._opt.add_option(*args, **kwargs) - self.uptodate = False - - def get_name(self): - return self._name if self._name else os.path.basename(sys.argv[0]) - - def __repr__(self): - return "" % self.options - - def __getattr__(self, key): - if not self.uptodate: - self.parse() - return self.options.get(key, None) - - def __setattr(self, key, value): - self.options[key] = value diff --git a/manifold/util/plugin_factory.py b/manifold/util/plugin_factory.py deleted file mode 100644 index 1956355a..00000000 --- a/manifold/util/plugin_factory.py +++ /dev/null @@ -1,24 +0,0 @@ -from manifold.util.log import Log - -class PluginFactory(type): - def __init__(cls, name, bases, dic): - #super(PluginFactory, cls).__init__(name, bases, dic) - type.__init__(cls, name, bases, dic) - - try: - registry = getattr(cls, 'registry') - except AttributeError: - setattr(cls, 'registry', {}) - registry = getattr(cls, 'registry') - # XXX - if name != "Gateway": - if name.endswith('Gateway'): - name = name[:-7] - name = name.lower() - registry[name] = cls - - def get(self, name): - return registry[name.lower()] - - # Adding a class method get to retrieve plugins by name - setattr(cls, 'get', classmethod(get)) diff --git a/manifold/util/predicate.py b/manifold/util/predicate.py deleted file mode 100644 index fb32e4d8..00000000 --- a/manifold/util/predicate.py +++ /dev/null @@ -1,281 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Class Predicate: -# Define a condition to join for example to Table instances. -# If this condition involves several fields, you may define a -# single Predicate using tuple of fields. -# -# Copyright (C) UPMC Paris Universitas -# Authors: -# Jordan Augé -# Marc-Olivier Buob - -from types import StringTypes -from manifold.util.type import returns, accepts - -from operator import ( - and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg -) - -# Define the inclusion operators -class contains(type): pass -class included(type): pass - -# New modifier: { contains -class Predicate: - - operators = { - '==' : eq, - '!=' : ne, - '<' : lt, - '<=' : le, - '>' : gt, - '>=' : ge, - '&&' : and_, - '||' : or_, - 'CONTAINS' : contains, - 'INCLUDED' : included - } - - operators_short = { - '=' : eq, - '~' : ne, - '<' : lt, - '[' : le, - '>' : gt, - ']' : ge, - '&' : and_, - '|' : or_, - '}' : contains, - '{' : included - } - - def __init__(self, *args, **kwargs): - """ - Build a Predicate instance. - Args: - kwargs: You can pass: - - 3 args (left, operator, right) - left: The left operand (it may be a String instance or a tuple) - operator: See Predicate.operators, this is the binary operator - involved in this Predicate. 
- right: The right value (it may be a String instance - or a literal (String, numerical value, tuple...)) - - 1 argument (list or tuple), containing three arguments - (variable, operator, value) - """ - if len(args) == 3: - key, op, value = args - elif len(args) == 1 and isinstance(args[0], (tuple,list)) and len(args[0]) == 3: - key, op, value = args[0] - elif len(args) == 1 and isinstance(args[0], Predicate): - key, op, value = args[0].get_tuple() - else: - raise Exception, "Bad initializer for Predicate (args = %r)" % args - - assert not isinstance(value, (frozenset, dict, set)), "Invalid value type (type = %r)" % type(value) - if isinstance(value, list): - value = tuple(value) - - self.key = key - if isinstance(op, StringTypes): - op = op.upper() - if op in self.operators.keys(): - self.op = self.operators[op] - elif op in self.operators_short.keys(): - self.op = self.operators_short[op] - else: - self.op = op - - if isinstance(value, list): - self.value = tuple(value) - else: - self.value = value - - @returns(StringTypes) - def __str__(self): - """ - Returns: - The '%s' representation of this Predicate. - """ - key, op, value = self.get_str_tuple() - if isinstance(value, (tuple, list, set, frozenset)): - value = [repr(v) for v in value] - value = "[%s]" % ", ".join(value) - return "%s %s %r" % (key, op, value) - - @returns(StringTypes) - def __repr__(self): - """ - Returns: - The '%r' representation of this Predicate. - """ - return "Predicate<%s %s %r>" % self.get_str_tuple() - - def __hash__(self): - """ - Returns: - The hash of this Predicate (this allows to define set of - Predicate instances). - """ - return hash(self.get_tuple()) - - @returns(bool) - def __eq__(self, predicate): - """ - Returns: - True iif self == predicate. - """ - if not predicate: - return False - return self.get_tuple() == predicate.get_tuple() - - def get_key(self): - """ - Returns: - The left operand of this Predicate. It may be a String - or a tuple of Strings. - """ - return self.key - - def set_key(self, key): - """ - Set the left operand of this Predicate. - Params: - key: The new left operand. - """ - self.key = key - - def get_op(self): - return self.op - - def get_value(self): - return self.value - - def set_value(self, value): - self.value = value - - def get_tuple(self): - return (self.key, self.op, self.value) - - def get_str_op(self): - op_str = [s for s, op in self.operators.iteritems() if op == self.op] - return op_str[0] - - def get_str_tuple(self): - return (self.key, self.get_str_op(), self.value,) - - def to_list(self): - return list(self.get_str_tuple()) - - def match(self, dic, ignore_missing=False): - if isinstance(self.key, tuple): - print "PREDICATE MATCH", self.key - print dic - print "-----------------------------" - - # Can we match ? - if self.key not in dic: - return ignore_missing - - if self.op == eq: - if isinstance(self.value, list): - return (dic[self.key] in self.value) # array ? - else: - return (dic[self.key] == self.value) - elif self.op == ne: - if isinstance(self.value, list): - return (dic[self.key] not in self.value) # array ? - else: - return (dic[self.key] != self.value) # array ? - elif self.op == lt: - if isinstance(self.value, StringTypes): - # prefix match - return dic[self.key].startswith('%s.' % self.value) - else: - return (dic[self.key] < self.value) - elif self.op == le: - if isinstance(self.value, StringTypes): - return dic[self.key] == self.value or dic[self.key].startswith('%s.' 
% self.value) - else: - return (dic[self.key] <= self.value) - elif self.op == gt: - if isinstance(self.value, StringTypes): - # prefix match - return self.value.startswith('%s.' % dic[self.key]) - else: - return (dic[self.key] > self.value) - elif self.op == ge: - if isinstance(self.value, StringTypes): - # prefix match - return dic[self.key] == self.value or self.value.startswith('%s.' % dic[self.key]) - else: - return (dic[self.key] >= self.value) - elif self.op == and_: - return (dic[self.key] & self.value) # array ? - elif self.op == or_: - return (dic[self.key] | self.value) # array ? - elif self.op == contains: - method, subfield = self.key.split('.', 1) - return not not [ x for x in dic[method] if x[subfield] == self.value] - elif self.op == included: - return dic[self.key] in self.value - else: - raise Exception, "Unexpected table format: %r" % dic - - def filter(self, dic): - """ - Filter dic according to the current predicate. - """ - - if '.' in self.key: - # users.hrn - method, subfield = self.key.split('.', 1) - if not method in dic: - return None # XXX - - if isinstance(dic[method], dict): - # We have a 1..1 relationship: apply the same filter to the dict - subpred = Predicate(subfield, self.op, self.value) - match = subpred.match(dic[method]) - return dic if match else None - - elif isinstance(dic[method], (list, tuple)): - # 1..N relationships - match = False - if self.op == contains: - return dic if self.match(dic) else None - else: - subpred = Predicate(subfield, self.op, self.value) - dic[method] = subpred.filter(dic[method]) - return dic - else: - raise Exception, "Unexpected table format: %r", dic - - - else: - # Individual field operations: this could be simplified, since we are now using operators_short !! - # XXX match - print "current predicate", self - print "matching", dic - print "----" - return dic if self.match(dic) else None - - def get_field_names(self): - if isinstance(self.key, (list, tuple, set, frozenset)): - return set(self.key) - else: - return set([self.key]) - - def get_value_names(self): - if isinstance(self.value, (list, tuple, set, frozenset)): - return set(self.value) - else: - return set([self.value]) - - def has_empty_value(self): - if isinstance(self.value, (list, tuple, set, frozenset)): - return not any(self.value) - else: - return not self.value diff --git a/manifold/util/reactor_thread.py b/manifold/util/reactor_thread.py deleted file mode 100644 index 4eb2c7e2..00000000 --- a/manifold/util/reactor_thread.py +++ /dev/null @@ -1,103 +0,0 @@ -# Borrowed from Chandler -# http://chandlerproject.org/Projects/ChandlerTwistedInThreadedEnvironment - -import threading, time -from manifold.util.singleton import Singleton -from manifold.util.log import * -from twisted.internet import defer -from twisted.python import threadable - -__author__ ="Brian Kirsch " - -#required for using threads with the Reactor -threadable.init() - -class ReactorException(Exception): - def __init__(self, *args): - Exception.__init__(self, *args) - - -class ReactorThread(threading.Thread): - """ - Run the Reactor in a Thread to prevent blocking the - Main Thread once reactor.run is called - """ - - __metaclass__ = Singleton - - def __init__(self): - threading.Thread.__init__(self) - self._reactorRunning = False - - # Be sure the import is done only at runtime, we keep a reference in the - # class instance - from twisted.internet import reactor - self.reactor = reactor - - def run(self): - if self._reactorRunning: - raise ReactorException("Reactor Already Running") - - 
self._reactorRunning = True - - #call run passing a False flag indicating to the - #reactor not to install sig handlers since sig handlers - #only work on the main thread - try: - #signal.signal(signal.SIGINT, signal.default_int_handler) - self.reactor.run(False) - except Exception, e: - print "Reactor exception:", e - - def callInReactor(self, callable, *args, **kw): - if self._reactorRunning: - self.reactor.callFromThread(callable, *args, **kw) - else: - callable(*args, **kw) - - def isReactorRunning(self): - return self._reactorRunning - - def start_reactor(self): - if self._reactorRunning: - log_warning("Reactor already running. This is normal, please remove this debug message") - return - #raise ReactorException("Reactor Already Running") - threading.Thread.start(self) - cpt = 0 - while not self._reactorRunning: - time.sleep(0.1) - cpt +=1 - if cpt > 5: - raise ReactorException, "Reactor thread is too long to start... cancelling" - self.reactor.addSystemEventTrigger('after', 'shutdown', self.__reactorShutDown) - - def stop_reactor(self): - """ - may want a way to force thread to join if reactor does not shutdown - properly. The reactor can get in to a recursive loop condition if reactor.stop - placed in the threads join method. This will require further investigation. - """ - if not self._reactorRunning: - raise ReactorException("Reactor Not Running") - self.reactor.callFromThread(self.reactor.stop) - #self.reactor.join() - - def addReactorEventTrigger(self, phase, eventType, callable): - if self._reactorRunning: - self.reactor.callFromThread(self.reactor.addSystemEventTrigger, phase, eventType, callable) - else: - self.reactor.addSystemEventTrigger(phase, eventType, callable) - - def __reactorShuttingDown(self): - pass - - def __reactorShutDown(self): - """This method called when the reactor is stopped""" - self._reactorRunning = False - - def __getattr__(self, name): - # We transfer missing methods to the reactor - def _missing(*args, **kwargs): - self.reactor.callFromThread(getattr(self.reactor, name), *args, **kwargs) - return _missing diff --git a/manifold/util/reactor_wrapper.py b/manifold/util/reactor_wrapper.py deleted file mode 100644 index eb18874f..00000000 --- a/manifold/util/reactor_wrapper.py +++ /dev/null @@ -1,48 +0,0 @@ -from manifold.util.singleton import Singleton - -class ReactorWrapper(object): - __metaclass__ = Singleton - - def __init__(self): - # Be sure the import is done only at runtime, we keep a reference in the - # class instance - from twisted.internet import reactor - self.reactor = reactor - - - def callInReactor(self, callable, *args, **kw): - print "ReactorWrapper::callInReactor" - if self._reactorRunning: - self.reactor.callFromThread(callable, *args, **kw) - else: - callable(*args, **kw) - - def isReactorRunning(self): - return self._reactorRunning - - def start_reactor(self): - self.reactor.run() - - def stop_reactor(self): - self.reactor.stop() - - def addReactorEventTrigger(self, phase, eventType, callable): - print "ReactorWrapper::addReactorEventTrigger" - if self._reactorRunning: - self.reactor.callFromThread(self.reactor.addSystemEventTrigger, phase, eventType, callable) - else: - self.reactor.addSystemEventTrigger(phase, eventType, callable) - - def __reactorShuttingDown(self): - pass - - def __reactorShutDown(self): - """This method called when the reactor is stopped""" - print "REACTOR SHUTDOWN" - self._reactorRunning = False - - def __getattr__(self, name): - # We transfer missing methods to the reactor - def _missing(*args, **kwargs): - 
getattr(self.reactor, name)(*args, **kwargs) - return _missing diff --git a/manifold/util/singleton.py b/manifold/util/singleton.py deleted file mode 100644 index b622c135..00000000 --- a/manifold/util/singleton.py +++ /dev/null @@ -1,19 +0,0 @@ -#------------------------------------------------------------------------- -# Class Singleton -# -# Classes that inherit from Singleton can be instanciated only once -#------------------------------------------------------------------------- - -class Singleton(type): - def __init__(cls, name, bases, dic): - super(Singleton,cls).__init__(name,bases,dic) - cls.instance=None - - def __call__(cls, *args, **kw): - if cls.instance is None: - cls.instance=super(Singleton,cls).__call__(*args,**kw) - return cls.instance - - -# See also -# http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python diff --git a/manifold/util/storage.py b/manifold/util/storage.py deleted file mode 100644 index 066993e6..00000000 --- a/manifold/util/storage.py +++ /dev/null @@ -1,29 +0,0 @@ -from manifold.gateways import Gateway -from manifold.util.callback import Callback - -#URL='sqlite:///:memory:?check_same_thread=False' -URL='sqlite:////var/myslice/db.sqlite?check_same_thread=False' - -class Storage(object): - pass - # We can read information from files, database, commandline, etc - # Let's focus on the database - - @classmethod - def register(self, object): - """ - Registers a new object that will be stored locally by manifold. - This will live in the - """ - pass - -class DBStorage(Storage): - @classmethod - def execute(self, query, user=None, format='dict'): - # XXX Need to pass local parameters - gw = Gateway.get('sqlalchemy')(config={'url': URL}, user=user, format=format) - gw.set_query(query) - cb = Callback() - gw.set_callback(cb) - gw.start() - return cb.get_results() diff --git a/manifold/util/type.py b/manifold/util/type.py deleted file mode 100644 index 1cc03b2b..00000000 --- a/manifold/util/type.py +++ /dev/null @@ -1,144 +0,0 @@ -# http://wiki.python.org/moin/PythonDecoratorLibrary#Type_Enforcement_.28accepts.2Freturns.29 -''' -One of three degrees of enforcement may be specified by passing -the 'debug' keyword argument to the decorator: - 0 -- NONE: No type-checking. Decorators disabled. - 1 -- MEDIUM: Print warning message to stderr. (Default) - 2 -- STRONG: Raise TypeError with message. -If 'debug' is not passed to the decorator, the default level is used. - -Example usage: - >>> NONE, MEDIUM, STRONG = 0, 1, 2 - >>> - >>> @accepts(int, int, int) - ... @returns(float) - ... def average(x, y, z): - ... return (x + y + z) / 2 - ... - >>> average(5.5, 10, 15.0) - TypeWarning: 'average' method accepts (int, int, int), but was given - (float, int, float) - 15.25 - >>> average(5, 10, 15) - TypeWarning: 'average' method returns (float), but result is (int) - 15 - -Needed to cast params as floats in function def (or simply divide by 2.0). - - >>> TYPE_CHECK = STRONG - >>> @accepts(int, debug=TYPE_CHECK) - ... @returns(int, debug=TYPE_CHECK) - ... def fib(n): - ... if n in (0, 1): return n - ... return fib(n-1) + fib(n-2) - ... - >>> fib(5.3) - Traceback (most recent call last): - ... - TypeError: 'fib' method accepts (int), but was given (float) - -''' -import sys -from itertools import izip - -def accepts(*types, **kw): - '''Function decorator. Checks decorated function's arguments are - of the expected types. - - Parameters: - types -- The expected types of the inputs to the decorated function. - Must specify type for each parameter. 
- kw -- Optional specification of 'debug' level (this is the only valid - keyword argument, no other should be given). - debug = ( 0 | 1 | 2 ) - - ''' - if not kw: - # default level: MEDIUM - debug = 2 - else: - debug = kw['debug'] - try: - def decorator(f): - # XXX Missing full support of kwargs - def newf(*args, **kwargs): - if debug is 0: - return f(*args, **kwargs) - assert len(args) == len(types) - argtypes = tuple(map(type, args)) - if not compare_types(types, argtypes): - # if argtypes != types: - msg = info(f.__name__, types, argtypes, 0) - if debug is 1: - print >> sys.stderr, 'TypeWarning: ', msg - elif debug is 2: - raise TypeError, msg - return f(*args, **kwargs) - newf.__name__ = f.__name__ - return newf - return decorator - except KeyError, key: - raise KeyError, key + "is not a valid keyword argument" - except TypeError, msg: - raise TypeError, msg - -def compare_types(expected, actual): - if isinstance(expected, tuple): - if isinstance(actual, tuple): - for x, y in izip(expected, actual): - if not compare_types(x ,y): - return False - return True - else: - return actual == type(None) or actual in expected - else: - return actual == type(None) or actual == expected or isinstance(actual, expected) # issubclass(actual, expected) - -def returns(ret_type, **kw): - '''Function decorator. Checks decorated function's return value - is of the expected type. - - Parameters: - ret_type -- The expected type of the decorated function's return value. - Must specify type for each parameter. - kw -- Optional specification of 'debug' level (this is the only valid - keyword argument, no other should be given). - debug=(0 | 1 | 2) - ''' - try: - if not kw: - # default level: MEDIUM - debug = 1 - else: - debug = kw['debug'] - def decorator(f): - def newf(*args): - result = f(*args) - if debug is 0: - return result - res_type = type(result) - if not compare_types(ret_type, res_type): - # if res_type != ret_type: # JORDAN: fix to allow for # StringTypes = (str, unicode) - # XXX note that this check should be recursive - msg = info(f.__name__, (ret_type,), (res_type,), 1) - if debug is 1: - print >> sys.stderr, 'TypeWarning: ', msg - elif debug is 2: - raise TypeError, msg - return result - newf.__name__ = f.__name__ - return newf - return decorator - except KeyError, key: - raise KeyError, key + "is not a valid keyword argument" - except TypeError, msg: - raise TypeError, msg - -def info(fname, expected, actual, flag): - '''Convenience function returns nicely formatted error/warning msg.''' - format = lambda types: ', '.join([str(t).split("'")[1] for t in types]) - msg = "'{}' method ".format( fname )\ - + ("accepts", "returns")[flag] + " ({}), but ".format(expected)\ - + ("was given", "result is")[flag] + " ({})".format(actual) - return msg - diff --git a/manifold/util/xmldict.py b/manifold/util/xmldict.py deleted file mode 100644 index e45af734..00000000 --- a/manifold/util/xmldict.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import xml.etree.cElementTree as ElementTree - -class XmlListConfig(list): - def __init__(self, aList): - for element in aList: - if element: - # treat like dict - if len(element) == 1 or element[0].tag != element[1].tag: - self.append(XmlDictConfig(element)) - # treat like list - elif element[0].tag == element[1].tag: - self.append(XmlListConfig(element)) - elif element.text: - text = element.text.strip() - if text: - self.append(text) - - -class XmlDictConfig(dict): - ''' - Example usage: - - >>> tree = ElementTree.parse('your_file.xml') - >>> root = tree.getroot() - 
>>> xmldict = XmlDictConfig(root) - - Or, if you want to use an XML string: - - >>> root = ElementTree.XML(xml_string) - >>> xmldict = XmlDictConfig(root) - - And then use xmldict for what it is... a dict. - ''' - def __init__(self, parent_element): - childrenNames = [child.tag for child in parent_element.getchildren()] - - if parent_element.items(): #attributes - self.update(dict(parent_element.items())) - for element in parent_element: - if element: - # treat like dict - we assume that if the first two tags - # in a series are different, then they are all different. - if len(element) == 1 or element[0].tag != element[1].tag: - aDict = XmlDictConfig(element) - # treat like list - we assume that if the first two tags - # in a series are the same, then the rest are the same. - else: - # here, we put the list in dictionary; the key is the - # tag name the list elements all share in common, and - # the value is the list itself - aDict = {element[0].tag: XmlListConfig(element)} - # if the tag has attributes, add those to the dict - if element.items(): - aDict.update(dict(element.items())) - - if childrenNames.count(element.tag) > 1: - try: - currentValue = self[element.tag] - currentValue.append(aDict) - self.update({element.tag: currentValue}) - except: #the first of its kind, an empty list must be created - self.update({element.tag: [aDict]}) #aDict is written in [], i.e. it will be a list - - else: - self.update({element.tag: aDict}) - # this assumes that if you've got an attribute in a tag, - # you won't be having any text. This may or may not be a - # good idea -- time will tell. It works for the way we are - # currently doing XML configuration files... - elif element.items(): - self.update({element.tag: dict(element.items())}) - # finally, if there are no child tags and no attributes, extract - # the text - else: - self.update({element.tag: element.text}) - diff --git a/manifold/__init__.py b/manifoldapi/__init__.py similarity index 100% rename from manifold/__init__.py rename to manifoldapi/__init__.py diff --git a/manifoldapi/__init__.pyc b/manifoldapi/__init__.pyc new file mode 100644 index 00000000..f06df9b0 Binary files /dev/null and b/manifoldapi/__init__.pyc differ diff --git a/manifold/manifoldapi.py b/manifoldapi/manifoldapi.py similarity index 100% rename from manifold/manifoldapi.py rename to manifoldapi/manifoldapi.py diff --git a/manifoldapi/manifoldapi.pyc b/manifoldapi/manifoldapi.pyc new file mode 100644 index 00000000..764b163e Binary files /dev/null and b/manifoldapi/manifoldapi.pyc differ diff --git a/manifold/manifoldproxy.py b/manifoldapi/manifoldproxy.py similarity index 96% rename from manifold/manifoldproxy.py rename to manifoldapi/manifoldproxy.py index d0a8a3ea..c16ffdee 100644 --- a/manifold/manifoldproxy.py +++ b/manifoldapi/manifoldproxy.py @@ -5,11 +5,11 @@ import os.path #from django.core import serializers from django.http import HttpResponse, HttpResponseForbidden -#from manifold.manifoldquery import ManifoldQuery +#from manifoldapi.manifoldquery import ManifoldQuery from manifold.core.query import Query from manifold.core.result_value import ResultValue -from manifold.manifoldapi import ManifoldAPI -from manifold.manifoldresult import ManifoldException +from manifoldapi import ManifoldAPI +from manifoldresult import ManifoldException from manifold.util.log import Log from myslice.configengine import ConfigEngine diff --git a/manifoldapi/manifoldproxy.pyc b/manifoldapi/manifoldproxy.pyc new file mode 100644 index 00000000..fdd83754 Binary files /dev/null 
and b/manifoldapi/manifoldproxy.pyc differ diff --git a/manifold/manifoldresult.py b/manifoldapi/manifoldresult.py similarity index 100% rename from manifold/manifoldresult.py rename to manifoldapi/manifoldresult.py diff --git a/manifoldapi/manifoldresult.pyc b/manifoldapi/manifoldresult.pyc new file mode 100644 index 00000000..7baabe9e Binary files /dev/null and b/manifoldapi/manifoldresult.pyc differ diff --git a/manifold/metadata.py b/manifoldapi/metadata.py similarity index 93% rename from manifold/metadata.py rename to manifoldapi/metadata.py index de9bf2eb..300397cb 100644 --- a/manifold/metadata.py +++ b/manifoldapi/metadata.py @@ -1,10 +1,10 @@ import json import os.path -from manifold.manifoldresult import ManifoldResult -from manifold.manifoldapi import ManifoldAPI +from manifoldresult import ManifoldResult +from manifoldapi import ManifoldAPI -from django.contrib import messages +from django.contrib import messages debug=False #debug=True diff --git a/manifoldapi/metadata.pyc b/manifoldapi/metadata.pyc new file mode 100644 index 00000000..dd5ac476 Binary files /dev/null and b/manifoldapi/metadata.pyc differ diff --git a/manifold/static/css/manifold.css b/manifoldapi/static/css/manifold.css similarity index 100% rename from manifold/static/css/manifold.css rename to manifoldapi/static/css/manifold.css diff --git a/manifold/static/js/buffer.js b/manifoldapi/static/js/buffer.js similarity index 100% rename from manifold/static/js/buffer.js rename to manifoldapi/static/js/buffer.js diff --git a/manifold/static/js/class.js b/manifoldapi/static/js/class.js similarity index 100% rename from manifold/static/js/class.js rename to manifoldapi/static/js/class.js diff --git a/manifold/static/js/manifold-query.js b/manifoldapi/static/js/manifold-query.js similarity index 100% rename from manifold/static/js/manifold-query.js rename to manifoldapi/static/js/manifold-query.js diff --git a/manifold/static/js/manifold.js b/manifoldapi/static/js/manifold.js similarity index 100% rename from manifold/static/js/manifold.js rename to manifoldapi/static/js/manifold.js diff --git a/manifold/static/js/metadata.js b/manifoldapi/static/js/metadata.js similarity index 100% rename from manifold/static/js/metadata.js rename to manifoldapi/static/js/metadata.js diff --git a/manifold/static/js/plugin.js b/manifoldapi/static/js/plugin.js similarity index 100% rename from manifold/static/js/plugin.js rename to manifoldapi/static/js/plugin.js diff --git a/manifold/static/js/record_generator.js b/manifoldapi/static/js/record_generator.js similarity index 100% rename from manifold/static/js/record_generator.js rename to manifoldapi/static/js/record_generator.js diff --git a/myslice/settings.py b/myslice/settings.py index 4f655385..3d8559cb 100644 --- a/myslice/settings.py +++ b/myslice/settings.py @@ -31,6 +31,8 @@ if not os.path.isdir (os.path.join(HTTPROOT,"static")): HTTPROOT=ROOT DATAROOT=ROOT +print "HTTPROOT = ",HTTPROOT + if not os.path.isdir(ROOT): raise Exception,"Cannot find ROOT %s for unfold"%ROOT if not os.path.isdir(HTTPROOT): raise Exception,"Cannot find HTTPROOT %s for unfold"%HTTPROOT @@ -202,7 +204,7 @@ INSTALLED_APPS = [ # our django project 'myslice', # the core of the UI - 'auth', 'manifold', 'unfold', + 'auth', 'manifoldapi', 'unfold', # plugins 'plugins', # views - more or less stable diff --git a/myslice/urls.py b/myslice/urls.py index 1eac188f..67bb75d9 100644 --- a/myslice/urls.py +++ b/myslice/urls.py @@ -53,7 +53,7 @@ urls = [ (r'^logout/?$', 'auth.views.logout_user'), # # the manifold 
proxy - (r'^manifold/proxy/(?P\w+)/?$', 'manifold.manifoldproxy.proxy'), + (r'^manifold/proxy/(?P\w+)/?$', 'manifoldapi.manifoldproxy.proxy'), # # # RESTful interface diff --git a/plugins/topmenuvalidation/__init__.py b/plugins/topmenuvalidation/__init__.py index 2006e799..30998308 100644 --- a/plugins/topmenuvalidation/__init__.py +++ b/plugins/topmenuvalidation/__init__.py @@ -37,7 +37,7 @@ Query.get('ple:user').filter_by('user_hrn', '==', '$user_hrn').select('pi_author #################### here is an extract previously in topmenu.py #import json #from pprint import pprint -#from manifold.manifoldapi import execute_query +#from manifoldapi.manifoldapi import execute_query #from manifold.core.query import Query ### # ** Where am I a PI ** ### # For this we need to ask SFA (of all authorities) = PI function diff --git a/portal/about.py b/portal/about.py index ade6f6a5..49a890bb 100644 --- a/portal/about.py +++ b/portal/about.py @@ -8,7 +8,7 @@ from django.shortcuts import render from unfold.loginrequired import FreeAccessView -from manifold.manifoldresult import ManifoldResult +from manifoldapi.manifoldresult import ManifoldResult from ui.topmenu import topmenu_items, the_user from myslice.configengine import ConfigEngine diff --git a/portal/accountview.py b/portal/accountview.py index ce504ac2..62c6305b 100644 --- a/portal/accountview.py +++ b/portal/accountview.py @@ -1,7 +1,7 @@ from unfold.loginrequired import LoginRequiredAutoLogoutView # from manifold.core.query import Query -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query from portal.actions import manifold_update_user, manifold_update_account, manifold_add_account, manifold_delete_account, sfa_update_user # from unfold.page import Page diff --git a/portal/actions.py b/portal/actions.py index 12117055..36d019cf 100644 --- a/portal/actions.py +++ b/portal/actions.py @@ -1,6 +1,6 @@ from django.http import HttpResponse from manifold.core.query import Query -from manifold.manifoldapi import execute_query,execute_admin_query +from manifoldapi.manifoldapi import execute_query,execute_admin_query from portal.models import PendingUser, PendingSlice, PendingAuthority import json diff --git a/portal/dashboardview.py b/portal/dashboardview.py index 3910726c..a4e0ad45 100644 --- a/portal/dashboardview.py +++ b/portal/dashboardview.py @@ -1,6 +1,6 @@ import json from manifold.core.query import Query -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query from unfold.page import Page @@ -22,7 +22,7 @@ class DashboardView (LoginRequiredAutoLogoutView, ThemeView): # We might have slices on different registries with different user accounts # We note that this portal could be specific to a given registry, to which we register users, but i'm not sure that simplifies things # Different registries mean different identities, unless we identify via SFA HRN or have associated the user email to a single hrn - + print self.request #messages.info(self.request, 'You have logged in') page = Page(self.request) diff --git a/portal/django_passresetview.py b/portal/django_passresetview.py index 8e571998..1d84a20b 100644 --- a/portal/django_passresetview.py +++ b/portal/django_passresetview.py @@ -81,9 +81,9 @@ from django.http import HttpResponse, HttpResponseRedirec from unfold.loginrequired import FreeAccessView from ui.topmenu import topmenu_items_live -from manifold.manifoldapi import execute_admin_query +from manifoldapi.manifoldapi import execute_admin_query from 
manifold.core.query import Query -from portal.actions import manifold_update_user +from portal.actions import manifold_update_user from portal.forms import PassResetForm from portal.actions import manifold_update_user diff --git a/portal/documentationview.py b/portal/documentationview.py index 3d947737..6a75a629 100644 --- a/portal/documentationview.py +++ b/portal/documentationview.py @@ -8,7 +8,7 @@ from django.shortcuts import render from unfold.loginrequired import FreeAccessView -from manifold.manifoldresult import ManifoldResult +from manifoldapi.manifoldresult import ManifoldResult from ui.topmenu import topmenu_items, the_user from myslice.configengine import ConfigEngine diff --git a/portal/experimentview.py b/portal/experimentview.py index 2576e66f..70d42671 100644 --- a/portal/experimentview.py +++ b/portal/experimentview.py @@ -8,7 +8,7 @@ from django.shortcuts import render from unfold.loginrequired import FreeAccessView -from manifold.manifoldresult import ManifoldResult +from manifoldapi.manifoldresult import ManifoldResult from ui.topmenu import topmenu_items, the_user from myslice.configengine import ConfigEngine diff --git a/portal/homeview.py b/portal/homeview.py index 395cd965..8bd2c27a 100644 --- a/portal/homeview.py +++ b/portal/homeview.py @@ -8,7 +8,7 @@ from django.shortcuts import render from unfold.loginrequired import FreeAccessView -from manifold.manifoldresult import ManifoldResult +from manifoldapi.manifoldresult import ManifoldResult from ui.topmenu import topmenu_items, the_user from myslice.configengine import ConfigEngine diff --git a/portal/institution.py b/portal/institution.py index 31d2d599..b984c7ad 100644 --- a/portal/institution.py +++ b/portal/institution.py @@ -8,7 +8,7 @@ from django.shortcuts import render from unfold.loginrequired import FreeAccessView -from manifold.manifoldresult import ManifoldResult +from manifoldapi.manifoldresult import ManifoldResult from ui.topmenu import topmenu_items, the_user from myslice.configengine import ConfigEngine diff --git a/portal/joinview.py b/portal/joinview.py index 5d2d3639..caea6d28 100644 --- a/portal/joinview.py +++ b/portal/joinview.py @@ -13,7 +13,7 @@ from unfold.page import Page from unfold.loginrequired import FreeAccessView from ui.topmenu import topmenu_items_live -from manifold.manifoldapi import execute_admin_query +from manifoldapi.manifoldapi import execute_admin_query from manifold.core.query import Query from portal.models import PendingUser,PendingAuthority diff --git a/portal/manageuserview.py b/portal/manageuserview.py index f036f040..c5cda1d0 100644 --- a/portal/manageuserview.py +++ b/portal/manageuserview.py @@ -1,7 +1,7 @@ from unfold.loginrequired import LoginRequiredAutoLogoutView # from manifold.core.query import Query -from manifold.manifoldapi import execute_query, execute_admin_query +from manifoldapi.manifoldapi import execute_query, execute_admin_query from portal.actions import manifold_update_user, manifold_update_account, manifold_add_account, manifold_delete_account, sfa_update_user # from unfold.page import Page diff --git a/portal/registrationview.py b/portal/registrationview.py index e04652cf..d9ab6b5c 100644 --- a/portal/registrationview.py +++ b/portal/registrationview.py @@ -13,7 +13,7 @@ from unfold.page import Page from unfold.loginrequired import FreeAccessView from ui.topmenu import topmenu_items_live -from manifold.manifoldapi import execute_admin_query +from manifoldapi.manifoldapi import execute_admin_query from manifold.core.query import Query from 
portal.models import PendingUser diff --git a/portal/slicerequestview.py b/portal/slicerequestview.py index dbd53555..5124d9d2 100644 --- a/portal/slicerequestview.py +++ b/portal/slicerequestview.py @@ -2,10 +2,10 @@ from django.template.loader import render_to_string from django.shortcuts import render from django.core.mail import send_mail -from unfold.page import Page +from unfold.page import Page from manifold.core.query import Query -from manifold.manifoldapi import execute_admin_query, execute_query +from manifoldapi.manifoldapi import execute_admin_query, execute_query from portal.models import PendingSlice from portal.actions import authority_get_pi_emails diff --git a/portal/sliceview.py b/portal/sliceview.py index a48b846f..744d2b34 100644 --- a/portal/sliceview.py +++ b/portal/sliceview.py @@ -6,7 +6,7 @@ from unfold.loginrequired import LoginRequiredAutoLogoutView from unfold.page import Page from manifold.core.query import Query, AnalyzedQuery -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query from ui.topmenu import topmenu_items_live, the_user @@ -36,8 +36,8 @@ from theme import ThemeView tmp_default_slice='ple.upmc.myslicedemo' # temporary : turn off the users part to speed things up -#do_query_users=True -do_query_users=False +do_query_users=True +#do_query_users=False #do_query_leases=True do_query_leases=False @@ -112,7 +112,8 @@ class SliceView (LoginRequiredAutoLogoutView, ThemeView): # page.enqueue_query(query_user_all) # else: # print "authority of the user is not in local:user db" - query_user_all = Query.get('user').select(user_fields) + query_user_all = Query.get('user').select(user_fields).filter_by('parent_authority','==','ple.upmc') + page.enqueue_query(query_user_all) # query_user_all = None # ... 
and for the relations @@ -298,15 +299,16 @@ class SliceView (LoginRequiredAutoLogoutView, ThemeView): main_stack.insert(tab_users) tab_users.insert(QueryTable( - page = page, - title = 'Users List', - domid = 'users-list', + page = page, + title = 'Users List', + domid = 'users-list', # tab's sons preferably turn this off - togglable = False, + togglable = False, # this is the query at the core of the slice list - query = sq_user, + query = sq_user, query_all = query_user_all, - checkboxes = True, + init_key = 'user_hrn', + checkboxes = True, datatables_options = { 'iDisplayLength' : 25, 'bLengthChange' : True, diff --git a/portal/supportview.py b/portal/supportview.py index c16c89fd..11d74766 100644 --- a/portal/supportview.py +++ b/portal/supportview.py @@ -8,7 +8,7 @@ from django.shortcuts import render from unfold.loginrequired import FreeAccessView -from manifold.manifoldresult import ManifoldResult +from manifoldapi.manifoldresult import ManifoldResult from ui.topmenu import topmenu_items, the_user from myslice.configengine import ConfigEngine diff --git a/portal/templates/onelab/onelab_home-view.html b/portal/templates/onelab/onelab_home-view.html index 6777eb33..812b36fc 100644 --- a/portal/templates/onelab/onelab_home-view.html +++ b/portal/templates/onelab/onelab_home-view.html @@ -14,9 +14,9 @@ SUPPORT - + - + {% if person %} @@ -58,7 +58,7 @@ REQUESTS - + diff --git a/portal/usersview.py b/portal/usersview.py index 8d7f37e7..e9856c8c 100644 --- a/portal/usersview.py +++ b/portal/usersview.py @@ -2,7 +2,7 @@ from manifold.core.query import Query from unfold.page import Page from ui.topmenu import topmenu_items_live, the_user -from manifold.manifoldapi import execute_admin_query +from manifoldapi.manifoldapi import execute_admin_query from plugins.querytable import QueryTable from unfold.loginrequired import LoginRequiredAutoLogoutView diff --git a/portal/validationview.py b/portal/validationview.py index f38681cb..43b34e50 100644 --- a/portal/validationview.py +++ b/portal/validationview.py @@ -41,7 +41,7 @@ from plugins.raw import Raw from portal.models import PendingUser, PendingSlice from portal.actions import get_requests -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query from manifold.core.query import Query from unfold.page import Page from theme import ThemeView diff --git a/portal/views.py b/portal/views.py index f86cb4c2..d58593cf 100644 --- a/portal/views.py +++ b/portal/views.py @@ -41,7 +41,7 @@ from plugins.raw import Raw from portal.models import PendingUser, PendingSlice from portal.actions import get_requests -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query from manifold.core.query import Query from unfold.page import Page diff --git a/rest/__init__.py b/rest/__init__.py index 892e4ca5..71fe44dc 100644 --- a/rest/__init__.py +++ b/rest/__init__.py @@ -6,7 +6,7 @@ from unfold.loginrequired import LoginRequiredView from django.http import HttpResponse from manifold.core.query import Query, AnalyzedQuery -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query from string import join import json @@ -107,4 +107,4 @@ def post(request, object_type, object_name): pass def error(): - return HttpResponse(json.dumps({'error' : 'error message'}), content_type="application/json") \ No newline at end of file + return HttpResponse(json.dumps({'error' : 'error message'}), content_type="application/json") diff --git a/rest/get.py 
b/rest/get.py index c72b4e4d..d5866483 100644 --- a/rest/get.py +++ b/rest/get.py @@ -6,7 +6,7 @@ from unfold.loginrequired import LoginRequiredView from django.http import HttpResponse from manifold.core.query import Query, AnalyzedQuery -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query import json @@ -60,4 +60,4 @@ def slice(request, slice_name): # def get (self, request, name='default'): -# return HttpResponse() \ No newline at end of file +# return HttpResponse() diff --git a/rest/platform.py b/rest/platform.py index 87792a6d..bdb6e069 100644 --- a/rest/platform.py +++ b/rest/platform.py @@ -6,7 +6,7 @@ from unfold.loginrequired import LoginRequiredView from django.http import HttpResponse from manifold.core.query import Query, AnalyzedQuery -from manifold.manifoldapi import execute_query +from manifoldapi.manifoldapi import execute_query import json @@ -39,4 +39,4 @@ def get(request, platform_name): for r in response : response_data['data'].append([ r['platform'], r['platform_longname'], r['platform_url'], r['platform_description'], r['gateway_type'] ]) - return HttpResponse(json.dumps(response_data), content_type="application/json") \ No newline at end of file + return HttpResponse(json.dumps(response_data), content_type="application/json") diff --git a/sample/dashboardview.py b/sample/dashboardview.py index 18438d82..1f68cc66 100644 --- a/sample/dashboardview.py +++ b/sample/dashboardview.py @@ -9,7 +9,6 @@ from django.contrib.auth.decorators import login_required from unfold.page import Page from manifold.core.query import Query -#from manifold.manifoldquery import ManifoldQuery from plugins.stack import Stack from plugins.lists.slicelist import SliceList diff --git a/setup.py b/setup.py index d7008a38..f3531def 100644 --- a/setup.py +++ b/setup.py @@ -11,6 +11,7 @@ from distutils.core import setup # we don't have a final list so let's keep it simple for now packages= [ os.path.dirname(init) for init in (glob("*/__init__.py")+glob("*/*/__init__.py")) ] +print packages setup(packages = packages, # xxx somehow this does not seem to show up in debian packaging diff --git a/unfold/loginrequired.py b/unfold/loginrequired.py index 92f9ddb4..d4f9e984 100644 --- a/unfold/loginrequired.py +++ b/unfold/loginrequired.py @@ -4,7 +4,7 @@ from django.http import HttpResponseRedirect # for 'as_view' that we need to call in urls.py and the like from django.views.generic.base import TemplateView -from manifold.manifoldresult import ManifoldException +from manifoldapi.manifoldresult import ManifoldException ### # IMPORTANT NOTE diff --git a/unfold/page.py b/unfold/page.py index 9c2fdd7e..42a35065 100644 --- a/unfold/page.py +++ b/unfold/page.py @@ -6,7 +6,7 @@ import json from django.template.loader import render_to_string -from manifold.metadata import MetaData +from manifoldapi.metadata import MetaData from unfold.prelude import Prelude
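
For code outside this tree that still uses the old paths, the pattern in the hunks above is consistent: the query/core/util libraries keep coming from the installed Manifold package (manifold.core.*, manifold.util.*), while the Django-side helpers are renamed from manifold/ into the new local manifoldapi/ package. Below is a minimal sketch of the resulting import convention; it assumes the Manifold server package is installed next to this tree, and the list_users helper and the (request, query) argument order for execute_query are illustrative assumptions, not code taken from this commit.

    # Query and the other core classes are provided by the installed Manifold package
    from manifold.core.query import Query

    # the Django-side API wrapper now lives in the local manifoldapi/ folder
    from manifoldapi.manifoldapi import execute_query

    def list_users(request, fields=('user_hrn', 'user_email')):
        # hypothetical helper: build a Query the same way portal/sliceview.py does
        # and hand it to the Manifold API on behalf of the logged-in user
        query = Query.get('user').select(list(fields))
        return execute_query(request, query)

Anything still importing from manifold.manifoldapi, manifold.manifoldresult or manifold.metadata will fail at import time after this commit and needs the same one-line rename applied in the hunks above.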