--- /dev/null
+from types import StringTypes
+try:
+ set
+except NameError:
+ from sets import Set
+ set = Set
+
+import time
+import datetime # Jordan
+#from manifold.util.parameter import Parameter, Mixed, python_type
+from manifold.util.predicate import Predicate, eq
+from itertools import ifilter
+
+class Filter(set):
+ """
+ A filter is a set of predicates
+ """
+
+ @staticmethod
+ def from_list(l):
+ f = Filter()
+ try:
+ for element in l:
+ f.add(Predicate(*element))
+ except Exception, e:
+ print "Error in setting Filter from list", e
+ return None
+ return f
+
+ @staticmethod
+ def from_dict(d):
+ f = Filter()
+ for key, value in d.items():
+ if key[0] in Predicate.operators.keys():
+ f.add(Predicate(key[1:], key[0], value))
+ else:
+ f.add(Predicate(key, '=', value))
+ return f
+
+ def filter_by(self, predicate):
+ self.add(predicate)
+ return self
+
+ def __str__(self):
+ return '<Filter: %s>' % ' AND '.join([str(pred) for pred in self])
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __key(self):
+ return tuple([hash(pred) for pred in self])
+
+ def __hash__(self):
+ return hash(self.__key())
+
+ def __additem__(self, value):
+ if value.__class__ != Predicate:
+ raise TypeError("Element of class Predicate expected, received %s" % value.__class__.__name__)
+ set.__additem__(self, value)
+
+ def keys(self):
+ return set([x.key for x in self])
+
+ def has(self, key):
+ for x in self:
+ if x.key == key:
+ return True
+ return False
+
+ def has_op(self, key, op):
+ for x in self:
+ if x.key == key and x.op == op:
+ return True
+ return False
+
+ def has_eq(self, key):
+ return self.has_op(key, eq)
+
+ def get(self, key):
+ ret = []
+ for x in self:
+ if x.key == key:
+ ret.append(x)
+ return ret
+
+ def delete(self, key):
+ to_del = []
+ for x in self:
+ if x.key == key:
+ to_del.append(x)
+ for x in to_del:
+ self.remove(x)
+
+ #self = filter(lambda x: x.key != key, self)
+
+ def get_op(self, key, op):
+ for x in self:
+ if x.key == key and x.op == op:
+ return x.value
+ raise KeyError, key
+
+ def get_eq(self, key):
+ return self.get_op(key, eq)
+
+ def set_op(self, key, op, value):
+ for x in self:
+ if x.key == key and x.op == op:
+ x.value = value
+ return
+ raise KeyError, key
+
+ def set_eq(self, key, value):
+ return self.set_op(key, eq, value)
+
+ def get_predicates(self, key):
+ # XXX Would deserve returning a filter (cf usage in SFA gateway)
+ ret = []
+ for x in self:
+ if x.key == key:
+ ret.append(x)
+ return ret
+
+# def filter(self, dic):
+# # We go through every filter sequentially
+# for predicate in self:
+# print "predicate", predicate
+# dic = predicate.filter(dic)
+# return dic
+
+ def match(self, dic):
+ for predicate in self:
+ if not predicate.match(dic, ignore_missing=True):
+ return False
+ return True
+
+ def filter(self, l):
+ output = []
+ for x in l:
+ if self.match(x):
+ output.append(x)
+ return output
+
+ def to_list(self):
+ return [list(pred.get_str_tuple()) for pred in self]
+
+#class OldFilter(Parameter, dict):
+# """
+# A type of parameter that represents a filter on one or more
+# columns of a database table.
+# Special features provide support for negation, upper and lower bounds,
+# as well as sorting and clipping.
+#
+#
+# fields should be a dictionary of field names and types.
+# As of PLCAPI-4.3-26, we provide support for filtering on
+# sequence types as well, with the special '&' and '|' modifiers.
+# example : fields = {'node_id': Parameter(int, "Node identifier"),
+# 'hostname': Parameter(int, "Fully qualified hostname", max = 255),
+# ...}
+#
+#
+# filter should be a dictionary of field names and values
+# representing the criteria for filtering.
+# example : filter = { 'hostname' : '*.edu' , site_id : [34,54] }
+# Whether the filter represents an intersection (AND) or a union (OR)
+# of these criteria is determined by the join_with argument
+# provided to the sql method below
+#
+# Special features:
+#
+# * a field starting with '&' or '|' should refer to a sequence type
+# the semantic is then that the object value (expected to be a list)
+# should contain all (&) or any (|) value specified in the corresponding
+# filter value. See other examples below.
+# example : filter = { '|role_ids' : [ 20, 40 ] }
+# example : filter = { '|roles' : ['tech', 'pi'] }
+# example : filter = { '&roles' : ['admin', 'tech'] }
+# example : filter = { '&roles' : 'tech' }
+#
+# * a field starting with the ~ character means negation.
+# example : filter = { '~peer_id' : None }
+#
+# * a field starting with < [ ] or > means lower than or greater than
+# < > uses strict comparison
+# [ ] is for using <= or >= instead
+# example : filter = { ']event_id' : 2305 }
+# example : filter = { '>time' : 1178531418 }
+# in this example the integer value denotes a unix timestamp
+#
+# * if a value is a sequence type, then it should represent
+# a list of possible values for that field
+# example : filter = { 'node_id' : [12,34,56] }
+#
+# * a (string) value containing either a * or a % character is
+# treated as a (sql) pattern; * are replaced with % that is the
+# SQL wildcard character.
+# example : filter = { 'hostname' : '*.jp' }
+#
+# * the filter's keys starting with '-' are special and relate to sorting and clipping
+# * '-SORT' : a field name, or an ordered list of field names that are used for sorting
+# these fields may start with + (default) or - for denoting increasing or decreasing order
+# example : filter = { '-SORT' : [ '+node_id', '-hostname' ] }
+# * '-OFFSET' : the number of first rows to be omitted
+# * '-LIMIT' : the amount of rows to be returned
+# example : filter = { '-OFFSET' : 100, '-LIMIT':25}
+#
+# Here are a few realistic examples
+#
+# GetNodes ( { 'node_type' : 'regular' , 'hostname' : '*.edu' , '-SORT' : 'hostname' , '-OFFSET' : 30 , '-LIMIT' : 25 } )
+# would return regular (usual) nodes matching '*.edu' in alphabetical order from 31th to 55th
+#
+# GetPersons ( { '|role_ids' : [ 20 , 40] } )
+# would return all persons that have either pi (20) or tech (40) roles
+#
+# GetPersons ( { '&role_ids' : 10 } )
+# GetPersons ( { '&role_ids' : 10 } )
+# GetPersons ( { '|role_ids' : [ 10 ] } )
+# GetPersons ( { '|role_ids' : [ 10 ] } )
+# all 4 forms are equivalent and would return all admin users in the system
+# """
+#
+# def __init__(self, fields = {}, filter = {}, doc = "Attribute filter"):
+# # Store the filter in our dict instance
+# dict.__init__(self, filter)
+#
+# # Declare ourselves as a type of parameter that can take
+# # either a value or a list of values for each of the specified
+# # fields.
+# self.fields = dict ( [ ( field, Mixed (expected, [expected]))
+# for (field,expected) in fields.iteritems() ] )
+#
+# # Null filter means no filter
+# Parameter.__init__(self, self.fields, doc = doc, nullok = True)
+#
+# def sql(self, api, join_with = "AND"):
+# """
+# Returns a SQL conditional that represents this filter.
+# """
+#
+# # So that we always return something
+# if join_with == "AND":
+# conditionals = ["True"]
+# elif join_with == "OR":
+# conditionals = ["False"]
+# else:
+# assert join_with in ("AND", "OR")
+#
+# # init
+# sorts = []
+# clips = []
+#
+# for field, value in self.iteritems():
+# # handle negation, numeric comparisons
+# # simple, 1-depth only mechanism
+#
+# modifiers={'~' : False,
+# '<' : False, '>' : False,
+# '[' : False, ']' : False,
+# '-' : False,
+# '&' : False, '|' : False,
+# '{': False ,
+# }
+# def check_modifiers(field):
+# if field[0] in modifiers.keys():
+# modifiers[field[0]] = True
+# field = field[1:]
+# return check_modifiers(field)
+# return field
+# field = check_modifiers(field)
+#
+# # filter on fields
+# if not modifiers['-']:
+# if field not in self.fields:
+# raise PLCInvalidArgument, "Invalid filter field '%s'" % field
+#
+# # handling array fields always as compound values
+# if modifiers['&'] or modifiers['|']:
+# if not isinstance(value, (list, tuple, set)):
+# value = [value,]
+#
+# if isinstance(value, (list, tuple, set)):
+# # handling filters like '~slice_id':[]
+# # this should return true, as it's the opposite of 'slice_id':[] which is false
+# # prior to this fix, 'slice_id':[] would have returned ``slice_id IN (NULL) '' which is unknown
+# # so it worked by coincidence, but the negation '~slice_ids':[] would return false too
+# if not value:
+# if modifiers['&'] or modifiers['|']:
+# operator = "="
+# value = "'{}'"
+# else:
+# field=""
+# operator=""
+# value = "FALSE"
+# else:
+# value = map(str, map(api.db.quote, value))
+# if modifiers['&']:
+# operator = "@>"
+# value = "ARRAY[%s]" % ", ".join(value)
+# elif modifiers['|']:
+# operator = "&&"
+# value = "ARRAY[%s]" % ", ".join(value)
+# else:
+# operator = "IN"
+# value = "(%s)" % ", ".join(value)
+# else:
+# if value is None:
+# operator = "IS"
+# value = "NULL"
+# elif isinstance(value, StringTypes) and \
+# (value.find("*") > -1 or value.find("%") > -1):
+# operator = "LIKE"
+# # insert *** in pattern instead of either * or %
+# # we dont use % as requests are likely to %-expansion later on
+# # actual replacement to % done in PostgreSQL.py
+# value = value.replace ('*','***')
+# value = value.replace ('%','***')
+# value = str(api.db.quote(value))
+# else:
+# operator = "="
+# if modifiers['<']:
+# operator='<'
+# if modifiers['>']:
+# operator='>'
+# if modifiers['[']:
+# operator='<='
+# if modifiers[']']:
+# operator='>='
+# #else:
+# # value = str(api.db.quote(value))
+# # jordan
+# if isinstance(value, StringTypes) and value[-2:] != "()": # XXX
+# value = str(api.db.quote(value))
+# if isinstance(value, datetime.datetime):
+# value = str(api.db.quote(str(value)))
+#
+# #if prefix:
+# # field = "%s.%s" % (prefix,field)
+# if field:
+# clause = "\"%s\" %s %s" % (field, operator, value)
+# else:
+# clause = "%s %s %s" % (field, operator, value)
+#
+# if modifiers['~']:
+# clause = " ( NOT %s ) " % (clause)
+#
+# conditionals.append(clause)
+# # sorting and clipping
+# else:
+# if field not in ('SORT','OFFSET','LIMIT'):
+# raise PLCInvalidArgument, "Invalid filter, unknown sort and clip field %r"%field
+# # sorting
+# if field == 'SORT':
+# if not isinstance(value,(list,tuple,set)):
+# value=[value]
+# for field in value:
+# order = 'ASC'
+# if field[0] == '+':
+# field = field[1:]
+# elif field[0] == '-':
+# field = field[1:]
+# order = 'DESC'
+# if field not in self.fields:
+# raise PLCInvalidArgument, "Invalid field %r in SORT filter"%field
+# sorts.append("%s %s"%(field,order))
+# # clipping
+# elif field == 'OFFSET':
+# clips.append("OFFSET %d"%value)
+# # clipping continued
+# elif field == 'LIMIT' :
+# clips.append("LIMIT %d"%value)
+#
+# where_part = (" %s " % join_with).join(conditionals)
+# clip_part = ""
+# if sorts:
+# clip_part += " ORDER BY " + ",".join(sorts)
+# if clips:
+# clip_part += " " + " ".join(clips)
+## print 'where_part=',where_part,'clip_part',clip_part
+# return (where_part,clip_part)
+#
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Query representation
+#
+# Copyright (C) UPMC Paris Universitas
+# Authors:
+# Jordan Augé <jordan.auge@lip6.fr>
+# Marc-Olivier Buob <marc-olivier.buob@lip6.fr>
+# Thierry Parmentelat <thierry.parmentelat@inria.fr>
+
+from types import StringTypes
+from manifold.core.filter import Filter, Predicate
+from manifold.util.frozendict import frozendict
+from manifold.util.type import returns, accepts
+import copy
+
+import json
+import uuid
+
def uniqid ():
    """Return a fresh 32-character hexadecimal identifier."""
    identifier = uuid.uuid4()
    return identifier.hex
+
# Module-level switch enabling to_json tracing.
# BUG FIX: removed the dead 'debug=False' assignment that was immediately
# overwritten on the next line.
debug = True

class ParameterError(StandardError):
    """Raised when a Query receives invalid or unsupported parameters."""
    pass
+
+class Query(object):
+ """
+ Implements a TopHat query.
+
+ We assume this is a correct DAG specification.
+
+ 1/ A field designates several tables = OR specification.
+ 2/ The set of fields specifies a AND between OR clauses.
+ """
+
+ #---------------------------------------------------------------------------
+ # Constructor
+ #---------------------------------------------------------------------------
+
+ def __init__(self, *args, **kwargs):
+
+ self.query_uuid = uniqid()
+
+ # Initialize optional parameters
+ self.clear()
+
+ #l = len(kwargs.keys())
+ len_args = len(args)
+
+ if len(args) == 1:
+ if isinstance(args[0], dict):
+ kwargs = args[0]
+ args = []
+
+ # Initialization from a tuple
+
+ if len_args in range(2, 7) and type(args) == tuple:
+ # Note: range(x,y) <=> [x, y[
+
+ # XXX UGLY
+ if len_args == 3:
+ self.action = 'get'
+ self.params = {}
+ self.timestamp = 'now'
+ self.object, self.filters, self.fields = args
+ elif len_args == 4:
+ self.object, self.filters, self.params, self.fields = args
+ self.action = 'get'
+ self.timestamp = 'now'
+ else:
+ self.action, self.object, self.filters, self.params, self.fields, self.timestamp = args
+
+ # Initialization from a dict
+ elif "object" in kwargs:
+ if "action" in kwargs:
+ self.action = kwargs["action"]
+ del kwargs["action"]
+ else:
+ print "W: defaulting to get action"
+ self.action = "get"
+
+
+ self.object = kwargs["object"]
+ del kwargs["object"]
+
+ if "filters" in kwargs:
+ self.filters = kwargs["filters"]
+ del kwargs["filters"]
+ else:
+ self.filters = Filter([])
+
+ if "fields" in kwargs:
+ self.fields = set(kwargs["fields"])
+ del kwargs["fields"]
+ else:
+ self.fields = set([])
+
+ # "update table set x = 3" => params == set
+ if "params" in kwargs:
+ self.params = kwargs["params"]
+ del kwargs["params"]
+ else:
+ self.params = {}
+
+ if "timestamp" in kwargs:
+ self.timestamp = kwargs["timestamp"]
+ del kwargs["timestamp"]
+ else:
+ self.timestamp = "now"
+
+ if kwargs:
+ raise ParameterError, "Invalid parameter(s) : %r" % kwargs.keys()
+ #else:
+ # raise ParameterError, "No valid constructor found for %s : args = %r" % (self.__class__.__name__, args)
+
+ if not self.filters: self.filters = Filter([])
+ if not self.params: self.params = {}
+ if not self.fields: self.fields = set([])
+ if not self.timestamp: self.timestamp = "now"
+
+ if isinstance(self.filters, list):
+ f = self.filters
+ self.filters = Filter([])
+ for x in f:
+ pred = Predicate(x)
+ self.filters.add(pred)
+
+ if isinstance(self.fields, list):
+ self.fields = set(self.fields)
+
+ for field in self.fields:
+ if not isinstance(field, StringTypes):
+ raise TypeError("Invalid field name %s (string expected, got %s)" % (field, type(field)))
+
+ #---------------------------------------------------------------------------
+ # Helpers
+ #---------------------------------------------------------------------------
+
+ def copy(self):
+ return copy.deepcopy(self)
+
+ def clear(self):
+ self.action = 'get'
+ self.object = None
+ self.filters = Filter([])
+ self.params = {}
+ self.fields = set([])
+ self.timestamp = "now"
+ self.timestamp = 'now' # ignored for now
+
+ @returns(StringTypes)
+ def __str__(self):
+ return "SELECT %s FROM %s WHERE %s" % (
+ ", ".join(self.get_select()) if self.get_select() else '*',
+ self.get_from(),
+ self.get_where()
+ )
+
+ @returns(StringTypes)
+ def __repr__(self):
+ return self.__str__()
+
+ def __key(self):
+ return (self.action, self.object, self.filters, frozendict(self.params), frozenset(self.fields))
+
+ def __hash__(self):
+ print "HASH", self.__key()
+ return hash(self.__key())
+
+ #---------------------------------------------------------------------------
+ # Conversion
+ #---------------------------------------------------------------------------
+
+ def to_dict(self):
+ return {
+ 'action': self.action,
+ 'object': self.object,
+ 'timestamp': self.timestamp,
+ 'filters': self.filters,
+ 'params': self.params,
+ 'fields': self.fields
+ }
+
+ def to_json (self, analyzed_query=None):
+ query_uuid=self.query_uuid
+ a=self.action
+ s=self.object
+ t=self.timestamp
+ f=json.dumps (self.filters.to_list())
+ p=json.dumps (self.params)
+ c=json.dumps (list(self.fields))
+ # xxx unique can be removed, but for now we pad the js structure
+ unique=0
+
+ if not analyzed_query:
+ aq = 'null'
+ else:
+ aq = analyzed_query.to_json()
+ sq="{}"
+
+ result= """ new ManifoldQuery('%(a)s', '%(s)s', '%(t)s', %(f)s, %(p)s, %(c)s, %(unique)s, '%(query_uuid)s', %(aq)s, %(sq)s)"""%locals()
+ if debug: print 'ManifoldQuery.to_json:',result
+ return result
+
+ # this builds a ManifoldQuery object from a dict as received from javascript through its ajax request
+ # we use a json-encoded string - see manifold.js for the sender part
+ # e.g. here's what I captured from the server's output
+ # manifoldproxy.proxy: request.POST <QueryDict: {u'json': [u'{"action":"get","object":"resource","timestamp":"latest","filters":[["slice_hrn","=","ple.inria.omftest"]],"params":[],"fields":["hrn","hostname"],"unique":0,"query_uuid":"436aae70a48141cc826f88e08fbd74b1","analyzed_query":null,"subqueries":{}}']}>
+ def fill_from_POST (self, POST_dict):
+ try:
+ json_string=POST_dict['json']
+ dict=json.loads(json_string)
+ for (k,v) in dict.iteritems():
+ setattr(self,k,v)
+ except:
+ print "Could not decode incoming ajax request as a Query, POST=",POST_dict
+ if (debug):
+ import traceback
+ traceback.print_exc()
+
+ #---------------------------------------------------------------------------
+ # Accessors
+ #---------------------------------------------------------------------------
+
+ @returns(StringTypes)
+ def get_action(self):
+ return self.action
+
+ @returns(frozenset)
+ def get_select(self):
+ return frozenset(self.fields)
+
+ @returns(StringTypes)
+ def get_from(self):
+ return self.object
+
+ @returns(Filter)
+ def get_where(self):
+ return self.filters
+
+ @returns(dict)
+ def get_params(self):
+ return self.params
+
+ @returns(StringTypes)
+ def get_timestamp(self):
+ return self.timestamp
+
+#DEPRECATED#
+#DEPRECATED# def make_filters(self, filters):
+#DEPRECATED# return Filter(filters)
+#DEPRECATED#
+#DEPRECATED# def make_fields(self, fields):
+#DEPRECATED# if isinstance(fields, (list, tuple)):
+#DEPRECATED# return set(fields)
+#DEPRECATED# else:
+#DEPRECATED# raise Exception, "Invalid field specification"
+
+ #---------------------------------------------------------------------------
+ # LINQ-like syntax
+ #---------------------------------------------------------------------------
+
+ @classmethod
+ def action(self, action, object):
+ query = Query()
+ query.action = 'get'
+ query.object = object
+ return query
+
+ @classmethod
+ def get(self, object): return self.action('get', object)
+
+ @classmethod
+ def update(self, object): return self.action('update', object)
+
+ @classmethod
+ def create(self, object): return self.action('create', object)
+
+ @classmethod
+ def delete(self, object): return self.action('delete', object)
+
+ @classmethod
+ def execute(self, object): return self.action('execute', object)
+
+ def filter_by(self, *args):
+ if len(args) == 1:
+ filters = args[0]
+ if not isinstance(filters, (set, list, tuple, Filter)):
+ filters = [filters]
+ for predicate in filters:
+ self.filters.add(predicate)
+ elif len(args) == 3:
+ predicate = Predicate(*args)
+ self.filters.add(predicate)
+ else:
+ raise Exception, 'Invalid expression for filter'
+ return self
+
+ def select(self, fields):
+ if not isinstance(fields, (set, list, tuple)):
+ fields = [fields]
+ for field in fields:
+ self.fields.add(field)
+ return self
+
+ def set(self, params):
+ self.params.update(params)
+ return self
+
+class AnalyzedQuery(Query):
+
+ # XXX we might need to propagate special parameters sur as DEBUG, etc.
+
+ def __init__(self, query=None):
+ self.clear()
+ if query:
+ self.query_uuid = query.query_uuid
+ self.analyze(query)
+ else:
+ self.query_uuid = uniqid()
+
+ @returns(StringTypes)
+ def __str__(self):
+ out = []
+ out.append("SELECT %s FROM %s WHERE %s" % (
+ ", ".join(self.get_select()),
+ self.get_from(),
+ self.get_where()
+ ))
+ cpt = 1
+ for method, subquery in self.subqueries():
+ out.append(' [SQ #%d : %s] %s' % (cpt, method, str(subquery)))
+ cpt += 1
+
+ return "\n".join(out)
+
+ def clear(self):
+ super(AnalyzedQuery, self).clear()
+ self._subqueries = {}
+
+ def subquery(self, method):
+ # Allows for the construction of a subquery
+ if not method in self._subqueries:
+ analyzed_query = AnalyzedQuery()
+ analyzed_query.action = self.action
+ analyzed_query.object = method
+ self._subqueries[method] = analyzed_query
+ return self._subqueries[method]
+
+ def subqueries(self):
+ for method, subquery in self._subqueries.iteritems():
+ yield (method, subquery)
+
+ def filter_by(self, filters):
+ if not filters: return self
+ if not isinstance(filters, (set, list, tuple, Filter)):
+ filters = [filters]
+ for predicate in filters:
+ if '.' in predicate.key:
+ method, subkey = pred.key.split('.', 1)
+ sub_pred = Predicate(subkey, pred.op, pred.value)
+ self.subquery(method).filter_by(sub_pred)
+ else:
+ super(AnalyzedQuery, self).filter_by(predicate)
+ return self
+
+ def select(self, fields):
+ if not isinstance(fields, (set, list, tuple)):
+ fields = [fields]
+ for field in fields:
+ if '.' in field:
+ method, subfield = field.split('.', 1)
+ self.subquery(method).select(subfield)
+ else:
+ super(AnalyzedQuery, self).select(field)
+ return self
+
+ def set(self, params):
+ for param, value in self.params.items():
+ if '.' in param:
+ method, subparam = param.split('.', 1)
+ self.subquery(method).set({subparam: value})
+ else:
+ super(AnalyzedQuery, self).set({param: value})
+ return self
+
+ def analyze(self, query):
+ self.clear()
+ self.action = query.action
+ self.object = query.object
+ self.filter_by(query.filters)
+ self.set(query.params)
+ self.select(query.fields)
+
+ def to_json (self):
+ query_uuid=self.query_uuid
+ a=self.action
+ s=self.object
+ t=self.timestamp
+ f=json.dumps (self.filters.to_list())
+ p=json.dumps (self.params)
+ c=json.dumps (list(self.fields))
+ # xxx unique can be removed, but for now we pad the js structure
+ unique=0
+
+ aq = 'null'
+ sq=", ".join ( [ "'%s':%s" % (subject, subquery.to_json())
+ for (subject, subquery) in self._subqueries.iteritems()])
+ sq="{%s}"%sq
+
+ result= """ new ManifoldQuery('%(a)s', '%(s)s', '%(t)s', %(f)s, %(p)s, %(c)s, %(unique)s, '%(query_uuid)s', %(aq)s, %(sq)s)"""%locals()
+ if debug: print 'ManifoldQuery.to_json:',result
+ return result
var field = k.substr(pos+1);
if (!q.subqueries[subject]) {
q.subqueries[subject] = new ManifoldQuery();
- q.subqueries[subject].action = this.action;
- q.subqueries[subject].subject = this.subject;
- q.subqueries[subject].timestamp = this.timestamp;
+ q.subqueries[subject].action = q.action;
+ q.subqueries[subject].subject = subject;
+ q.subqueries[subject].timestamp = q.timestamp;
}
q.subqueries[subject].filters.push(Array(field, op, v));
} else {
- q.filters.push(this.filter);
+ q.filters.push(filter);
}
});
var field = param.substr(pos+1);
if (!q.subqueries[subject]) {
q.subqueries[subject] = new ManifoldQuery();
- q.subqueries[subject].action = this.action;
- q.subqueries[subject].subject = this.subject;
- q.subqueries[subject].timestamp = this.timestamp;
+ q.subqueries[subject].action = q.action;
+ q.subqueries[subject].subject = subject;
+ q.subqueries[subject].timestamp = q.timestamp;
}
q.subqueries[subject].params[field] = value;
} else {
var field = v.substr(pos+1);
if (!q.subqueries[subject]) {
q.subqueries[subject] = new ManifoldQuery();
- q.subqueries[subject].action = this.action;
- q.subqueries[subject].subject = this.subject;
- q.subqueries[subject].timestamp = this.timestamp;
+ q.subqueries[subject].action = q.action;
+ q.subqueries[subject].subject = subject;
+ q.subqueries[subject].timestamp = q.timestamp;
}
q.subqueries[subject].fields.push(field);
} else {
this.unique = unique;
this.query_uuid = query_uuid;
- if (typeof analyzed_query == "undefined")
+
+ if (typeof aq == "undefined")
this.analyzed_query = null;
else
this.analyzed_query = aq;
- if (typeof subqueries == "undefined")
+ if (typeof sq == "undefined")
this.subqueries = {};
else
this.subqueries = sq;
if (query.analyzed_query == null) {
query.analyze_subqueries();
}
- manifold.all_queries[query.query_uuid]=query;
+ manifold.all_queries[query.query_uuid]=query;
},
/*!
})
},
+
    /*!
     * Returns whether a query expects a unique results.
     * This is the case when the filters contain a key of the object
     * \fn query_expects_unique_result(query)
     * \memberof Manifold
     * \param ManifoldQuery query Query for which we are testing whether it expects a unique result
     */
    query_expects_unique_result: function(query) {
        /* XXX we need functions to query metadata */
        //var keys = MANIFOLD_METADATA[query.object]['keys']; /* array of array of field names */
        /* TODO requires keys in metadata */
        // NOTE(review): stubbed to true until metadata keys are available —
        // every query is currently treated as returning a unique result.
        return true;
    },
+
    /*!
     * Publish result
     * \fn publish_result(query, results)
     * \memberof Manifold
     * \param ManifoldQuery query Query which has received results
     * \param array results results corresponding to query
     */
    publish_result: function(query, result) {
        /* Publish an update announce */
        // Subscribers listen on a per-query channel keyed by the query uuid.
        var channel="/results/" + query.query_uuid + "/changed";
        if (manifold.asynchroneous_debug) messages.debug("publishing result on " + channel);
        jQuery.publish(channel, [result, query]);
    },
+
    /*!
     * Recursively publish result
     * \fn publish_result_rec(query, result)
     * \memberof Manifold
     * \param ManifoldQuery query Query which has received result
     * \param array result result corresponding to query
     */
    publish_result_rec: function(query, result) {
        /* If the result is not unique, only publish the top query;
         * otherwise, publish the main object as well as subqueries
         * XXX how much recursive are we ?
         */
        if (manifold.query_expects_unique_result(query)) {
            /* Also publish subqueries */
            jQuery.each(query.subqueries, function(object, subquery) {
                // NOTE(review): assumes result[0] holds the unique record
                // whose per-object entries feed the subqueries — confirm
                // against the backend result shape.
                manifold.publish_result_rec(subquery, result[0][object]);
                /* TODO remove object from result */
            });
        }
        manifold.publish_result(query, result);
    },
+
// if set domid allows the result to be directed to just one plugin
// most of the time publish_uuid will be query.query_uuid
- // however in some cases we wish to publish the results under a different uuid
- // e.g. an updater wants to publish its results as if from the original (get) query
+ // however in some cases we wish to publish the result under a different uuid
+ // e.g. an updater wants to publish its result as if from the original (get) query
asynchroneous_success : function (data, query, publish_uuid, domid) {
// xxx should have a nicer declaration of that enum in sync with the python code somehow
if (data.code == 2) { // ERROR
jQuery.publish("/results/" + publish_uuid + "/failed", [data.code, data.description] );
}
// once everything is checked we can use the 'value' part of the manifoldresult
- var value=data.value;
- if (value) {
+ var result=data.value;
+ if (result) {
if (!!domid) {
/* Directly inform the requestor */
- if (manifold.asynchroneous_debug) messages.debug("directing results to " + domid);
- jQuery('#' + domid).trigger('results', [value]);
+ if (manifold.asynchroneous_debug) messages.debug("directing result to " + domid);
+ jQuery('#' + domid).trigger('results', [result]);
} else {
- /* Publish an update announce */
- var channel="/results/" + publish_uuid + "/changed";
- if (manifold.asynchroneous_debug) messages.debug("publishing results on " + channel);
- jQuery.publish(channel, [value, query]);
+ /* XXX Jordan XXX I don't need publish_uuid here... What is it used for ? */
+ /* query is the query we sent to the backend; we need to find the
+ * corresponding analyzed_query in manifold.all_queries
+ */
+ tmp_query = manifold.find_query(query.query_uuid);
+ manifold.publish_result_rec(tmp_query.analyzed_query, result);
}
}
--- /dev/null
+import copy
+
class frozendict(dict):
    """
    An immutable, hashable dictionary.

    Nested dict values are recursively converted to frozendicts and list
    values to tuples (freezing any dicts they contain), so an instance can
    be hashed and used as a dict key or set element.  Any attempt to mutate
    the mapping raises AttributeError.
    """

    def _blocked_attribute(obj):
        # Shared stand-in for every mutating dict method.
        raise AttributeError("A frozendict cannot be modified.")
    _blocked_attribute = property(_blocked_attribute)

    __delitem__ = __setitem__ = clear = _blocked_attribute
    pop = popitem = setdefault = update = _blocked_attribute

    def __new__(cls, *args, **kw):
        # All content is installed in __new__ (via dict.__init__) because by
        # the time __init__ runs, mutation is already blocked.
        new = dict.__new__(cls)

        frozen_args = []
        for arg in args:
            if isinstance(arg, dict):
                # Work on a shallow copy so the caller's dict is untouched.
                arg = copy.copy(arg)
                for key, value in arg.items():
                    if isinstance(value, dict):
                        arg[key] = frozendict(value)
                    elif isinstance(value, list):
                        arg[key] = tuple(
                            frozendict(item) if isinstance(item, dict) else item
                            for item in value
                        )
            frozen_args.append(arg)

        dict.__init__(new, *frozen_args, **kw)
        return new

    def __init__(self, *args, **kw):
        # Intentionally empty: content was installed in __new__.
        pass

    def __hash__(self):
        # Hash is computed lazily and memoized on the instance (attribute
        # assignment is not blocked, only item mutation is).
        try:
            return self._cached_hash
        except AttributeError:
            h = self._cached_hash = hash(tuple(sorted(self.items())))
            return h

    def __repr__(self):
        return "frozendict(%s)" % dict.__repr__(self)
--- /dev/null
+import os, glob, inspect
+
+
# Define the inclusion operator.
# This class is used purely as an operator *token* alongside the functions
# imported from the ``operator`` module (eq, lt, ...): predicate code compares
# ``self.op == contains`` by identity and never instantiates it.
class contains(type): pass
+
def find_local_modules(filepath):
    """
    Return the names (without the '.py' extension) of all Python modules
    located in the same directory as *filepath*, excluding '__init__'.

    filepath -- path of a file inside the directory to scan, typically a
                module's own __file__.
    """
    modules = []
    # BUGFIX: the previous ``os.path.dirname(filepath) + "/*.py"`` globbed
    # "/*.py" (the filesystem root) when filepath had no directory component,
    # because dirname() returns "" in that case.  os.path.join handles it.
    pattern = os.path.join(os.path.dirname(filepath), "*.py")
    for path in glob.glob(pattern):
        name = os.path.splitext(os.path.basename(path))[0]
        if name != '__init__':
            modules.append(name)
    return modules
--- /dev/null
+from operator import (
+ and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg
+ )
+from manifold.util.misc import contains
+from types import StringTypes
+
+# New modifier: { contains
+class Predicate:
+
+ operators = {
+ "==" : eq,
+ "!=" : ne,
+ "<" : lt,
+ "<=" : le,
+ ">" : gt,
+ ">=" : ge,
+ "&&" : and_,
+ "||" : or_,
+ "contains" : contains
+ }
+
+ operators_short = {
+ "=" : eq,
+ "~" : ne,
+ "<" : lt,
+ "[" : le,
+ ">" : gt,
+ "]" : ge,
+ "&" : and_,
+ "|" : or_,
+ "}" : contains
+ }
+
+ def __init__(self, *args, **kwargs):
+ """
+ \brief Build a predicate (left, operator, right)
+ \param You can pass:
+ - three args (left, operator, right)
+ - one argument (list or tuple) containing three elements (variable, operator, value)
+ "operator" is a String defined in operators or in operators_short and refers
+ tMao a binary operation.
+ "left" and "right" refers to a variable/constant involved in the Predicate.
+ """
+ if len(args) == 3:
+ key, op, value = args
+ elif len(args) == 1 and isinstance(args[0], (tuple,list)) and len(args[0]) == 3:
+ key, op, value = args[0]
+ elif len(args) == 1 and isinstance(args[0], Predicate):
+ key, op, value = args[0].get_tuple()
+ else:
+ raise Exception, "Bad initializer for Predicate"
+ self.key = key
+ if op in self.operators.keys():
+ self.op = self.operators[op]
+ elif op in self.operators_short.keys():
+ self.op = self.operators_short[op]
+ else:
+ self.op = op
+ if isinstance(value, (list, set)):
+ self.value = tuple(value)
+ else:
+ self.value = value
+
+ def __str__(self):
+ return "Pred(%s, %s, %s)" % self.get_str_tuple()
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __hash__(self):
+ return hash(self.get_tuple())
+
+ def get_tuple(self):
+ return (self.key, self.op, self.value)
+
+ def get_str_op(self):
+ op_str = [s for s, op in self.operators.iteritems() if op == self.op]
+ return op_str[0]
+
+ def get_str_tuple(self):
+ return (self.key, self.get_str_op(), self.value,)
+
+ def match(self, dic, ignore_missing=False):
+ if isinstance(self.key, tuple):
+ print "PREDICATE MATCH", self.key
+ print dic
+ print "-----------------------------"
+
+ # Can we match ?
+ if self.key not in dic:
+ return ignore_missing
+
+ if self.op == eq:
+ if isinstance(self.value, list):
+ return (dic[self.key] in self.value) # array ?
+ else:
+ return (dic[self.key] == self.value)
+ elif self.op == ne:
+ if isinstance(self.value, list):
+ return (dic[self.key] not in self.value) # array ?
+ else:
+ return (dic[self.key] != self.value) # array ?
+ elif self.op == lt:
+ if isinstance(self.value, StringTypes):
+ # prefix match
+ return dic[self.key].startswith('%s.' % self.value)
+ else:
+ return (dic[self.key] < self.value)
+ elif self.op == le:
+ if isinstance(self.value, StringTypes):
+ return dic[self.key] == self.value or dic[self.key].startswith('%s.' % self.value)
+ else:
+ return (dic[self.key] <= self.value)
+ elif self.op == gt:
+ if isinstance(self.value, StringTypes):
+ # prefix match
+ return self.value.startswith('%s.' % dic[self.key])
+ else:
+ return (dic[self.key] > self.value)
+ elif self.op == ge:
+ if isinstance(self.value, StringTypes):
+ # prefix match
+ return dic[self.key] == self.value or self.value.startswith('%s.' % dic[self.key])
+ else:
+ return (dic[self.key] >= self.value)
+ elif self.op == and_:
+ return (dic[self.key] & self.value) # array ?
+ elif self.op == or_:
+ return (dic[self.key] | self.value) # array ?
+ elif self.op == contains:
+ method, subfield = self.key.split('.', 1)
+ return not not [ x for x in dic[method] if x[subfield] == self.value]
+ else:
+ raise Exception, "Unexpected table format: %r", dic
+
+ def filter(self, dic):
+ """
+ Filter dic according to the current predicate.
+ """
+
+ if '.' in self.key:
+ # users.hrn
+ method, subfield = self.key.split('.', 1)
+ if not method in dic:
+ return None # XXX
+
+ if isinstance(dic[method], dict):
+ # We have a 1..1 relationship: apply the same filter to the dict
+ subpred = Predicate(subfield, self.op, self.value)
+ match = subpred.match(dic[method])
+ return dic if match else None
+
+ elif isinstance(dic[method], (list, tuple)):
+ # 1..N relationships
+ match = False
+ if self.op == contains:
+ return dic if self.match(dic) else None
+ else:
+ subpred = Predicate(subfield, self.op, self.value)
+ dic[method] = subpred.filter(dic[method])
+ return dic
+ else:
+ raise Exception, "Unexpected table format: %r", dic
+
+
+ else:
+ # Individual field operations: this could be simplified, since we are now using operators_short !!
+ # XXX match
+ print "current predicate", self
+ print "matching", dic
+ print "----"
+ return dic if self.match(dic) else None
+
--- /dev/null
+#-------------------------------------------------------------------------
+# Class Singleton
+#
+# Classes that inherit from Singleton can be instanciated only once
+#-------------------------------------------------------------------------
+
class Singleton(type):
    """
    Metaclass making its classes singletons: the first call to the class
    builds the instance, every later call returns that same instance.
    """

    def __init__(cls, name, bases, dic):
        super(Singleton, cls).__init__(name, bases, dic)
        # Each class gets its own slot, created when the class is defined.
        cls.instance = None

    def __call__(cls, *args, **kw):
        # Instantiate lazily on first call only.
        if cls.instance is None:
            cls.instance = super(Singleton, cls).__call__(*args, **kw)
        return cls.instance
+
+
+# See also
+# http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
--- /dev/null
+# http://wiki.python.org/moin/PythonDecoratorLibrary#Type_Enforcement_.28accepts.2Freturns.29
+'''
+One of three degrees of enforcement may be specified by passing
+the 'debug' keyword argument to the decorator:
+ 0 -- NONE: No type-checking. Decorators disabled.
+ 1 -- MEDIUM: Print warning message to stderr. (Default)
+ 2 -- STRONG: Raise TypeError with message.
+If 'debug' is not passed to the decorator, the default level is used.
+
+Example usage:
+ >>> NONE, MEDIUM, STRONG = 0, 1, 2
+ >>>
+ >>> @accepts(int, int, int)
+ ... @returns(float)
+ ... def average(x, y, z):
+ ... return (x + y + z) / 2
+ ...
+ >>> average(5.5, 10, 15.0)
+ TypeWarning: 'average' method accepts (int, int, int), but was given
+ (float, int, float)
+ 15.25
+ >>> average(5, 10, 15)
+ TypeWarning: 'average' method returns (float), but result is (int)
+ 15
+
+Needed to cast params as floats in function def (or simply divide by 2.0).
+
+ >>> TYPE_CHECK = STRONG
+ >>> @accepts(int, debug=TYPE_CHECK)
+ ... @returns(int, debug=TYPE_CHECK)
+ ... def fib(n):
+ ... if n in (0, 1): return n
+ ... return fib(n-1) + fib(n-2)
+ ...
+ >>> fib(5.3)
+ Traceback (most recent call last):
+ ...
+ TypeError: 'fib' method accepts (int), but was given (float)
+
+'''
+import sys
+from itertools import izip
+
def accepts(*types, **kw):
    '''Function decorator. Checks decorated function's arguments are
    of the expected types.

    Parameters:
    types -- The expected types of the inputs to the decorated function.
             Must specify type for each parameter.
    kw -- Optional specification of 'debug' level (this is the only valid
          keyword argument, no other should be given).
          debug = ( 0 | 1 | 2 )

    '''
    try:
        if not kw:
            # Default level is STRONG (2).  NOTE(review): the module
            # docstring says the default is MEDIUM (1), as returns() uses;
            # kept at 2 to preserve existing behavior -- confirm intent.
            debug = 2
        else:
            # BUGFIX: this lookup used to happen *before* the try block, so
            # the KeyError handler below could never catch a bad keyword.
            debug = kw['debug']

        def decorator(f):
            def newf(*args):
                if debug == 0:
                    # NONE: checks disabled.
                    return f(*args)
                assert len(args) == len(types)
                argtypes = tuple(map(type, args))
                if not compare_types(types, argtypes):
                    msg = info(f.__name__, types, argtypes, 0)
                    if debug == 1:
                        # MEDIUM: warn on stderr but run the call.
                        sys.stderr.write('TypeWarning:  %s\n' % msg)
                    elif debug == 2:
                        # STRONG: refuse the call.
                        raise TypeError(msg)
                return f(*args)
            newf.__name__ = f.__name__
            return newf
        return decorator
    except KeyError as key:
        # BUGFIX: ``key + "is not..."`` concatenated an exception with a
        # string (a TypeError) and was missing a space.
        raise KeyError(str(key) + " is not a valid keyword argument")
    except TypeError as msg:
        raise TypeError(msg)
+
def compare_types(expected, actual):
    """
    Recursively compare an expected type specification against actual types.

    expected -- a type, or a tuple of types (compared pairwise when actual is
                also a tuple, otherwise by membership).
    actual   -- a type, or a tuple of types.

    NoneType always matches (None is accepted for any expected type), and a
    subclass matches its parent (e.g. bool matches int).
    """
    if isinstance(expected, tuple):
        if isinstance(actual, tuple):
            # Pairwise comparison; zip stops at the shorter tuple, callers
            # (accepts) assert equal lengths beforehand.
            # (was itertools.izip -- the builtin is equivalent here)
            return all(compare_types(exp, act)
                       for exp, act in zip(expected, actual))
        return actual == type(None) or actual in expected
    return (actual == type(None)
            or actual == expected
            or issubclass(actual, expected))
+
def returns(ret_type, **kw):
    '''Function decorator. Checks decorated function's return value
    is of the expected type.

    Parameters:
    ret_type -- The expected type of the decorated function's return value.
                Must specify type for each parameter.
    kw -- Optional specification of 'debug' level (this is the only valid
          keyword argument, no other should be given).
          debug=(0 | 1 | 2)
    '''
    try:
        if not kw:
            # default level: MEDIUM
            debug = 1
        else:
            debug = kw['debug']

        def decorator(f):
            def newf(*args):
                result = f(*args)
                if debug == 0:
                    # NONE: checks disabled.
                    return result
                res_type = type(result)
                if not compare_types(ret_type, res_type):
                    # XXX note that this check should be recursive
                    msg = info(f.__name__, (ret_type,), (res_type,), 1)
                    if debug == 1:
                        # MEDIUM: warn on stderr but return the value.
                        sys.stderr.write('TypeWarning:  %s\n' % msg)
                    elif debug == 2:
                        # STRONG: refuse the result.
                        raise TypeError(msg)
                return result
            newf.__name__ = f.__name__
            return newf
        return decorator
    except KeyError as key:
        # BUGFIX: ``key + "is not..."`` concatenated an exception with a
        # string (itself a TypeError) and was missing a space.
        raise KeyError(str(key) + " is not a valid keyword argument")
    except TypeError as msg:
        raise TypeError(msg)
+
def info(fname, expected, actual, flag):
    '''Convenience function returns nicely formatted error/warning msg.

    fname    -- name of the decorated function.
    expected -- tuple of expected types.
    actual   -- tuple of actual types.
    flag     -- 0 for an 'accepts' message, 1 for a 'returns' message.
    '''
    # Render a tuple of types as bare names, e.g. (int, str) -> "int, str".
    fmt = lambda types: ', '.join([str(t).split("'")[1] for t in types])
    # BUGFIX: the formatter above was defined but never applied, so messages
    # contained raw tuple reprs like "((<type 'int'>,))" instead of the
    # "accepts (int, int, int)" form shown in the module docstring.
    msg = "'{}' method ".format( fname )\
          + ("accepts", "returns")[flag] + " ({}), but ".format( fmt(expected) )\
          + ("was given", "result is")[flag] + " ({})".format( fmt(actual) )
    return msg
+
fnDrawCallback: function() { hazelnut_draw_callback.call(object, options); }
};
// the intention here is that options.datatables_options as coming from the python object take precedence
- $.extend(actual_options, options.datatables_options );
+// XXX DISABLED by jordan: was causing errors in datatables.js $.extend(actual_options, options.datatables_options );
this.table = $('#hazelnut-' + options.plugin_uuid).dataTable(actual_options);
/* Setup the SelectAll button in the dataTable header */
*/
function hazelnut_filter (oSettings, aData, iDataIndex) {
var cur_query = this.current_query;
+ if (!cur_query) return true;
var ret = true;
/* We have an array of filters : a filter is an array (key op val)
def slice_view (request, slicename=tmp_default_slice):
page = Page(request)
+ page.expose_js_metadata()
+
# TODO The query to run is embedded in the URL
main_query = Query({'action': 'get', 'object': 'slice'}).filter_by('slice_hrn', '=', slicename)
#old# fields=['network','type','hrn','hostname'],
#old# filters= [ [ 'slice_hrn', '=', slicename, ] ],
#old# )
- page.enqueue_query(main_query)
+
+ aq = AnalyzedQuery(main_query)
+ page.enqueue_query(main_query, analyzed_query=aq)
# Prepare the display according to all metadata
# (some parts will be pending, others can be triggered by users).
# ... and for the relations
# XXX Let's hardcode resources for now
- aq = AnalyzedQuery(main_query)
sq = aq.subquery('resource')
tab_resources = Tabs (
)
main_plugin.insert(tab_resources)
+ jj = aq.to_json()
+ print "="*80
+ print "AQ=", jj
+ print "="*80
tab_resources.insert(
Hazelnut (
page = page,
# in this case (exec=True) the js async callback (see manifold.asynchroneous_success)
# offers the option to deliver the result to a specific DOM elt (in this case, set domid)
# otherwise (i.e. if domid not provided), it goes through the pubsub system (so all plugins can receive it)
- def enqueue_query (self, query, run_it=True, domid=None):
+ #
+ # NOTE:
+ # analyzed_query is required because it contains query_uuid that the
+ # plugins initialized in the python part will listen to. When a result is
+ # received in javascript, subresults should be publish to the appropriate
+ # query_uuid.
+ #
+ def enqueue_query (self, query, run_it=True, domid=None, analyzed_query=None):
# _queries is the set of all known queries
- self._queries = self._queries.union(set( [ query, ] ))
+ # XXX complex XXX self._queries = self._queries.union(set( [ query, ] ))
+ self._queries.add((query, analyzed_query))
# _queue is the list of queries that need to be triggered, with an optional domid
# we only do this if run_it is set
if run_it: self._queue.append ( (query.query_uuid,domid) )
# compute variables to expose to the template
env = {}
# expose the json definition of all queries
- env['queries_json'] = [ query.to_json() for query in self._queries ]
+ env['queries_json'] = [ query.to_json(analyzed_query=aq) for (query, aq) in self._queries ]
def query_publish_dom_tuple (a,b):
result={'query_uuid':a}
if b: result['domid']=b