From: Soner Sevinc Date: Thu, 22 May 2008 03:16:38 +0000 (+0000) Subject: (no commit message) X-Git-Tag: sfa-0.9-0@14641~876 X-Git-Url: http://git.onelab.eu/?a=commitdiff_plain;h=e3e89d07138c6a780c32f213b4ac3d59dadb316e;p=sfa.git --- diff --git a/README b/README index 7535174f..20474730 100644 --- a/README +++ b/README @@ -1,18 +1,5 @@ -The server folder contains basically the code for the registry interface. - The most important file is called the 'server.py' containing the implementation of the operations of the API. - There are two files containing python dictionaries which are used to save and reload cache information about the authority hierarchies. - The 'slice' and 'component' folders contain the keys of each authority in hierarchy. +The PLCAPI changes are in the directory "PLCAPI/trunk/PLC/Methods". +The SSL change is in the M2Crypto directory, at "/var/lib/python-support/python2.5/M2Crypto/SSL". -The gui directory is the client part. - There is a GUI in java, but the authentication logic and function calls are all in a file called 'clientstub.py'. This file is found in 'gui/JavaApplication1/client_osaka' directory. - 'client_osaka' and 'client_keiko' are just two example client folders that we use to test the interfaces. - -util folder contains almost all of the core code. - 'tree.py' is important for setting up a hierarchy of authorities corresponding to PlanetLab sites. As noted before, you can find the generated hierarchy under the folder in server folder, with names 'slice' and 'component' folders. - sec folder under util is the security module. 'sec.py' contains both the authentication/authorization related functions (used while a new connection is being established), also certificate related functions (credentials creation, etc). - Other files are util code for PlanetLab to GENI conversion logic, database and exceptions. - -Contact me at ssevinc@cs.princeton.edu for more information. Soner Sevinc - - +04/30/08 diff --git a/changes/Nodes.py b/changes/Nodes.py new file mode 100755 index 00000000..05120566 --- /dev/null +++ b/changes/Nodes.py @@ -0,0 +1,327 @@ +# +# Functions for interacting with the nodes table in the database +# +# Mark Huang +# Copyright (C) 2006 The Trustees of Princeton University +# +# $Id: Nodes.py 5654 2007-11-06 03:43:55Z tmack $ +# + +from types import StringTypes +import re + +from PLC.Faults import * +from PLC.Parameter import Parameter, Mixed +from PLC.Filter import Filter +from PLC.Debug import profile +from PLC.Table import Row, Table +from PLC.NodeNetworks import NodeNetwork, NodeNetworks +from PLC.BootStates import BootStates + +def valid_hostname(hostname): + # 1. Each part begins and ends with a letter or number. + # 2. Each part except the last can contain letters, numbers, or hyphens. + # 3. Each part is between 1 and 64 characters, including the trailing dot. + # 4. At least two parts. + # 5. Last part can only contain between 2 and 6 letters. + good_hostname = r'^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\.)+' \ + r'[a-z]{2,6}$' + return hostname and \ + re.match(good_hostname, hostname, re.IGNORECASE) + +class Node(Row): + """ + Representation of a row in the nodes table. To use, optionally + instantiate with a dict of values. Update as you would a + dict. Commit to the database with sync(). 
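+
+    An illustrative sketch of that workflow (field values are hypothetical;
+    api stands for a PLCAPI instance):
+
+        node = Node(api, {'hostname': 'node1.example.org',
+                          'site_id': 1, 'boot_state': 'boot'})
+        node.sync()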
+ """ + + table_name = 'nodes' + primary_key = 'node_id' + # Thierry -- we use delete on nodenetworks so the related NodeNetworkSettings get deleted too + join_tables = ['nodegroup_node', 'conf_file_node', 'pcu_node', 'slice_node', 'slice_attribute', 'node_session', 'peer_node','node_slice_whitelist'] + fields = { + 'node_id': Parameter(int, "Node identifier"), + 'hostname': Parameter(str, "Fully qualified hostname", max = 255), + 'site_id': Parameter(int, "Site at which this node is located"), + 'boot_state': Parameter(str, "Boot state", max = 20), + 'model': Parameter(str, "Make and model of the actual machine", max = 255, nullok = True), + 'boot_nonce': Parameter(str, "(Admin only) Random value generated by the node at last boot", max = 128), + 'version': Parameter(str, "Apparent Boot CD version", max = 64), + 'ssh_rsa_key': Parameter(str, "Last known SSH host key", max = 1024), + 'date_created': Parameter(int, "Date and time when node entry was created", ro = True), + 'last_updated': Parameter(int, "Date and time when node entry was created", ro = True), + 'uuid': Parameter(str, "Universal Unique Identifier"), + 'last_contact': Parameter(int, "Date and time when node last contacted plc", ro = True), + 'key': Parameter(str, "(Admin only) Node key", max = 256), + 'session': Parameter(str, "(Admin only) Node session value", max = 256, ro = True), + 'nodenetwork_ids': Parameter([int], "List of network interfaces that this node has"), + 'nodegroup_ids': Parameter([int], "List of node groups that this node is in"), + 'conf_file_ids': Parameter([int], "List of configuration files specific to this node"), + # 'root_person_ids': Parameter([int], "(Admin only) List of people who have root access to this node"), + 'slice_ids': Parameter([int], "List of slices on this node"), + 'slice_ids_whitelist': Parameter([int], "List of slices allowed on this node"), + 'pcu_ids': Parameter([int], "List of PCUs that control this node"), + 'ports': Parameter([int], "List of PCU ports that this node is connected to"), + 'peer_id': Parameter(int, "Peer to which this node belongs", nullok = True), + 'peer_node_id': Parameter(int, "Foreign node identifier at peer", nullok = True), + } + related_fields = { + 'nodenetworks': [Mixed(Parameter(int, "NodeNetwork identifier"), + Filter(NodeNetwork.fields))], + 'nodegroups': [Mixed(Parameter(int, "NodeGroup identifier"), + Parameter(str, "NodeGroup name"))], + 'conf_files': [Parameter(int, "ConfFile identifier")], + 'slices': [Mixed(Parameter(int, "Slice identifier"), + Parameter(str, "Slice name"))], + 'slices_whitelist': [Mixed(Parameter(int, "Slice identifier"), + Parameter(str, "Slice name"))] + } + # for Cache + class_key = 'hostname' + foreign_fields = ['boot_state','model','version','uuid'] + # forget about these ones, they are read-only anyway + # handling them causes Cache to re-sync all over again + # 'date_created','last_updated' + foreign_xrefs = [ + # in this case, we dont need the 'table' but Cache will look it up, so... 
+ {'field' : 'site_id' , 'class' : 'Site' , 'table' : 'unused-on-direct-refs' } , + ] + + def validate_hostname(self, hostname): + if not valid_hostname(hostname): + raise PLCInvalidArgument, "Invalid hostname" + + conflicts = Nodes(self.api, [hostname]) + for node in conflicts: + if 'node_id' not in self or self['node_id'] != node['node_id']: + raise PLCInvalidArgument, "Hostname already in use" + + return hostname + + def validate_boot_state(self, boot_state): + boot_states = [row['boot_state'] for row in BootStates(self.api)] + if boot_state not in boot_states: + raise PLCInvalidArgument, "Invalid boot state" + + return boot_state + + validate_date_created = Row.validate_timestamp + validate_last_updated = Row.validate_timestamp + validate_last_contact = Row.validate_timestamp + + def update_last_contact(self, commit = True): + """ + Update last_contact field with current time + """ + + assert 'node_id' in self + assert self.table_name + + self.api.db.do("UPDATE %s SET last_contact = CURRENT_TIMESTAMP " % (self.table_name) + \ + " where node_id = %d" % ( self['node_id']) ) + self.sync(commit) + + + def update_last_updated(self, commit = True): + """ + Update last_updated field with current time + """ + + assert 'node_id' in self + assert self.table_name + + self.api.db.do("UPDATE %s SET last_updated = CURRENT_TIMESTAMP " % (self.table_name) + \ + " where node_id = %d" % (self['node_id']) ) + self.sync(commit) + + def associate_nodenetworks(self, auth, field, value): + """ + Delete nodenetworks not found in value list (using DeleteNodeNetwor)k + Add nodenetworks found in value list (using AddNodeNetwork) + Updates nodenetworks found w/ nodenetwork_id in value list (using UpdateNodeNetwork) + """ + + assert 'nodenetworkp_ids' in self + assert 'node_id' in self + assert isinstance(value, list) + + (nodenetwork_ids, blank, nodenetworks) = self.separate_types(value) + + if self['nodenetwork_ids'] != nodenetwork_ids: + from PLC.Methods.DeleteNodeNetwork import DeleteNodeNetwork + + stale_nodenetworks = set(self['nodenetwork_ids']).difference(nodenetwork_ids) + + for stale_nodenetwork in stale_nodenetworks: + DeleteNodeNetwork.__call__(DeleteNodeNetwork(self.api), auth, stale_nodenetwork['nodenetwork_id']) + + def associate_nodegroups(self, auth, field, value): + """ + Add node to nodegroups found in value list (AddNodeToNodegroup) + Delete node from nodegroup not found in value list (DeleteNodeFromNodegroup) + """ + + from PLC.NodeGroups import NodeGroups + + assert 'nodegroup_ids' in self + assert 'node_id' in self + assert isinstance(value, list) + + (nodegroup_ids, nodegroup_names) = self.separate_types(value)[0:2] + + if nodegroup_names: + nodegroups = NodeGroups(self.api, nodegroup_names, ['nodegroup_id']).dict('nodegroup_id') + nodegroup_ids += nodegroups.keys() + + if self['nodegroup_ids'] != nodegroup_ids: + from PLC.Methods.AddNodeToNodeGroup import AddNodeToNodeGroup + from PLC.Methods.DeleteNodeFromNodeGroup import DeleteNodeFromNodeGroup + + new_nodegroups = set(nodegroup_ids).difference(self['nodegroup_ids']) + stale_nodegroups = set(self['nodegroup_ids']).difference(nodegroup_ids) + + for new_nodegroup in new_nodegroups: + AddNodeToNodeGroup.__call__(AddNodeToNodeGroup(self.api), auth, self['node_id'], new_nodegroup) + for stale_nodegroup in stale_nodegroups: + DeleteNodeFromNodeGroup.__call__(DeleteNodeFromNodeGroup(self.api), auth, self['node_id'], stale_nodegroup) + + + + def associate_conf_files(self, auth, field, value): + """ + Add conf_files found in value list 
(AddConfFileToNode) + Delets conf_files not found in value list (DeleteConfFileFromNode) + """ + + assert 'conf_file_ids' in self + assert 'node_id' in self + assert isinstance(value, list) + + conf_file_ids = self.separate_types(value)[0] + + if self['conf_file_ids'] != conf_file_ids: + from PLC.Methods.AddConfFileToNode import AddConfFileToNode + from PLC.Methods.DeleteConfFileFromNode import DeleteConfFileFromNode + new_conf_files = set(conf_file_ids).difference(self['conf_file_ids']) + stale_conf_files = set(self['conf_file_ids']).difference(conf_file_ids) + + for new_conf_file in new_conf_files: + AddConfFileToNode.__call__(AddConfFileToNode(self.api), auth, new_conf_file, self['node_id']) + for stale_conf_file in stale_conf_files: + DeleteConfFileFromNode.__call__(DeleteConfFileFromNode(self.api), auth, stale_conf_file, self['node_id']) + + + def associate_slices(self, auth, field, value): + """ + Add slices found in value list to (AddSliceToNode) + Delete slices not found in value list (DeleteSliceFromNode) + """ + + from PLC.Slices import Slices + + assert 'slice_ids' in self + assert 'node_id' in self + assert isinstance(value, list) + + (slice_ids, slice_names) = self.separate_types(value)[0:2] + + if slice_names: + slices = Slices(self.api, slice_names, ['slice_id']).dict('slice_id') + slice_ids += slices.keys() + + if self['slice_ids'] != slice_ids: + from PLC.Methods.AddSliceToNodes import AddSliceToNodes + from PLC.Methods.DeleteSliceFromNodes import DeleteSliceFromNodes + new_slices = set(slice_ids).difference(self['slice_ids']) + stale_slices = set(self['slice_ids']).difference(slice_ids) + + for new_slice in new_slices: + AddSliceToNodes.__call__(AddSliceToNodes(self.api), auth, new_slice, [self['node_id']]) + for stale_slice in stale_slices: + DeleteSliceFromNodes.__call__(DeleteSliceFromNodes(self.api), auth, stale_slice, [self['node_id']]) + + def associate_slices_whitelist(self, auth, field, value): + """ + Add slices found in value list to whitelist (AddSliceToNodesWhitelist) + Delete slices not found in value list from whitelist (DeleteSliceFromNodesWhitelist) + """ + + from PLC.Slices import Slices + + assert 'slice_ids_whitelist' in self + assert 'node_id' in self + assert isinstance(value, list) + + (slice_ids, slice_names) = self.separate_types(value)[0:2] + + if slice_names: + slices = Slices(self.api, slice_names, ['slice_id']).dict('slice_id') + slice_ids += slices.keys() + + if self['slice_ids_whitelist'] != slice_ids: + from PLC.Methods.AddSliceToNodesWhitelist import AddSliceToNodesWhitelist + from PLC.Methods.DeleteSliceFromNodesWhitelist import DeleteSliceFromNodesWhitelist + new_slices = set(slice_ids).difference(self['slice_ids_whitelist']) + stale_slices = set(self['slice_ids_whitelist']).difference(slice_ids) + + for new_slice in new_slices: + AddSliceToNodesWhitelist.__call__(AddSliceToNodesWhitelist(self.api), auth, new_slice, [self['node_id']]) + for stale_slice in stale_slices: + DeleteSliceFromNodesWhitelist.__call__(DeleteSliceFromNodesWhitelist(self.api), auth, stale_slice, [self['node_id']]) + + + def delete(self, commit = True): + """ + Delete existing node. 
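+
+        The row is only marked as deleted; join-table rows referencing the
+        node are removed, and its interfaces are deleted through
+        NodeNetwork.delete() so their NodeNetworkSettings are cleaned up too.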
+ """ + + assert 'node_id' in self + assert 'nodenetwork_ids' in self + + # we need to clean up NodeNetworkSettings, so handling nodenetworks as part of join_tables does not work + for nodenetwork in NodeNetworks(self.api,self['nodenetwork_ids']): + nodenetwork.delete() + + # Clean up miscellaneous join tables + for table in self.join_tables: + self.api.db.do("DELETE FROM %s WHERE node_id = %d" % \ + (table, self['node_id'])) + + # Mark as deleted + self['deleted'] = True + self.sync(commit) + + +class Nodes(Table): + """ + Representation of row(s) from the nodes table in the + database. + """ + + def __init__(self, api, node_filter = None, columns = None): + Table.__init__(self, api, Node, columns) + + sql = "SELECT %s FROM view_nodes WHERE deleted IS False" % \ + ", ".join(self.columns) + + if node_filter is not None: + if isinstance(node_filter, (list, tuple, set)): + # Separate the list into integers and strings + ints = filter(lambda x: isinstance(x, (int, long)), node_filter) + strs = filter(lambda x: isinstance(x, StringTypes), node_filter) + node_filter = Filter(Node.fields, {'node_id': ints, 'hostname': strs}) + sql += " AND (%s) %s" % node_filter.sql(api, "OR") + elif isinstance(node_filter, dict): + node_filter = Filter(Node.fields, node_filter) + sql += " AND (%s) %s" % node_filter.sql(api, "AND") + elif isinstance (node_filter, StringTypes): + node_filter = Filter(Node.fields, {'hostname':[node_filter]}) + sql += " AND (%s) %s" % node_filter.sql(api, "AND") + elif isinstance (node_filter, int): + node_filter = Filter(Node.fields, {'node_id':[node_filter]}) + sql += " AND (%s) %s" % node_filter.sql(api, "AND") + else: + raise PLCInvalidArgument, "Wrong node filter %r"%node_filter + + self.selectall(sql) diff --git a/changes/Persons.py b/changes/Persons.py new file mode 100755 index 00000000..5a45942c --- /dev/null +++ b/changes/Persons.py @@ -0,0 +1,502 @@ +# +# Functions for interacting with the persons table in the database +# +# Mark Huang +# Copyright (C) 2006 The Trustees of Princeton University +# +# $Id: Persons.py 5652 2007-11-06 03:42:57Z tmack $ +# + +from types import StringTypes +from datetime import datetime +import md5 +import time +from random import Random +import re +import crypt + +from PLC.Faults import * +from PLC.Debug import log +from PLC.Parameter import Parameter, Mixed +from PLC.Filter import Filter +from PLC.Table import Row, Table +from PLC.Roles import Role, Roles +from PLC.Keys import Key, Keys +from PLC.Messages import Message, Messages + +class Person(Row): + """ + Representation of a row in the persons table. To use, optionally + instantiate with a dict of values. Update as you would a + dict. Commit to the database with sync(). 
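+
+    An illustrative sketch of that workflow (field values are hypothetical;
+    api stands for a PLCAPI instance):
+
+        person = Person(api, {'first_name': 'Jane', 'last_name': 'Doe',
+                              'email': 'jane@example.org'})
+        person['enabled'] = True
+        person.sync()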
+ """ + + table_name = 'persons' + primary_key = 'person_id' + join_tables = ['person_key', 'person_role', 'person_site', 'slice_person', 'person_session', 'peer_person'] + fields = { + 'person_id': Parameter(int, "User identifier"), + 'first_name': Parameter(str, "Given name", max = 128), + 'last_name': Parameter(str, "Surname", max = 128), + 'title': Parameter(str, "Title", max = 128, nullok = True), + 'email': Parameter(str, "Primary e-mail address", max = 254), + 'phone': Parameter(str, "Telephone number", max = 64, nullok = True), + 'url': Parameter(str, "Home page", max = 254, nullok = True), + 'bio': Parameter(str, "Biography", max = 254, nullok = True), + 'enabled': Parameter(bool, "Has been enabled"), + 'password': Parameter(str, "Account password in crypt() form", max = 254), + 'verification_key': Parameter(str, "Reset password key", max = 254, nullok = True), + 'verification_expires': Parameter(int, "Date and time when verification_key expires", nullok = True), + 'last_updated': Parameter(int, "Date and time of last update", ro = True), + 'date_created': Parameter(int, "Date and time when account was created", ro = True), + 'uuid': Parameter(str, "Universal Unique Identifier"), + 'role_ids': Parameter([int], "List of role identifiers"), + 'roles': Parameter([str], "List of roles"), + 'site_ids': Parameter([int], "List of site identifiers"), + 'key_ids': Parameter([int], "List of key identifiers"), + 'slice_ids': Parameter([int], "List of slice identifiers"), + 'peer_id': Parameter(int, "Peer to which this user belongs", nullok = True), + 'peer_person_id': Parameter(int, "Foreign user identifier at peer", nullok = True), + } + related_fields = { + 'roles': [Mixed(Parameter(int, "Role identifier"), + Parameter(str, "Role name"))], + 'sites': [Mixed(Parameter(int, "Site identifier"), + Parameter(str, "Site name"))], + 'keys': [Mixed(Parameter(int, "Key identifier"), + Filter(Key.fields))], + 'slices': [Mixed(Parameter(int, "Slice identifier"), + Parameter(str, "Slice name"))] + } + + + + # for Cache + class_key = 'email' + foreign_fields = ['first_name', 'last_name', 'title', 'email', 'phone', 'url', + 'bio', 'enabled', 'password', 'uuid', ] + # forget about these ones, they are read-only anyway + # handling them causes Cache to re-sync all over again + # 'last_updated', 'date_created' + foreign_xrefs = [ + {'field' : 'key_ids', 'class': 'Key', 'table' : 'person_key' } , + {'field' : 'site_ids', 'class': 'Site', 'table' : 'person_site'}, +# xxx this is not handled by Cache yet +# 'role_ids': Parameter([int], "List of role identifiers"), +] + + def validate_email(self, email): + """ + Validate email address. Stolen from Mailman. + """ + + invalid_email = PLCInvalidArgument("Invalid e-mail address") + email_badchars = r'[][()<>|;^,\200-\377]' + + # Pretty minimal, cheesy check. We could do better... 
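+        # (the checks below reject embedded spaces, the characters listed in
+        # email_badchars, a leading '-', a misplaced '@', and domains with
+        # fewer than two parts)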
+ if not email or email.count(' ') > 0: + raise invalid_email + if re.search(email_badchars, email) or email[0] == '-': + raise invalid_email + + email = email.lower() + at_sign = email.find('@') + if at_sign < 1: + raise invalid_email + user = email[:at_sign] + rest = email[at_sign+1:] + domain = rest.split('.') + + # This means local, unqualified addresses, are not allowed + if not domain: + raise invalid_email + if len(domain) < 2: + raise invalid_email + + # check only against users on the same peer + if 'peer_id' in self: + namespace_peer_id = self['peer_id'] + else: + namespace_peer_id = None + + conflicts = Persons(self.api, {'email':email,'peer_id':namespace_peer_id}) + + for person in conflicts: + if 'person_id' not in self or self['person_id'] != person['person_id']: + raise PLCInvalidArgument, "E-mail address already in use" + + return email + + def validate_password(self, password): + """ + Encrypt password if necessary before committing to the + database. + """ + + magic = "$1$" + + if len(password) > len(magic) and \ + password[0:len(magic)] == magic: + return password + else: + # Generate a somewhat unique 8 character salt string + salt = str(time.time()) + str(Random().random()) + salt = md5.md5(salt).hexdigest()[:8] + return crypt.crypt(password.encode(self.api.encoding), magic + salt + "$") + + validate_date_created = Row.validate_timestamp + validate_last_updated = Row.validate_timestamp + validate_verification_expires = Row.validate_timestamp + + def can_update(self, person): + """ + Returns true if we can update the specified person. We can + update a person if: + + 1. We are the person. + 2. We are an admin. + 3. We are a PI and the person is a user or tech or at + one of our sites. + """ + + assert isinstance(person, Person) + + if self['person_id'] == person['person_id']: + return True + + if 'admin' in self['roles']: + return True + + if 'pi' in self['roles']: + if set(self['site_ids']).intersection(person['site_ids']): + # Can update people with higher role IDs + return min(self['role_ids']) < min(person['role_ids']) + + return False + + def can_view(self, person): + """ + Returns true if we can view the specified person. We can + view a person if: + + 1. We are the person. + 2. We are an admin. + 3. We are a PI and the person is at one of our sites. + """ + + assert isinstance(person, Person) + + if self.can_update(person): + return True + + if 'pi' in self['roles']: + if set(self['site_ids']).intersection(person['site_ids']): + # Can view people with equal or higher role IDs + return min(self['role_ids']) <= min(person['role_ids']) + + return False + + add_role = Row.add_object(Role, 'person_role') + remove_role = Row.remove_object(Role, 'person_role') + + add_key = Row.add_object(Key, 'person_key') + remove_key = Row.remove_object(Key, 'person_key') + + def set_primary_site(self, site, commit = True): + """ + Set the primary site for an existing user. 
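+
+        Clears is_primary on every person_site row for this user, sets it
+        for the given site, and moves that site_id to the front of the
+        cached site_ids list.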
+ """ + + assert 'person_id' in self + assert 'site_id' in site + + person_id = self['person_id'] + site_id = site['site_id'] + self.api.db.do("UPDATE person_site SET is_primary = False" \ + " WHERE person_id = %(person_id)d", + locals()) + self.api.db.do("UPDATE person_site SET is_primary = True" \ + " WHERE person_id = %(person_id)d" \ + " AND site_id = %(site_id)d", + locals()) + + if commit: + self.api.db.commit() + + assert 'site_ids' in self + assert site_id in self['site_ids'] + + # Make sure that the primary site is first in the list + self['site_ids'].remove(site_id) + self['site_ids'].insert(0, site_id) + + def update_last_updated(self, commit = True): + """ + Update last_updated field with current time + """ + + assert 'person_id' in self + assert self.table_name + + self.api.db.do("UPDATE %s SET last_updated = CURRENT_TIMESTAMP " % (self.table_name) + \ + " where person_id = %d" % (self['person_id']) ) + self.sync(commit) + + def associate_roles(self, auth, field, value): + """ + Adds roles found in value list to this person (using AddRoleToPerson). + Deletes roles not found in value list from this person (using DeleteRoleFromPerson). + """ + + assert 'role_ids' in self + assert 'person_id' in self + assert isinstance(value, list) + + (role_ids, role_names) = self.separate_types(value)[0:2] + + # Translate roles into role_ids + if role_names: + roles = Roles(self.api, role_names, ['role_id']).dict('role_id') + role_ids += roles.keys() + + # Add new ids, remove stale ids + if self['role_ids'] != role_ids: + from PLC.Methods.AddRoleToPerson import AddRoleToPerson + from PLC.Methods.DeleteRoleFromPerson import DeleteRoleFromPerson + new_roles = set(role_ids).difference(self['role_ids']) + stale_roles = set(self['role_ids']).difference(role_ids) + + for new_role in new_roles: + AddRoleToPerson.__call__(AddRoleToPerson(self.api), auth, new_role, self['person_id']) + for stale_role in stale_roles: + DeleteRoleFromPerson.__call__(DeleteRoleFromPerson(self.api), auth, stale_role, self['person_id']) + + + def associate_sites(self, auth, field, value): + """ + Adds person to sites found in value list (using AddPersonToSite). + Deletes person from site not found in value list (using DeletePersonFromSite). + """ + + from PLC.Sites import Sites + + assert 'site_ids' in self + assert 'person_id' in self + assert isinstance(value, list) + + (site_ids, site_names) = self.separate_types(value)[0:2] + + # Translate roles into role_ids + if site_names: + sites = Sites(self.api, site_names, ['site_id']).dict('site_id') + site_ids += sites.keys() + + # Add new ids, remove stale ids + if self['site_ids'] != site_ids: + from PLC.Methods.AddPersonToSite import AddPersonToSite + from PLC.Methods.DeletePersonFromSite import DeletePersonFromSite + new_sites = set(site_ids).difference(self['site_ids']) + stale_sites = set(self['site_ids']).difference(site_ids) + + for new_site in new_sites: + AddPersonToSite.__call__(AddPersonToSite(self.api), auth, self['person_id'], new_site) + for stale_site in stale_sites: + DeletePersonFromSite.__call__(DeletePersonFromSite(self.api), auth, self['person_id'], stale_site) + + + def associate_keys(self, auth, field, value): + """ + Deletes key_ids not found in value list (using DeleteKey). + Adds key if key_fields w/o key_id is found (using AddPersonKey). + Updates key if key_fields w/ key_id is found (using UpdateKey). 
+ """ + assert 'key_ids' in self + assert 'person_id' in self + assert isinstance(value, list) + + (key_ids, blank, keys) = self.separate_types(value) + + if self['key_ids'] != key_ids: + from PLC.Methods.DeleteKey import DeleteKey + stale_keys = set(self['key_ids']).difference(key_ids) + + for stale_key in stale_keys: + DeleteKey.__call__(DeleteKey(self.api), auth, stale_key) + + if keys: + from PLC.Methods.AddPersonKey import AddPersonKey + from PLC.Methods.UpdateKey import UpdateKey + updated_keys = filter(lambda key: 'key_id' in key, keys) + added_keys = filter(lambda key: 'key_id' not in key, keys) + + for key in added_keys: + AddPersonKey.__call__(AddPersonKey(self.api), auth, self['person_id'], key) + for key in updated_keys: + key_id = key.pop('key_id') + UpdateKey.__call__(UpdateKey(self.api), auth, key_id, key) + + + def associate_slices(self, auth, field, value): + """ + Adds person to slices found in value list (using AddPersonToSlice). + Deletes person from slices found in value list (using DeletePersonFromSlice). + """ + + from PLC.Slices import Slices + + assert 'slice_ids' in self + assert 'person_id' in self + assert isinstance(value, list) + + (slice_ids, slice_names) = self.separate_types(value)[0:2] + + # Translate roles into role_ids + if slice_names: + slices = Slices(self.api, slice_names, ['slice_id']).dict('slice_id') + slice_ids += slices.keys() + + # Add new ids, remove stale ids + if self['slice_ids'] != slice_ids: + from PLC.Methods.AddPersonToSlice import AddPersonToSlice + from PLC.Methods.DeletePersonFromSlice import DeletePersonFromSlice + new_slices = set(slice_ids).difference(self['slice_ids']) + stale_slices = set(self['slice_ids']).difference(slice_ids) + + for new_slice in new_slices: + AddPersonToSlice.__call__(AddPersonToSlice(self.api), auth, self['person_id'], new_slice) + for stale_slice in stale_slices: + DeletePersonFromSlice.__call__(DeletePersonFromSlice(self.api), auth, self['person_id'], stale_slice) + + + def delete(self, commit = True): + """ + Delete existing user. + """ + + # Delete all keys + keys = Keys(self.api, self['key_ids']) + for key in keys: + key.delete(commit = False) + + # Clean up miscellaneous join tables + for table in self.join_tables: + self.api.db.do("DELETE FROM %s WHERE person_id = %d" % \ + (table, self['person_id'])) + + # Mark as deleted + self['deleted'] = True + self.sync(commit) + +class Persons(Table): + """ + Representation of row(s) from the persons table in the + database. 
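+
+    A filtering sketch (identifiers are hypothetical; api stands for a
+    PLCAPI instance):
+
+        Persons(api)                          # every non-deleted account
+        Persons(api, 'jane@example.org')      # a string filters on email
+        Persons(api, [4, 7])                  # integers filter on person_id
+        Persons(api, {'enabled': True}, ['person_id', 'email'])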
+ """ + + def __init__(self, api, person_filter = None, columns = None): + Table.__init__(self, api, Person, columns) + #sql = "SELECT %s FROM view_persons WHERE deleted IS False" % \ + # ", ".join(self.columns) + foreign_fields = {'role_ids': ('role_id', 'person_role'), + 'roles': ('name', 'roles'), + 'site_ids': ('site_id', 'person_site'), + 'key_ids': ('key_id', 'person_key'), + 'slice_ids': ('slice_id', 'slice_person') + } + foreign_keys = {} + db_fields = filter(lambda field: field not in foreign_fields.keys(), Person.fields.keys()) + all_fields = db_fields + [value[0] for value in foreign_fields.values()] + fields = [] + _select = "SELECT " + _from = " FROM persons " + _join = " LEFT JOIN peer_person USING (person_id) " + _where = " WHERE deleted IS False " + + if not columns: + # include all columns + fields = all_fields + tables = [value[1] for value in foreign_fields.values()] + tables.sort() + for key in foreign_fields.keys(): + foreign_keys[foreign_fields[key][0]] = key + for table in tables: + if table in ['roles']: + _join += " LEFT JOIN roles USING(role_id) " + else: + _join += " LEFT JOIN %s USING (person_id) " % (table) + else: + tables = set() + columns = filter(lambda column: column in db_fields+foreign_fields.keys(), columns) + columns.sort() + for column in columns: + if column in foreign_fields.keys(): + (field, table) = foreign_fields[column] + foreign_keys[field] = column + fields += [field] + tables.add(table) + if column in ['roles']: + _join += " LEFT JOIN roles USING(role_id) " + else: + _join += " LEFT JOIN %s USING (person_id)" % \ + (foreign_fields[column][1]) + + else: + fields += [column] + + # postgres will return timestamps as datetime objects. + # XMLPRC cannot marshal datetime so convert to int + timestamps = ['date_created', 'last_updated', 'verification_expires'] + for field in fields: + if field in timestamps: + fields[fields.index(field)] = \ + "CAST(date_part('epoch', %s) AS bigint) AS %s" % (field, field) + + _select += ", ".join(fields) + sql = _select + _from + _join + _where + + # deal with filter + if person_filter is not None: + if isinstance(person_filter, (list, tuple, set)): + # Separate the list into integers and strings + ints = filter(lambda x: isinstance(x, (int, long)), person_filter) + strs = filter(lambda x: isinstance(x, StringTypes), person_filter) + person_filter = Filter(Person.fields, {'person_id': ints, 'email': strs}) + sql += " AND (%s) %s" % person_filter.sql(api, "OR") + elif isinstance(person_filter, dict): + person_filter = Filter(Person.fields, person_filter) + sql += " AND (%s) %s" % person_filter.sql(api, "AND") + elif isinstance (person_filter, StringTypes): + person_filter = Filter(Person.fields, {'email':[person_filter]}) + sql += " AND (%s) %s" % person_filter.sql(api, "AND") + elif isinstance (person_filter, int): + person_filter = Filter(Person.fields, {'person_id':[person_filter]}) + sql += " AND (%s) %s" % person_filter.sql(api, "AND") + else: + raise PLCInvalidArgument, "Wrong person filter %r"%person_filter + + # aggregate data + all_persons = {} + for row in self.api.db.selectall(sql): + person_id = row['person_id'] + + if all_persons.has_key(person_id): + for (key, key_list) in foreign_keys.items(): + data = row.pop(key) + row[key_list] = [data] + if data and data not in all_persons[person_id][key_list]: + all_persons[person_id][key_list].append(data) + else: + for key in foreign_keys.keys(): + value = row.pop(key) + if value: + row[foreign_keys[key]] = [value] + else: + row[foreign_keys[key]] = [] + if 
row: + all_persons[person_id] = row + + # populate self + for row in all_persons.values(): + obj = self.classobj(self.api, row) + self.append(obj) + diff --git a/changes/README b/changes/README index 20474730..21618cf6 100644 --- a/changes/README +++ b/changes/README @@ -1,4 +1,4 @@ -The PLCAPI changes are in the directory "PLCAPI/trunk/PLC/Methods". +The PLCAPI changes are in the directories "PLCAPI/trunk/PLC" and "PLCAPI/trunk/PLC/Methods". The SSL change is in the M2Crypto directory, at "/var/lib/python-support/python2.5/M2Crypto/SSL". Soner Sevinc diff --git a/changes/Roles.py b/changes/Roles.py new file mode 100755 index 00000000..41f27162 --- /dev/null +++ b/changes/Roles.py @@ -0,0 +1,72 @@ +# +# Functions for interacting with the roles table in the database +# +# Mark Huang +# Copyright (C) 2006 The Trustees of Princeton University +# +# $Id: Roles.py 5574 2007-10-25 20:33:17Z thierry $ +# + +from types import StringTypes +from PLC.Faults import * +from PLC.Parameter import Parameter +from PLC.Filter import Filter +from PLC.Table import Row, Table + +class Role(Row): + """ + Representation of a row in the roles table. To use, + instantiate with a dict of values. + """ + + table_name = 'roles' + primary_key = 'role_id' + join_tables = ['person_role', ('slice_attribute_types', 'min_role_id')] + fields = { + 'role_id': Parameter(int, "Role identifier"), + 'name': Parameter(str, "Role", max = 100), + } + + def validate_role_id(self, role_id): + # Make sure role does not already exist + conflicts = Roles(self.api, [role_id]) + if conflicts: + raise PLCInvalidArgument, "Role ID already in use" + + return role_id + + def validate_name(self, name): + # Make sure name is not blank + if not len(name): + raise PLCInvalidArgument, "Role must be specified" + + # Make sure role does not already exist + conflicts = Roles(self.api, [name]) + if conflicts: + raise PLCInvalidArgument, "Role name already in use" + + return name + +class Roles(Table): + """ + Representation of the roles table in the database. + """ + + def __init__(self, api, role_filter = None, columns = None): + Table.__init__(self, api, Role) + + sql = "SELECT %s FROM roles WHERE True" % \ + ", ".join(Role.fields) + + if role_filter is not None: + if isinstance(role_filter, (list, tuple, set)): + # Separate the list into integers and strings + ints = filter(lambda x: isinstance(x, (int, long)), role_filter) + strs = filter(lambda x: isinstance(x, StringTypes), role_filter) + role_filter = Filter(Role.fields, {'role_id': ints, 'name': strs}) + sql += " AND (%s) %s" % role_filter.sql(api, "OR") + elif isinstance(role_filter, dict): + role_filter = Filter(Role.fields, role_filter) + sql += " AND (%s) %s" % role_filter.sql(api, "AND") + + self.selectall(sql) diff --git a/changes/Sites.py b/changes/Sites.py new file mode 100755 index 00000000..6774c2d5 --- /dev/null +++ b/changes/Sites.py @@ -0,0 +1,272 @@ +from types import StringTypes +import string + +from PLC.Faults import * +from PLC.Parameter import Parameter, Mixed +from PLC.Filter import Filter +from PLC.Debug import profile +from PLC.Table import Row, Table +from PLC.Slices import Slice, Slices +from PLC.PCUs import PCU, PCUs +from PLC.Nodes import Node, Nodes +from PLC.Addresses import Address, Addresses +from PLC.Persons import Person, Persons + +class Site(Row): + """ + Representation of a row in the sites table. To use, optionally + instantiate with a dict of values. Update as you would a + dict. Commit to the database with sync(). 
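+
+    An illustrative sketch of that workflow (field values are hypothetical;
+    api stands for a PLCAPI instance):
+
+        site = Site(api, {'name': 'Example University',
+                          'abbreviated_name': 'Example',
+                          'login_base': 'example'})
+        site.sync()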
+ """ + + table_name = 'sites' + primary_key = 'site_id' + join_tables = ['person_site', 'site_address', 'peer_site'] + fields = { + 'site_id': Parameter(int, "Site identifier"), + 'name': Parameter(str, "Full site name", max = 254), + 'abbreviated_name': Parameter(str, "Abbreviated site name", max = 50), + 'login_base': Parameter(str, "Site slice prefix", max = 20), + 'is_public': Parameter(bool, "Publicly viewable site"), + 'enabled': Parameter(bool, "Has been enabled"), + 'latitude': Parameter(float, "Decimal latitude of the site", min = -90.0, max = 90.0, nullok = True), + 'longitude': Parameter(float, "Decimal longitude of the site", min = -180.0, max = 180.0, nullok = True), + 'url': Parameter(str, "URL of a page that describes the site", max = 254, nullok = True), + 'date_created': Parameter(int, "Date and time when site entry was created, in seconds since UNIX epoch", ro = True), + 'last_updated': Parameter(int, "Date and time when site entry was last updated, in seconds since UNIX epoch", ro = True), + 'max_slices': Parameter(int, "Maximum number of slices that the site is able to create"), + 'max_slivers': Parameter(int, "Maximum number of slivers that the site is able to create"), + 'uuid': Parameter(str, "Universal Unique Identifier"), + 'person_ids': Parameter([int], "List of account identifiers"), + 'slice_ids': Parameter([int], "List of slice identifiers"), + 'address_ids': Parameter([int], "List of address identifiers"), + 'pcu_ids': Parameter([int], "List of PCU identifiers"), + 'node_ids': Parameter([int], "List of site node identifiers"), + 'peer_id': Parameter(int, "Peer to which this site belongs", nullok = True), + 'peer_site_id': Parameter(int, "Foreign site identifier at peer", nullok = True), + 'ext_consortium_id': Parameter(int, "external consortium id", nullok = True) + } + related_fields = { + 'persons': [Mixed(Parameter(int, "Person identifier"), + Parameter(str, "Email address"))], + 'addresses': [Mixed(Parameter(int, "Address identifer"), + Filter(Address.fields))] + } + # for Cache + class_key = 'login_base' + foreign_fields = ['abbreviated_name', 'name', 'is_public', 'latitude', 'longitude', + 'url', 'max_slices', 'max_slivers', 'uuid', + ] + # forget about these ones, they are read-only anyway + # handling them causes Cache to re-sync all over again + # 'last_updated', 'date_created' + foreign_xrefs = [] + + def validate_name(self, name): + if not len(name): + raise PLCInvalidArgument, "Name must be specified" + + return name + + validate_abbreviated_name = validate_name + + def validate_login_base(self, login_base): + if not len(login_base): + raise PLCInvalidArgument, "Login base must be specified" + + if not set(login_base).issubset(string.lowercase + string.digits): + raise PLCInvalidArgument, "Login base must consist only of lowercase ASCII letters or numbers" + + conflicts = Sites(self.api, [login_base]) + for site in conflicts: + if 'site_id' not in self or self['site_id'] != site['site_id']: + raise PLCInvalidArgument, "login_base already in use" + + return login_base + + def validate_latitude(self, latitude): + if not self.has_key('longitude') or \ + self['longitude'] is None: + raise PLCInvalidArgument, "Longitude must also be specified" + + return latitude + + def validate_longitude(self, longitude): + if not self.has_key('latitude') or \ + self['latitude'] is None: + raise PLCInvalidArgument, "Latitude must also be specified" + + return longitude + + validate_date_created = Row.validate_timestamp + validate_last_updated = Row.validate_timestamp 
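+    # Row.add_object / Row.remove_object below build helpers that manage
+    # rows in the named join tables (person_site, site_address)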
+ + add_person = Row.add_object(Person, 'person_site') + remove_person = Row.remove_object(Person, 'person_site') + + add_address = Row.add_object(Address, 'site_address') + remove_address = Row.remove_object(Address, 'site_address') + + def update_last_updated(self, commit = True): + """ + Update last_updated field with current time + """ + + assert 'site_id' in self + assert self.table_name + + self.api.db.do("UPDATE %s SET last_updated = CURRENT_TIMESTAMP " % (self.table_name) + \ + " where site_id = %d" % (self['site_id']) ) + self.sync(commit) + + + def associate_persons(self, auth, field, value): + """ + Adds persons found in value list to this site (using AddPersonToSite). + Deletes persons not found in value list from this site (using DeletePersonFromSite). + """ + + assert 'person_ids' in self + assert 'site_id' in self + assert isinstance(value, list) + + (person_ids, emails) = self.separate_types(value)[0:2] + + # Translate emails into person_ids + if emails: + persons = Persons(self.api, emails, ['person_id']).dict('person_id') + person_ids += persons.keys() + + # Add new ids, remove stale ids + if self['person_ids'] != person_ids: + from PLC.Methods.AddPersonToSite import AddPersonToSite + from PLC.Methods.DeletePersonFromSite import DeletePersonFromSite + new_persons = set(person_ids).difference(self['person_ids']) + stale_persons = set(self['person_ids']).difference(person_ids) + + for new_person in new_persons: + AddPersonToSite.__call__(AddPersonToSite(self.api), auth, new_person, self['site_id']) + for stale_person in stale_persons: + DeletePersonFromSite.__call__(DeletePersonFromSite(self.api), auth, stale_person, self['site_id']) + + def associate_addresses(self, auth, field, value): + """ + Deletes addresses_ids not found in value list (using DeleteAddress). + Adds address if slice_fields w/o address_id found in value list (using AddSiteAddress). + Update address if slice_fields w/ address_id found in value list (using UpdateAddress). + """ + + assert 'address_ids' in self + assert 'site_id' in self + assert isinstance(value, list) + + (address_ids, blank, addresses) = self.separate_types(value) + + for address in addresses: + if 'address_id' in address: + address_ids.append(address['address_id']) + + # Add new ids, remove stale ids + if self['address_ids'] != address_ids: + from PLC.Methods.DeleteAddress import DeleteAddress + stale_addresses = set(self['address_ids']).difference(address_ids) + + for stale_address in stale_addresses: + DeleteAddress.__call__(DeleteAddress(self.api), auth, stale_address) + + if addresses: + from PLC.Methods.AddSiteAddress import AddSiteAddress + from PLC.Methods.UpdateAddress import UpdateAddress + + updated_addresses = filter(lambda address: 'address_id' in address, addresses) + added_addresses = filter(lambda address: 'address_id' not in address, addresses) + + for address in added_addresses: + AddSiteAddress.__call__(AddSiteAddress(self.api), auth, self['site_id'], address) + for address in updated_addresses: + address_id = address.pop('address_id') + UpdateAddress.__call__(UpdateAddress(self.api), auth, address_id, address) + + def delete(self, commit = True): + """ + Delete existing site. + """ + + assert 'site_id' in self + + # Delete accounts of all people at the site who are not + # members of at least one other non-deleted site. 
+ persons = Persons(self.api, self['person_ids']) + for person in persons: + delete = True + + person_sites = Sites(self.api, person['site_ids']) + for person_site in person_sites: + if person_site['site_id'] != self['site_id']: + delete = False + break + + if delete: + person.delete(commit = False) + + # Delete all site addresses + addresses = Addresses(self.api, self['address_ids']) + for address in addresses: + address.delete(commit = False) + + # Delete all site slices + slices = Slices(self.api, self['slice_ids']) + for slice in slices: + slice.delete(commit = False) + + # Delete all site PCUs + pcus = PCUs(self.api, self['pcu_ids']) + for pcu in pcus: + pcu.delete(commit = False) + + # Delete all site nodes + nodes = Nodes(self.api, self['node_ids']) + for node in nodes: + node.delete(commit = False) + + # Clean up miscellaneous join tables + for table in self.join_tables: + self.api.db.do("DELETE FROM %s WHERE site_id = %d" % \ + (table, self['site_id'])) + + # Mark as deleted + self['deleted'] = True + self.sync(commit) + +class Sites(Table): + """ + Representation of row(s) from the sites table in the + database. + """ + + def __init__(self, api, site_filter = None, columns = None): + Table.__init__(self, api, Site, columns) + + sql = "SELECT %s FROM view_sites WHERE deleted IS False" % \ + ", ".join(self.columns) + + if site_filter is not None: + if isinstance(site_filter, (list, tuple, set)): + # Separate the list into integers and strings + ints = filter(lambda x: isinstance(x, (int, long)), site_filter) + strs = filter(lambda x: isinstance(x, StringTypes), site_filter) + site_filter = Filter(Site.fields, {'site_id': ints, 'login_base': strs}) + sql += " AND (%s) %s" % site_filter.sql(api, "OR") + elif isinstance(site_filter, dict): + site_filter = Filter(Site.fields, site_filter) + sql += " AND (%s) %s" % site_filter.sql(api, "AND") + elif isinstance (site_filter, StringTypes): + site_filter = Filter(Site.fields, {'login_base':[site_filter]}) + sql += " AND (%s) %s" % site_filter.sql(api, "AND") + elif isinstance (site_filter, int): + site_filter = Filter(Site.fields, {'site_id':[site_filter]}) + sql += " AND (%s) %s" % site_filter.sql(api, "AND") + else: + raise PLCInvalidArgument, "Wrong site filter %r"%site_filter + + self.selectall(sql) diff --git a/changes/Slices.py b/changes/Slices.py new file mode 100755 index 00000000..047b07ae --- /dev/null +++ b/changes/Slices.py @@ -0,0 +1,297 @@ +from types import StringTypes +import time +import re + +from PLC.Faults import * +from PLC.Parameter import Parameter, Mixed +from PLC.Filter import Filter +from PLC.Debug import profile +from PLC.Table import Row, Table +from PLC.SliceInstantiations import SliceInstantiation, SliceInstantiations +from PLC.Nodes import Node +from PLC.Persons import Person, Persons +from PLC.SliceAttributes import SliceAttribute + +class Slice(Row): + """ + Representation of a row in the slices table. To use, optionally + instantiate with a dict of values. Update as you would a + dict. Commit to the database with sync().To use, instantiate + with a dict of values. 
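+
+    An illustrative sketch of that workflow (identifiers are hypothetical,
+    and the name must start with a valid login_base; api stands for a
+    PLCAPI instance):
+
+        slice = Slice(api, {'name': 'example_test', 'site_id': 1,
+                            'creator_person_id': 4})
+        slice.sync()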
+ """ + + table_name = 'slices' + primary_key = 'slice_id' + join_tables = ['slice_node', 'slice_person', 'slice_attribute', 'peer_slice', 'node_slice_whitelist'] + fields = { + 'slice_id': Parameter(int, "Slice identifier"), + 'site_id': Parameter(int, "Identifier of the site to which this slice belongs"), + 'name': Parameter(str, "Slice name", max = 32), + 'instantiation': Parameter(str, "Slice instantiation state"), + 'url': Parameter(str, "URL further describing this slice", max = 254, nullok = True), + 'description': Parameter(str, "Slice description", max = 2048, nullok = True), + 'max_nodes': Parameter(int, "Maximum number of nodes that can be assigned to this slice"), + 'creator_person_id': Parameter(int, "Identifier of the account that created this slice"), + 'created': Parameter(int, "Date and time when slice was created, in seconds since UNIX epoch", ro = True), + 'expires': Parameter(int, "Date and time when slice expires, in seconds since UNIX epoch"), + 'uuid': Parameter(str, "Universal Unique Identifier"), + 'node_ids': Parameter([int], "List of nodes in this slice", ro = True), + 'person_ids': Parameter([int], "List of accounts that can use this slice", ro = True), + 'slice_attribute_ids': Parameter([int], "List of slice attributes", ro = True), + 'peer_id': Parameter(int, "Peer to which this slice belongs", nullok = True), + 'peer_slice_id': Parameter(int, "Foreign slice identifier at peer", nullok = True), + } + related_fields = { + 'persons': [Mixed(Parameter(int, "Person identifier"), + Parameter(str, "Email address"))], + 'nodes': [Mixed(Parameter(int, "Node identifier"), + Parameter(str, "Fully qualified hostname"))] + } + # for Cache + class_key = 'name' + foreign_fields = ['instantiation', 'url', 'description', 'max_nodes', 'expires', 'uuid'] + foreign_xrefs = [ + {'field': 'node_ids' , 'class': 'Node', 'table': 'slice_node' }, + {'field': 'person_ids', 'class': 'Person', 'table': 'slice_person'}, + {'field': 'creator_person_id', 'class': 'Person', 'table': 'unused-on-direct-refs'}, + {'field': 'site_id', 'class': 'Site', 'table': 'unused-on-direct-refs'}, + ] + # forget about this one, it is read-only anyway + # handling it causes Cache to re-sync all over again + # 'created' + + def validate_name(self, name): + # N.B.: Responsibility of the caller to ensure that login_base + # portion of the slice name corresponds to a valid site, if + # desired. + + # 1. Lowercase. + # 2. Begins with login_base (letters or numbers). + # 3. Then single underscore after login_base. + # 4. Then letters, numbers, or underscores. + good_name = r'^[a-z0-9]+_[a-zA-Z0-9_]+$' + if not name or \ + not re.match(good_name, name): + raise PLCInvalidArgument, "Invalid slice name" + + conflicts = Slices(self.api, [name]) + for slice in conflicts: + if 'slice_id' not in self or self['slice_id'] != slice['slice_id']: + raise PLCInvalidArgument, "Slice name already in use, %s"%name + + return name + + def validate_instantiation(self, instantiation): + instantiations = [row['instantiation'] for row in SliceInstantiations(self.api)] + if instantiation not in instantiations: + raise PLCInvalidArgument, "No such instantiation state" + + return instantiation + + validate_created = Row.validate_timestamp + + def validate_expires(self, expires): + # N.B.: Responsibility of the caller to ensure that expires is + # not too far into the future. 
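+        # once the slice is marked deleted, expires is no longer required
+        # to lie in the future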
+ check_future = not ('is_deleted' in self and self['is_deleted']) + return Row.validate_timestamp(self, expires, check_future = check_future) + + add_person = Row.add_object(Person, 'slice_person') + remove_person = Row.remove_object(Person, 'slice_person') + + add_node = Row.add_object(Node, 'slice_node') + remove_node = Row.remove_object(Node, 'slice_node') + + add_to_node_whitelist = Row.add_object(Node, 'node_slice_whitelist') + delete_from_node_whitelist = Row.remove_object(Node, 'node_slice_whitelist') + + def associate_persons(self, auth, field, value): + """ + Adds persons found in value list to this slice (using AddPersonToSlice). + Deletes persons not found in value list from this slice (using DeletePersonFromSlice). + """ + + assert 'person_ids' in self + assert 'slice_id' in self + assert isinstance(value, list) + + (person_ids, emails) = self.separate_types(value)[0:2] + + # Translate emails into person_ids + if emails: + persons = Persons(self.api, emails, ['person_id']).dict('person_id') + person_ids += persons.keys() + + # Add new ids, remove stale ids + if self['person_ids'] != person_ids: + from PLC.Methods.AddPersonToSlice import AddPersonToSlice + from PLC.Methods.DeletePersonFromSlice import DeletePersonFromSlice + new_persons = set(person_ids).difference(self['person_ids']) + stale_persons = set(self['person_ids']).difference(person_ids) + + for new_person in new_persons: + AddPersonToSlice.__call__(AddPersonToSlice(self.api), auth, new_person, self['slice_id']) + for stale_person in stale_persons: + DeletePersonFromSlice.__call__(DeletePersonFromSlice(self.api), auth, stale_person, self['slice_id']) + + def associate_nodes(self, auth, field, value): + """ + Adds nodes found in value list to this slice (using AddSliceToNodes). + Deletes nodes not found in value list from this slice (using DeleteSliceFromNodes). + """ + + from PLC.Nodes import Nodes + + assert 'node_ids' in self + assert 'slice_id' in self + assert isinstance(value, list) + + (node_ids, hostnames) = self.separate_types(value)[0:2] + + # Translate hostnames into node_ids + if hostnames: + nodes = Nodes(self.api, hostnames, ['node_id']).dict('node_id') + node_ids += nodes.keys() + + # Add new ids, remove stale ids + if self['node_ids'] != node_ids: + from PLC.Methods.AddSliceToNodes import AddSliceToNodes + from PLC.Methods.DeleteSliceFromNodes import DeleteSliceFromNodes + new_nodes = set(node_ids).difference(self['node_ids']) + stale_nodes = set(self['node_ids']).difference(node_ids) + + if new_nodes: + AddSliceToNodes.__call__(AddSliceToNodes(self.api), auth, self['slice_id'], list(new_nodes)) + if stale_nodes: + DeleteSliceFromNodes.__call__(DeleteSliceFromNodes(self.api), auth, self['slice_id'], list(stale_nodes)) + def associate_slice_attributes(self, auth, fields, value): + """ + Deletes slice_attribute_ids not found in value list (using DeleteSliceAttribute). + Adds slice_attributes if slice_fields w/o slice_id is found (using AddSliceAttribute). + Updates slice_attribute if slice_fields w/ slice_id is found (using UpdateSlceiAttribute). + """ + + assert 'slice_attribute_ids' in self + assert isinstance(value, list) + + (attribute_ids, blank, attributes) = self.separate_types(value) + + # There is no way to add attributes by id. They are + # associated with a slice when they are created. 
+ # So we are only looking to delete here + if self['slice_attribute_ids'] != attribute_ids: + from PLC.Methods.DeleteSliceAttribute import DeleteSliceAttribute + stale_attributes = set(self['slice_attribute_ids']).difference(attribute_ids) + + for stale_attribute in stale_attributes: + DeleteSliceAttribute.__call__(DeleteSliceAttribute(self.api), auth, stale_attribute['slice_attribute_id']) + + # If dictionary exists, we are either adding new + # attributes or updating existing ones. + if attributes: + from PLC.Methods.AddSliceAttribute import AddSliceAttribute + from PLC.Methods.UpdateSliceAttribute import UpdateSliceAttribute + + added_attributes = filter(lambda x: 'slice_attribute_id' not in x, attributes) + updated_attributes = filter(lambda x: 'slice_attribute_id' in x, attributes) + + for added_attribute in added_attributes: + if 'attribute_type' in added_attribute: + type = added_attribute['attribute_type'] + elif 'attribute_type_id' in added_attribute: + type = added_attribute['attribute_type_id'] + else: + raise PLCInvalidArgument, "Must specify attribute_type or attribute_type_id" + + if 'value' in added_attribute: + value = added_attribute['value'] + else: + raise PLCInvalidArgument, "Must specify a value" + + if 'node_id' in added_attribute: + node_id = added_attribute['node_id'] + else: + node_id = None + + if 'nodegroup_id' in added_attribute: + nodegroup_id = added_attribute['nodegroup_id'] + else: + nodegroup_id = None + + AddSliceAttribute.__call__(AddSliceAttribute(self.api), auth, self['slice_id'], type, value, node_id, nodegroup_id) + for updated_attribute in updated_attributes: + attribute_id = updated_attribute.pop('slice_attribute_id') + if attribute_id not in self['slice_attribute_ids']: + raise PLCInvalidArgument, "Attribute doesnt belong to this slice" + else: + UpdateSliceAttribute.__call__(UpdateSliceAttribute(self.api), auth, attribute_id, updated_attribute) + + def sync(self, commit = True): + """ + Add or update a slice. + """ + + # Before a new slice is added, delete expired slices + if 'slice_id' not in self: + expired = Slices(self.api, expires = -int(time.time())) + for slice in expired: + slice.delete(commit) + + Row.sync(self, commit) + + def delete(self, commit = True): + """ + Delete existing slice. + """ + + assert 'slice_id' in self + + # Clean up miscellaneous join tables + for table in self.join_tables: + self.api.db.do("DELETE FROM %s WHERE slice_id = %d" % \ + (table, self['slice_id'])) + + # Mark as deleted + self['is_deleted'] = True + self.sync(commit) + + +class Slices(Table): + """ + Representation of row(s) from the slices table in the + database. 
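+
+    A filtering sketch (identifiers are hypothetical; api stands for a
+    PLCAPI instance):
+
+        Slices(api)                       # non-deleted, non-expired slices
+        Slices(api, 'example_test')       # a string filters on name
+        Slices(api, [10, 11])             # integers filter on slice_id
+        Slices(api, {'site_id': 1}, expires = None)  # skip the expiry cut-off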
+ """ + + def __init__(self, api, slice_filter = None, columns = None, expires = int(time.time())): + Table.__init__(self, api, Slice, columns) + + sql = "SELECT %s FROM view_slices WHERE is_deleted IS False" % \ + ", ".join(self.columns) + + if expires is not None: + if expires >= 0: + sql += " AND expires > %d" % expires + else: + expires = -expires + sql += " AND expires < %d" % expires + + if slice_filter is not None: + if isinstance(slice_filter, (list, tuple, set)): + # Separate the list into integers and strings + ints = filter(lambda x: isinstance(x, (int, long)), slice_filter) + strs = filter(lambda x: isinstance(x, StringTypes), slice_filter) + slice_filter = Filter(Slice.fields, {'slice_id': ints, 'name': strs}) + sql += " AND (%s) %s" % slice_filter.sql(api, "OR") + elif isinstance(slice_filter, dict): + slice_filter = Filter(Slice.fields, slice_filter) + sql += " AND (%s) %s" % slice_filter.sql(api, "AND") + elif isinstance (slice_filter, StringTypes): + slice_filter = Filter(Slice.fields, {'name':[slice_filter]}) + sql += " AND (%s) %s" % slice_filter.sql(api, "AND") + elif isinstance (slice_filter, int): + slice_filter = Filter(Slice.fields, {'slice_id':[slice_filter]}) + sql += " AND (%s) %s" % slice_filter.sql(api, "AND") + else: + raise PLCInvalidArgument, "Wrong slice filter %r"%slice_filter + + self.selectall(sql)