--- /dev/null
+import SimpleXMLRPCServer
+import time
+
+dummy_api_addr = ("localhost", 8080)
+
+# Fake Testbed DB
+
+nodes_list = []
+for i in range(1,11):
+ node = {'hostname': 'node'+str(i)+'.dummy-testbed.org', 'type': 'dummy-node', 'node_id': i}
+ nodes_list.append(node)
+
+slices_list = []
+for i in range(1,3):
+ slice = {'slice_name': 'slice'+str(i), 'user_ids': range(i,4,2), 'slice_id': i, 'node_ids': range(i,10,2), 'enabled': True, 'expires': int(time.time())+60*60*24*30}
+ slices_list.append(slice)
+
+users_list = []
+for i in range(1,5):
+ user = {'user_name': 'user'+str(i), 'user_id': i, 'email': 'user'+str(i)+'@dummy-testbed.org', 'keys': ['user_ssh_pub_key_'+str(i)]}
+ users_list.append(user)
+
+DB = {'nodes_list': nodes_list,'node_index': 11, 'slices_list': slices_list, 'slice_index': 3, 'users_list': users_list, 'user_index': 5}
+
#Filter function for the GET methods
+
+def FilterList(myfilter, mylist):
+ result = []
+ result.extend(mylist)
+ for item in mylist:
+ for key in myfilter.keys():
+ if myfilter[key] != item[key]:
+ result.remove(item)
+ break
+ return result
+
+
+# RPC functions definition
+#GET
+def GetTestbedInfo():
+ return {'name': 'Dummy', 'longitude': 123456, 'latitude': 654321, 'domain':'dummy-testbed.org'}
+
+def GetNodes(filter={}):
+ global DB
+ result = []
+ result.extend(DB['nodes_list'])
+ if 'node_ids' in filter:
+ for node in DB['nodes_list']:
+ if node['node_id'] not in filter['node_ids']:
+ result.remove(node)
+ if filter:
+ result = FilterList(filter, result)
+ return result
+
+def GetSlices(filter={}):
+ global DB
+ result = []
+ result.extend(DB['slices_list'])
+ if 'slice_ids' in filter:
+ for slice in DB['slices_list']:
+ if slice['slice_id'] not in filter['slice_ids']:
+ result.remove(slice)
+
+ if filter:
+ result = FilterList(filter, result)
+ return result
+
+
+def GetUsers(filter={}):
+ global DB
+ result = []
+ result.extend(DB['users_list'])
+ if 'user_ids' in filter:
+ for user in DB['users_list']:
+ if user['user_id'] not in filter['user_ids']:
+ result.remove(user)
+
+ if filter:
+ result = FilterList(filter, result)
+ return result
+
+
+#def GetKeys():
+
+
+
+#add
+
+def AddNode(node):
+ global DB
+ if not isinstance(node, dict):
+ return False
+ for key in node.keys():
+ if key not in ['hostname', 'type']:
+ return False
+ node['node_id'] = DB['node_index']
+ DB['node_index'] += 1
+ DB['nodes_list'].append(node)
+ return node['node_id']
+
+def AddSlice(slice):
+ global DB
+ if not isinstance(slice, dict):
+ return False
+ for key in slice.keys():
+ if key not in ['slice_name', 'user_ids', 'node_ids', 'enabled', 'expires']:
+ return False
+ slice['slice_id'] = DB['slice_index']
+ slice['expires'] = int(time.time())+60*60*24*30
+ DB['slice_index'] += 1
+ DB['slices_list'].append(slice)
+ return slice['slice_id']
+
+
+def AddUser(user):
+ global DB
+ if not isinstance(user, dict):
+ return False
+ for key in user.keys():
+ if key not in ['user_name', 'email', 'keys']:
+ return False
+ user['user_id'] = DB['user_index']
+ DB['user_index'] += 1
+ DB['users_list'].append(user)
+ return user['user_id']
+
+
+def AddUserKey(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for user in DB['users_list']:
+ if param['user_id'] == user['user_id']:
+ user['keys'].append(param['key'])
+ return True
+ return False
+ except:
+ return False
+
+def AddUserToSlice(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for slice in DB['slices_list']:
+ if param['slice_id'] == slice['slice_id']:
+ if not 'user_ids' in slice: slice['user_ids'] = []
+ slice['user_ids'].append(param['user_id'])
+ return True
+ return False
+ except:
+ return False
+
+def AddSliceToNodes(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for slice in DB['slices_list']:
+ if param['slice_id'] == slice['slice_id']:
+ if not 'node_ids' in slice: slice['node_ids'] = []
+ slice['node_ids'].extend(param['node_ids'])
+ return True
+ return False
+ except:
+ return False
+
+
+#Delete
+
+def DeleteNode(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for node in DB['nodes_list']:
+ if param['node_id'] == node['node_id']:
+ DB['nodes_list'].remove(node)
+ for slice in DB['slices_list']:
+ if param['node_id'] in slice['node_ids']:
+ slice['node_ids'].remove(param['node_id'])
+ return True
+ return False
+ except:
+ return False
+
+
+def DeleteSlice(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for slice in DB['slices_list']:
+ if param['slice_id'] == slice['slice_id']:
+ DB['slices_list'].remove(slice)
+ return True
+ return False
+ except:
+ return False
+
+
+def DeleteUser(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for user in DB['users_list']:
+ if param['user_id'] == user['user_id']:
+ DB['users_list'].remove(user)
+ for slice in DB['slices_list']:
+ if param['user_id'] in slice['user_ids']:
+ slice['user_ids'].remove(param['user_id'])
+ return True
+ return False
+ except:
+ return False
+
+
+def DeleteKey(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for user in DB['users_list']:
+ if param['key'] in user['keys']:
+ user['keys'].remove(param['key'])
+ return True
+ return False
+ except:
+ return False
+
+def DeleteUserFromSlice(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for slice in DB['slices_list']:
+ if param['slice_id'] == slice['slice_id'] and param['user_id'] in slice['user_ids']:
+ slice['user_ids'].remove(param['user_id'])
+ return True
+ return False
+ except:
+ return False
+
+
+def DeleteSliceFromNodes(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for slice in DB['slices_list']:
+ if param['slice_id'] == slice['slice_id']:
+ for node_id in param['node_ids']:
+ if node_id in slice['node_ids']: slice['node_ids'].remove(node_id)
+ return True
+ return False
+ except:
+ return False
+
+
+#Update
+
+def UpdateNode(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for node in DB['nodes_list']:
+ if param['node_id'] == node['node_id']:
+ for key in param['fields'].keys():
+ if key in ['hostname', 'type']:
+ node[key] = param['fields'][key]
+ return True
+ return False
+ except:
+ return False
+
+
+def UpdateSlice(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for slice in DB['slices_list']:
+ if param['slice_id'] == slice['slice_id']:
+ for key in param['fields'].keys():
+ if key in ['slice_name']:
+ slice[key] = param['fields'][key]
+ return True
+ return False
+ except:
+ return False
+
+
+def UpdateUser(param):
+ global DB
+ if not isinstance(param, dict):
+ return False
+ try:
+ for user in DB['users_list']:
+ if param['user_id'] == user['user_id']:
+ for key in param['fields'].keys():
+ if key in ['user_name', 'email']:
+ user[key] = param['fields'][key]
+ return True
+ return False
+ except:
+ return False
+
+
+
+
+# Instantiate the XMLRPC server
+dummy_api_server = SimpleXMLRPCServer.SimpleXMLRPCServer(dummy_api_addr)
+
+# RPC functions registration
+dummy_api_server.register_function(GetTestbedInfo)
+dummy_api_server.register_function(GetNodes)
+dummy_api_server.register_function(GetSlices)
+dummy_api_server.register_function(GetUsers)
+dummy_api_server.register_function(AddNode)
+dummy_api_server.register_function(AddSlice)
+dummy_api_server.register_function(AddUser)
+dummy_api_server.register_function(AddUserKey)
+dummy_api_server.register_function(AddUserToSlice)
+dummy_api_server.register_function(AddSliceToNodes)
+dummy_api_server.register_function(DeleteNode)
+dummy_api_server.register_function(DeleteSlice)
+dummy_api_server.register_function(DeleteUser)
+dummy_api_server.register_function(DeleteKey)
+dummy_api_server.register_function(DeleteUserFromSlice)
+dummy_api_server.register_function(DeleteSliceFromNodes)
+dummy_api_server.register_function(UpdateNode)
+dummy_api_server.register_function(UpdateSlice)
+dummy_api_server.register_function(UpdateUser)
+
+
+# Register Introspective functions
+dummy_api_server.register_introspection_functions()
+
+# Handle requests
+dummy_api_server.serve_forever()
+
+
+
--- /dev/null
+#!/usr/bin/python
+from sfa.util.xrn import Xrn, hrn_to_urn, urn_to_hrn
+from sfa.util.sfatime import utcparse, datetime_to_string
+from sfa.util.sfalogging import logger
+
+from sfa.rspecs.rspec import RSpec
+from sfa.rspecs.elements.hardware_type import HardwareType
+from sfa.rspecs.elements.node import Node
+from sfa.rspecs.elements.link import Link
+from sfa.rspecs.elements.sliver import Sliver
+from sfa.rspecs.elements.login import Login
+from sfa.rspecs.elements.location import Location
+from sfa.rspecs.elements.interface import Interface
+from sfa.rspecs.elements.services import Services
+from sfa.rspecs.elements.pltag import PLTag
+from sfa.rspecs.elements.lease import Lease
+from sfa.rspecs.elements.granularity import Granularity
+from sfa.rspecs.version_manager import VersionManager
+
+from sfa.dummy.dummyxrn import DummyXrn, hostname_to_urn, hrn_to_dummy_slicename, slicename_to_hrn
+
+import time
+
+class DummyAggregate:
+
+ def __init__(self, driver):
+ self.driver = driver
+
+ def get_slice_and_slivers(self, slice_xrn):
+ """
+ Returns a dict of slivers keyed on the sliver's node_id
+ """
+ slivers = {}
+ slice = None
+ if not slice_xrn:
+ return (slice, slivers)
+ slice_urn = hrn_to_urn(slice_xrn, 'slice')
+ slice_hrn, _ = urn_to_hrn(slice_xrn)
+ slice_name = hrn_to_dummy_slicename(slice_hrn)
+ slices = self.driver.shell.GetSlices({'slice_name': slice_name})
+ if not slices:
+ return (slice, slivers)
+ slice = slices[0]
+
+ # sort slivers by node id
+ slice_nodes = self.driver.shell.GetNodes({'node_ids': slice['node_ids']})
+ for node in slice_nodes:
+ slivers[node['node_id']] = node
+
+ return (slice, slivers)
+
+ def get_nodes(self, slice_xrn, slice=None,slivers=[], options={}):
+ # if we are dealing with a slice that has no node just return
+ # and empty list
+ if slice_xrn:
+ if not slice or not slice['node_ids']:
+ return []
+
+ filter = {}
+ if slice and 'node_ids' in slice and slice['node_ids']:
+ filter['node_ids'] = slice['node_ids']
+
+ nodes = self.driver.shell.GetNodes(filter)
+
+ rspec_nodes = []
+ for node in nodes:
+ rspec_node = Node()
+ # xxx how to retrieve site['login_base']
+ site=self.driver.testbedInfo
+ rspec_node['component_id'] = hostname_to_urn(self.driver.hrn, site['name'], node['hostname'])
+ rspec_node['component_name'] = node['hostname']
+ rspec_node['component_manager_id'] = Xrn(self.driver.hrn, 'authority+cm').get_urn()
+ rspec_node['authority_id'] = hrn_to_urn(PlXrn.site_hrn(self.driver.hrn, site['name']), 'authority+sa')
+ rspec_node['exclusive'] = 'false'
+ rspec_node['hardware_types'] = [HardwareType({'name': 'plab-pc'}),
+ HardwareType({'name': 'pc'})]
+ # add site/interface info to nodes.
+ # assumes that sites, interfaces and tags have already been prepared.
+ if site['longitude'] and site['latitude']:
+ location = Location({'longitude': site['longitude'], 'latitude': site['latitude'], 'country': 'unknown'})
+ rspec_node['location'] = location
+
+ if node['node_id'] in slivers:
+ # add sliver info
+ sliver = slivers[node['node_id']]
+ rspec_node['client_id'] = node['hostname']
+ rspec_node['slivers'] = [sliver]
+
+ # slivers always provide the ssh service
+ login = Login({'authentication': 'ssh-keys', 'hostname': node['hostname'], 'port':'22', 'username': slice['slice_name']})
+ service = Services({'login': login})
+ rspec_node['services'] = [service]
+ rspec_nodes.append(rspec_node)
+ return rspec_nodes
+
+
+
+ def get_rspec(self, slice_xrn=None, version = None, options={}):
+
+ version_manager = VersionManager()
+ version = version_manager.get_version(version)
+ if not slice_xrn:
+ rspec_version = version_manager._get_version(version.type, version.version, 'ad')
+ else:
+ rspec_version = version_manager._get_version(version.type, version.version, 'manifest')
+
+ slice, slivers = self.get_slice_and_slivers(slice_xrn)
+ rspec = RSpec(version=rspec_version, user_options=options)
+ if slice and 'expires' in slice:
+ rspec.xml.set('expires', datetime_to_string(utcparse(slice['expires'])))
+
+ nodes = self.get_nodes(slice_xrn, slice, slivers, options)
+ rspec.version.add_nodes(nodes)
+ rspec.version.add_links(links)
+ # add sliver defaults
+ default_sliver = slivers.get(None, [])
+ if default_sliver:
+ default_sliver_attribs = default_sliver.get('tags', [])
+ for attrib in default_sliver_attribs:
+ logger.info(attrib)
+ rspec.version.add_default_sliver_attribute(attrib['tagname'], attrib['value'])
+
+ return rspec.toxml()
+
+
--- /dev/null
+import time
+import datetime
+#
+from sfa.util.faults import MissingSfaInfo, UnknownSfaType, \
+ RecordNotFound, SfaNotImplemented, SliverDoesNotExist
+
+from sfa.util.sfalogging import logger
+from sfa.util.defaultdict import defaultdict
+from sfa.util.sfatime import utcparse, datetime_to_string, datetime_to_epoch
+from sfa.util.xrn import Xrn, hrn_to_urn, get_leaf
+from sfa.util.cache import Cache
+
+# one would think the driver should not need to mess with the SFA db, but..
+from sfa.storage.alchemy import dbsession
+from sfa.storage.model import RegRecord
+
+# used to be used in get_ticket
+#from sfa.trust.sfaticket import SfaTicket
+
+from sfa.rspecs.version_manager import VersionManager
+from sfa.rspecs.rspec import RSpec
+
+# the driver interface, mostly provides default behaviours
+from sfa.managers.driver import Driver
+
+from sfa.dummy.dummyshell import DummyShell
+from sfa.dummy.dummyaggregate import DummyAggregate
+from sfa.dummy.dummyslices import DummySlices
+from sfa.dummy.dummyxrn import DummyXrn, slicename_to_hrn, hostname_to_hrn, hrn_to_dummy_slicename, xrn_to_hostname
+
+
+def list_to_dict(recs, key):
+ """
+ convert a list of dictionaries into a dictionary keyed on the
+ specified dictionary key
+ """
+ return dict ( [ (rec[key],rec) for rec in recs ] )
+
+#
+# DummyShell is just an xmlrpc serverproxy where methods can be sent as-is;
+#
+class DummyDriver (Driver):
+
+ # the cache instance is a class member so it survives across incoming requests
+ cache = None
+
+ def __init__ (self, config):
+ Driver.__init__ (self, config)
+ self.config = config
+ self.hrn = config.SFA_INTERFACE_HRN
+ self.root_auth = config.SFA_REGISTRY_ROOT_AUTH
+ self.shell = DummyShell (config)
+ self.testbedInfo = self.shell.GetTestbedInfo()
+
+ ########################################
+ ########## registry oriented
+ ########################################
+
+ def augment_records_with_testbed_info (self, sfa_records):
+ return self.fill_record_info (sfa_records)
+
+ ##########
+ def register (self, sfa_record, hrn, pub_key):
+ type = sfa_record['type']
+ dummy_record = self.sfa_fields_to_dummy_fields(type, hrn, sfa_record)
+
+ if type == 'authority':
+ pointer = -1
+
+ elif type == 'slice':
+ slices = self.shell.GetSlices({'slice_name': dummy_record['slice_name']})
+ if not slices:
+ pointer = self.shell.AddSlice(dummy_record)
+ else:
+ pointer = slices[0]['slice_id']
+
+ elif type == 'user':
+ users = self.shell.GetUsers({'email':sfa_record['email']})
+ if not users:
+ pointer = self.shell.AddUser(dummy_record)
+ else:
+ pointer = users[0]['user_id']
+
+ # Add the user's key
+ if pub_key:
+ self.shell.AddUserKey({'user_id' : pointer, 'key' : pub_key})
+
+ elif type == 'node':
+ nodes = self.shell.GetNodes(dummy_record['hostname'])
+ if not nodes:
+ pointer = self.shell.AddNode(dummy_record)
+ else:
+ pointer = users[0]['node_id']
+
+ return pointer
+
+ ##########
+ def update (self, old_sfa_record, new_sfa_record, hrn, new_key):
+ pointer = old_sfa_record['pointer']
+ type = old_sfa_record['type']
+ dummy_record=self.sfa_fields_to_dummy_fields(type, hrn, new_sfa_record)
+
+ # new_key implemented for users only
+ if new_key and type not in [ 'user' ]:
+ raise UnknownSfaType(type)
+
+
+ if type == "slice":
+ self.shell.UpdateSlice({'slice_id': pointer, 'fields': dummy_record})
+
+ elif type == "user":
+ self.shell.UpdateUser({'user_id': pointer, 'fields': dummy_record})
+
+ if new_key:
+ self.shell.AddUserKey({'user_id' : pointer, 'key' : new_key})
+
+ elif type == "node":
+ self.shell.UpdateNode({'node_id': pointer, 'fields': dummy_record})
+
+
+ return True
+
+
+ ##########
+ def remove (self, sfa_record):
+ type=sfa_record['type']
+ pointer=sfa_record['pointer']
+ if type == 'user':
+ self.shell.DeleteUser{'user_id': pointer})
+ elif type == 'slice':
+ self.shell.DeleteSlice('slice_id': pointer)
+ elif type == 'node':
+ self.shell.DeleteNode('node_id': pointer)
+
+ return True
+
+
+
+
+
+ ##
+ # Convert SFA fields to Dummy testbed fields for use when registering or updating
+ # registry record in the dummy testbed
+ #
+
+ def sfa_fields_to_dummy_fields(self, type, hrn, sfa_record):
+
+ dummy_record = {}
+
+ if type == "slice":
+ dummy_record["slice_name"] = hrn_to_dummy_slicename(hrn)
+
+ elif type == "node":
+ if "hostname" not in sfa_record:
+ raise MissingSfaInfo("hostname")
+ dummy_record["hostname"] = sfa_record["hostname"]
+ if "type" in sfa_record:
+ dummy_record["type"] = sfa_record["type"]
+ else:
+ dummy_record["type"] = "dummy_type"
+
+ elif type == "authority":
+ dummy_record["name"] = hrn
+
+ elif type == "user":
+ dummy_record["user_name"] = sfa_record["email"].split('@')[0]
+ dummy_record["email"] = sfa_record["email"]
+
+ return dummy_record
+
+ ####################
+ def fill_record_info(self, records):
+ """
+ Given a (list of) SFA record, fill in the DUMMY TESTBED specific
+ and SFA specific fields in the record.
+ """
+ if not isinstance(records, list):
+ records = [records]
+
+ self.fill_record_dummy_info(records)
+ self.fill_record_hrns(records)
+ self.fill_record_sfa_info(records)
+ return records
+
+ def fill_record_dummy_info(self, records):
+ """
+ Fill in the DUMMY specific fields of a SFA record. This
+ involves calling the appropriate DUMMY method to retrieve the
+ database record for the object.
+
+ @param record: record to fill in field (in/out param)
+ """
+ # get ids by type
+ node_ids, slice_ids, user_ids = [], [], []
+ type_map = {'node': node_ids, 'slice': slice_ids, 'user': user_ids}
+
+ for record in records:
+ for type in type_map:
+ if type == record['type']:
+ type_map[type].append(record['pointer'])
+
+ # get dummy records
+ nodes, slices, users = {}, {}, {}
+ if node_ids:
+ node_list = self.shell.GetNodes({'node_ids':node_ids})
+ nodes = list_to_dict(node_list, 'node_id')
+ if slice_ids:
+ slice_list = self.shell.GetSlices({'slice_ids':slice_ids})
+ slices = list_to_dict(slice_list, 'slice_id')
+ if user_ids:
+ user_list = self.shell.GetUsers({'user_ids': user_ids})
+ users = list_to_dict(user_list, 'user_id')
+
+ dummy_records = {'node': nodes, 'slice': slices, 'user': users}
+
+
+ # fill record info
+ for record in records:
+ # records with pointer==-1 do not have dummy info.
+ if record['pointer'] == -1:
+ continue
+
+ for type in dummy_records:
+ if record['type'] == type:
+ if record['pointer'] in dummy_records[type]:
+ record.update(dummy_records[type][record['pointer']])
+ break
+ # fill in key info
+ if record['type'] == 'user':
+ record['key_ids'] = []
+ recors['keys'] = []
+ for key in dummy_records['user'][record['pointer']]['keys']:
+ record['key_ids'].append(-1)
+ recors['keys'].append(key)
+
+ return records
+
+ def fill_record_hrns(self, records):
+ """
+ convert dummy ids to hrns
+ """
+
+ # get ids
+ slice_ids, user_ids, node_ids = [], [], []
+ for record in records:
+ if 'user_ids' in record:
+ user_ids.extend(record['user_ids'])
+ if 'slice_ids' in record:
+ slice_ids.extend(record['slice_ids'])
+ if 'node_ids' in record:
+ node_ids.extend(record['node_ids'])
+
+ # get dummy records
+ slices, users, nodes = {}, {}, {}
+ if user_ids:
+ user_list = self.shell.GetUsers({'user_ids': user_ids})
+ users = list_to_dict(user_list, 'user_id')
+ if slice_ids:
+ slice_list = self.shell.GetSlices({'slice_ids': slice_ids})
+ slices = list_to_dict(slice_list, 'slice_id')
+ if node_ids:
+ node_list = self.shell.GetNodes({'node_ids': node_ids})
+ nodes = list_to_dict(node_list, 'node_id')
+
+ # convert ids to hrns
+ for record in records:
+ # get all relevant data
+ type = record['type']
+ pointer = record['pointer']
+ testbed_name = self.testbed_name()
+ auth_hrn = self.hrn
+ if pointer == -1:
+ continue
+
+ if 'user_ids' in record:
+ emails = [users[user_id]['email'] for user_id in record['user_ids'] \
+ if user_id in users]
+ usernames = [email.split('@')[0] for email in emails]
+ user_hrns = [".".join([auth_hrn, testbed_name, username]) for username in usernames]
+ record['users'] = user_hrns
+ if 'slice_ids' in record:
+ slicenames = [slices[slice_id]['slice_name'] for slice_id in record['slice_ids'] \
+ if slice_id in slices]
+ slice_hrns = [slicename_to_hrn(auth_hrn, slicename) for slicename in slicenames]
+ record['slices'] = slice_hrns
+ if 'node_ids' in record:
+ hostnames = [nodes[node_id]['hostname'] for node_id in record['node_ids'] \
+ if node_id in nodes]
+ node_hrns = [hostname_to_hrn(auth_hrn, login_base, hostname) for hostname in hostnames]
+ record['nodes'] = node_hrns
+
+
+ return records
+
+ def fill_record_sfa_info(self, records):
+
+ def startswith(prefix, values):
+ return [value for value in values if value.startswith(prefix)]
+
+ # get user ids
+ user_ids = []
+ for record in records:
+ user_ids.extend(record.get("user_ids", []))
+
+ # get sfa records for all records associated with these records.
+ # we'll replace pl ids (person_ids) with hrns from the sfa records
+ # we obtain
+
+ # get the registry records
+ user_list, users = [], {}
+ user_list = dbsession.query (RegRecord).filter(RegRecord.pointer.in_(user_ids))
+ # create a hrns keyed on the sfa record's pointer.
+ # Its possible for multiple records to have the same pointer so
+ # the dict's value will be a list of hrns.
+ users = defaultdict(list)
+ for user in user_list:
+ users[user.pointer].append(user)
+
+ # get the dummy records
+ dummy_user_list, dummy_users = [], {}
+ dummy_user_list = self.shell.GetUsers({'user_ids': user_ids})
+ dummy_users = list_to_dict(dummy_user_list, 'user_id')
+
+ # fill sfa info
+ for record in records:
+ # skip records with no pl info (top level authorities)
+ #if record['pointer'] == -1:
+ # continue
+ sfa_info = {}
+ type = record['type']
+ logger.info("fill_record_sfa_info - incoming record typed %s"%type)
+ if (type == "slice"):
+ # all slice users are researchers
+ record['geni_urn'] = hrn_to_urn(record['hrn'], 'slice')
+ record['PI'] = []
+ record['researcher'] = []
+ for user_id in record.get('user_ids', []):
+ hrns = [user.hrn for user in users[user_id]]
+ record['researcher'].extend(hrns)
+
+ elif (type.startswith("authority")):
+ record['url'] = None
+ logger.info("fill_record_sfa_info - authority xherex")
+
+ elif (type == "node"):
+ sfa_info['dns'] = record.get("hostname", "")
+ # xxx TODO: URI, LatLong, IP, DNS
+
+ elif (type == "user"):
+ logger.info('setting user.email')
+ sfa_info['email'] = record.get("email", "")
+ sfa_info['geni_urn'] = hrn_to_urn(record['hrn'], 'user')
+ sfa_info['geni_certificate'] = record['gid']
+ # xxx TODO: PostalAddress, Phone
+ record.update(sfa_info)
+
+
+ ####################
+ def update_relation (self, subject_type, target_type, relation_name, subject_id, target_ids):
+ # hard-wire the code for slice/user for now, could be smarter if needed
+ if subject_type =='slice' and target_type == 'user' and relation_name == 'researcher':
+ subject=self.shell.GetSlices (subject_id)[0]
+ current_target_ids = subject['user_ids']
+ add_target_ids = list ( set (target_ids).difference(current_target_ids))
+ del_target_ids = list ( set (current_target_ids).difference(target_ids))
+ logger.debug ("subject_id = %s (type=%s)"%(subject_id,type(subject_id)))
+ for target_id in add_target_ids:
+ self.shell.AddUserToSlice ({'user_id': target_id, 'slice_id': subject_id})
+ logger.debug ("add_target_id = %s (type=%s)"%(target_id,type(target_id)))
+ for target_id in del_target_ids:
+ logger.debug ("del_target_id = %s (type=%s)"%(target_id,type(target_id)))
+ self.shell.DeleteUserFromSlice ({'user_id': target_id, 'slice_id': subject_id})
+ else:
+ logger.info('unexpected relation %s to maintain, %s -> %s'%(relation_name,subject_type,target_type))
+
+
+ ########################################
+ ########## aggregate oriented
+ ########################################
+
+ def testbed_name (self): return "dummy"
+
+ # 'geni_request_rspec_versions' and 'geni_ad_rspec_versions' are mandatory
+ def aggregate_version (self):
+ version_manager = VersionManager()
+ ad_rspec_versions = []
+ request_rspec_versions = []
+ for rspec_version in version_manager.versions:
+ if rspec_version.content_type in ['*', 'ad']:
+ ad_rspec_versions.append(rspec_version.to_dict())
+ if rspec_version.content_type in ['*', 'request']:
+ request_rspec_versions.append(rspec_version.to_dict())
+ return {
+ 'testbed':self.testbed_name(),
+ 'geni_request_rspec_versions': request_rspec_versions,
+ 'geni_ad_rspec_versions': ad_rspec_versions,
+ }
+
+ def list_slices (self, creds, options):
+
+ slices = self.shell.GetSlices()
+ slice_hrns = [slicename_to_hrn(self.hrn, slice['slice_name']) for slice in slices]
+ slice_urns = [hrn_to_urn(slice_hrn, 'slice') for slice_hrn in slice_hrns]
+
+ return slice_urns
+
+ # first 2 args are None in case of resource discovery
+ def list_resources (self, slice_urn, slice_hrn, creds, options):
+
+ version_manager = VersionManager()
+ # get the rspec's return format from options
+ rspec_version = version_manager.get_version(options.get('geni_rspec_version'))
+ version_string = "rspec_%s" % (rspec_version)
+
+ aggregate = DummyAggregate(self)
+ rspec = aggregate.get_rspec(slice_xrn=slice_urn, version=rspec_version,
+ options=options)
+
+ return rspec
+
+ def sliver_status (self, slice_urn, slice_hrn):
+ # find out where this slice is currently running
+ slice_name = hrn_to_dummy_slicename(slice_hrn)
+
+ slice = self.shell.GetSlices({'slice_name': slice_name})
+ if len(slices) == 0:
+ raise SliverDoesNotExist("%s (used %s as slicename internally)" % (slice_hrn, slicename))
+
+ # report about the local nodes only
+ nodes = self.shell.GetNodes({'node_ids':slice['node_ids']})
+
+ if len(nodes) == 0:
+ raise SliverDoesNotExist("You have not allocated any slivers here")
+
+ # get login info
+ user = {}
+ keys = []
+ if slice['user_ids']:
+ users = self.shell.GetUsers({'user_ids': slice['user_ids']})
+ for user in users:
+ keys.extend(user['keys'])
+
+ user.update({'urn': slice_urn,
+ 'login': slice['slice_name'],
+ 'protocol': ['ssh'],
+ 'port': ['22'],
+ 'keys': keys})
+
+
+ result = {}
+ top_level_status = 'unknown'
+ if nodes:
+ top_level_status = 'ready'
+ result['geni_urn'] = slice_urn
+ result['dummy_login'] = slice['slice_name']
+ result['dummy_expires'] = datetime_to_string(utcparse(slice['expires']))
+ result['geni_expires'] = datetime_to_string(utcparse(slice['expires']))
+
+ resources = []
+ for node in nodes:
+ res = {}
+ res['dummy_hostname'] = node['hostname']
+ res['geni_expires'] = datetime_to_string(utcparse(slice['expires']))
+ sliver_id = Xrn(slice_urn, type='slice', id=node['node_id'], authority=self.hrn).urn
+ res['geni_urn'] = sliver_id
+ res['geni_status'] = 'ready'
+ res['geni_error'] = ''
+ res['users'] = [users]
+
+ resources.append(res)
+
+ result['geni_status'] = top_level_status
+ result['geni_resources'] = resources
+ return result
+
+ def create_sliver (self, slice_urn, slice_hrn, creds, rspec_string, users, options):
+
+ aggregate = DummyAggregate(self)
+ slices = DummySlices(self)
+ sfa_peer = slices.get_sfa_peer(slice_hrn)
+ slice_record=None
+ if users:
+ slice_record = users[0].get('slice_record', {})
+
+ # parse rspec
+ rspec = RSpec(rspec_string)
+ requested_attributes = rspec.version.get_slice_attributes()
+
+ # ensure slice record exists
+ slice = slices.verify_slice(slice_hrn, slice_record, peer, sfa_peer, options=options)
+ # ensure user records exists
+ users = slices.verify_users(slice_hrn, slice, users, peer, sfa_peer, options=options)
+
+ # add/remove slice from nodes
+ requested_slivers = []
+ for node in rspec.version.get_nodes_with_slivers():
+ hostname = None
+ if node.get('component_name'):
+ hostname = node.get('component_name').strip()
+ elif node.get('component_id'):
+ hostname = xrn_to_hostname(node.get('component_id').strip())
+ if hostname:
+ requested_slivers.append(hostname)
+ nodes = slices.verify_slice_nodes(slice, requested_slivers, peer)
+
+ return aggregate.get_rspec(slice_xrn=slice_urn, version=rspec.version)
+
+ def delete_sliver (self, slice_urn, slice_hrn, creds, options):
+ slicename = hrn_to_dummy_slicename(slice_hrn)
+ slices = self.shell.GetSlices({'slice_name': slicename})
+ if not slices:
+ return True
+ slice = slices[0]
+
+ try:
+ self.shell.DeleteSliceFromNodes({'slice_id': slice['slice_id'], 'node_ids': slice['node_ids'])
+ return True
+ except:
+ return False
+
+ def renew_sliver (self, slice_urn, slice_hrn, creds, expiration_time, options):
+ slicename = hrn_to_dummy_slicename(slice_hrn)
+ slices = self.shell.GetSlices({'slice_name': slicename})
+ if not slices:
+ raise RecordNotFound(slice_hrn)
+ slice = slices[0]
+ requested_time = utcparse(expiration_time)
+ record = {'expires': int(datetime_to_epoch(requested_time))}
+ try:
+ self.shell.UpdateSlice({'slice_id': slice['slice_id'], 'fields':record})
+ return True
+ except:
+ return False
+
+ # set the 'enabled' tag to True
+ def start_slice (self, slice_urn, slice_hrn, creds):
+ slicename = hrn_to_dummy_slicename(slice_hrn)
+ slices = self.shell.GetSlices({'slice_name': slicename})
+ if not slices:
+ raise RecordNotFound(slice_hrn)
+ slice_id = slices[0]['slice_id']
+ slice_enabled = slices[0]['enabled']
+ # just update the slice enabled tag
+ if not slice_enabled:
+ self.shell.UpdateSlice({'slice_id': slice_id, 'fields': {'enabled': True}})
+ return 1
+
+ # set the 'enabled' tag to False
+ def stop_slice (self, slice_urn, slice_hrn, creds):
+ slicename = hrn_to_pl_slicename(slice_hrn)
+ slices = self.shell.GetSlices({'slice_name': slicename})
+ if not slices:
+ raise RecordNotFound(slice_hrn)
+ slice_id = slices[0]['slice_id']
+ slice_enabled = slices[0]['enabled']
+ # just update the slice enabled tag
+ if slice_enabled:
+ self.shell.UpdateSlice({'slice_id': slice_id, 'fields': {'enabled': False}})
+ return 1
+
+ def reset_slice (self, slice_urn, slice_hrn, creds):
+ raise SfaNotImplemented ("reset_slice not available at this interface")
+
+ def get_ticket (self, slice_urn, slice_hrn, creds, rspec_string, options):
+ raise SfaNotImplemented,"DummyDriver.get_ticket needs a rewrite"
--- /dev/null
+import sys
+import xmlrpclib
+import socket
+from urlparse import urlparse
+
+from sfa.util.sfalogging import logger
+
+class DummyShell:
+ """
+ A simple xmlrpc shell to the dummy testbed API instance
+
+ """
+
+ direct_calls = ['AddNode', 'AddSlice', 'AddUser', 'AddUserKey', 'AddUserToSlice', 'AddSliceToNodes',
+ 'GetTestbedInfo', 'GetNodes', 'GetSlices', 'GetUsers',
+ 'DeleteNode', 'DeleteSlice', 'DeleteUser', 'DeleteKey', 'DeleteUserFromSlice',
+ 'DeleteSliceFromNodes',
+ 'UpdateNode', 'UpdateSlice', 'UpdateUser'
+ ]
+
+
+ def __init__ ( self, config ) :
+ url = config.SFA_DUMMY_URL
+ self.proxy = xmlrpclib.Server(url, verbose = False, allow_none = True)
+
+ def __getattr__(self, name):
+ def func(*args, **kwds):
+ if not name in direct_calls:
+ raise Exception, "Illegal method call %s for DUMMY driver"%(name)
+ result=getattr(self.proxy, actual_name)(*args, **kwds)
+ logger.debug('DummyShell %s returned ... '%(name))
+ return result
+ return func
+
--- /dev/null
+from types import StringTypes
+from collections import defaultdict
+
+from sfa.util.sfatime import utcparse, datetime_to_epoch
+from sfa.util.sfalogging import logger
+from sfa.util.xrn import Xrn, get_leaf, get_authority, urn_to_hrn
+
+from sfa.rspecs.rspec import RSpec
+
+from sfa.dummy.dummyxrn import DummyXrn, hrn_to_dummy_slicename
+
+MAXINT = 2L**31-1
+
class DummySlices:
    """Synchronizes slice state (nodes, users, keys) between incoming
    SFA requests and the dummy testbed API, on behalf of DummyDriver."""

    def __init__(self, driver):
        # driver exposes .shell (the xmlrpc shell) and .hrn
        self.driver = driver

    def get_slivers(self, xrn, node=None):
        """Return the sliver dicts of the slice named by *xrn*.

        *node* is accepted for interface compatibility but is unused.
        """
        hrn, type = urn_to_hrn(xrn)

        slice_name = hrn_to_dummy_slicename(hrn)

        slices = self.driver.shell.GetSlices({'slice_name': slice_name})
        # NOTE(review): assumes a matching slice exists; an empty result
        # raises IndexError here -- confirm callers guarantee this.
        slice = slices[0]
        # Build up list of users and slice attributes
        user_ids = slice['user_ids']
        # Get user information
        # bugfix: the dummy API filters id lists through the 'user_ids' key
        all_users_list = self.driver.shell.GetUsers({'user_ids': user_ids})
        all_users = {}
        for user in all_users_list:
            all_users[user['user_id']] = user

        # Build up the set of keys attached to the slice's users
        # bugfix: set has no extend(); use update()
        all_keys = set()
        for user in all_users_list:
            all_keys.update(user['keys'])

        slivers = []
        for slice in slices:
            keys = all_keys
            # XXX Sanity check; though technically this should be a system invariant
            # checked with an assertion
            if slice['expires'] > MAXINT: slice['expires'] = MAXINT

            slivers.append({
                'hrn': hrn,
                # bugfix: dummy slice records carry 'slice_name', not 'name'
                'name': slice['slice_name'],
                'slice_id': slice['slice_id'],
                'expires': slice['expires'],
                # expose as a list so the structure is XML-RPC serializable
                'keys': list(keys),
            })

        return slivers

    def get_sfa_peer(self, xrn):
        """Return the authority for this hrn, or None if we are the authority."""
        hrn, type = urn_to_hrn(xrn)

        sfa_peer = None
        slice_authority = get_authority(hrn)
        site_authority = get_authority(slice_authority)

        if site_authority != self.driver.hrn:
            sfa_peer = site_authority

        return sfa_peer

    def verify_slice_nodes(self, slice, requested_slivers, peer):
        """Make the slice's node set match *requested_slivers* (hostnames).

        Returns the node records currently attached to the slice.
        """
        nodes = self.driver.shell.GetNodes({'node_ids': slice['node_ids']})
        current_slivers = [node['hostname'] for node in nodes]

        # remove nodes not in rspec
        deleted_nodes = list(set(current_slivers).difference(requested_slivers))

        # add nodes from rspec
        added_nodes = list(set(requested_slivers).difference(current_slivers))

        # NOTE(review): added/deleted entries are hostnames but are passed
        # under 'node_ids' -- confirm the dummy API accepts hostnames here.
        try:
            self.driver.shell.AddSliceToNodes({'slice_id': slice['slice_id'], 'node_ids': added_nodes})
            self.driver.shell.DeleteSliceFromNodes({'slice_id': slice['slice_id'], 'node_ids': deleted_nodes})
        except:
            # best effort: log and fall through with the current node list
            logger.log_exc('Failed to add/remove slice from nodes')
        return nodes

    def verify_slice(self, slice_hrn, slice_record, peer, sfa_peer, options=None):
        """Fetch (creating if needed) the slice named by *slice_hrn* and
        align its expiration date with *slice_record*.  Returns the slice dict."""
        options = options or {}  # avoid the shared mutable-default pitfall
        slicename = hrn_to_dummy_slicename(slice_hrn)
        parts = slicename.split("_")
        login_base = parts[0]
        slices = self.driver.shell.GetSlices({'slice_name': slicename})
        if not slices:
            slice = {'slice_name': slicename}
            # add the slice
            slice['slice_id'] = self.driver.shell.AddSlice(slice)
            slice['node_ids'] = []
            slice['user_ids'] = []
        else:
            slice = slices[0]
            if slice_record.get('expires'):
                requested_expires = int(datetime_to_epoch(utcparse(slice_record['expires'])))
                if requested_expires and slice['expires'] != requested_expires:
                    self.driver.shell.UpdateSlice({'slice_id': slice['slice_id'],
                                                   'fields': {'expires': requested_expires}})

        return slice

    def verify_users(self, slice_hrn, slice_record, users, peer, sfa_peer, options=None):
        """Align the slice's user memberships with the requested *users*.

        NOTE(review): large parts of this method still call PlanetLab-style
        API methods (GetPersons, GetSites, AddPerson, ...) that the dummy
        shell does not whitelist -- this needs a real port to the dummy API.
        """
        options = options or {}
        users_by_email = {}
        # bugfix: users_by_site was referenced below but never initialized
        users_by_site = defaultdict(list)
        users_dict = {}
        for user in users:
            user['urn'] = user['urn'].lower()
            hrn, type = urn_to_hrn(user['urn'])
            username = get_leaf(hrn)
            # bugfix: PlXrn is not defined in this module; DummyXrn provides
            # the same pl_login_base helper
            login_base = DummyXrn(xrn=user['urn']).pl_login_base()
            user['username'] = username
            user['site'] = login_base

            if 'email' in user:
                user['email'] = user['email'].lower()
                users_by_email[user['email']] = user
                users_dict[user['email']] = user
            else:
                users_by_site[user['site']].append(user)

        # start building a list of existing users
        existing_user_ids = []
        existing_user_ids_filter = []
        if users_by_email:
            existing_user_ids_filter.extend(users_by_email.keys())
        if users_by_site:
            for login_base in users_by_site:
                users = users_by_site[login_base]
                for user in users:
                    existing_user_ids_filter.append(user['username']+'@geni.net')
        if existing_user_ids_filter:
            # get existing users by email
            existing_users = self.driver.shell.GetPersons({'email': existing_user_ids_filter},
                                                          ['person_id', 'key_ids', 'email'])
            existing_user_ids.extend([user['email'] for user in existing_users])

        if users_by_site:
            # get a list of user sites (based on requested user urns)
            site_list = self.driver.shell.GetSites(users_by_site.keys(), \
                ['site_id', 'login_base', 'person_ids'])
            # get all existing users at these sites
            sites = {}
            site_user_ids = []
            for site in site_list:
                sites[site['site_id']] = site
                site_user_ids.extend(site['person_ids'])

            existing_site_persons_list = self.driver.shell.GetPersons(site_user_ids,
                ['person_id', 'key_ids', 'email', 'site_ids'])

            # all requested users are either existing users or new (added) users
            for login_base in users_by_site:
                requested_site_users = users_by_site[login_base]
                for requested_user in requested_site_users:
                    user_found = False
                    for existing_user in existing_site_persons_list:
                        for site_id in existing_user['site_ids']:
                            if site_id in sites:
                                site = sites[site_id]
                                if login_base == site['login_base'] and \
                                   existing_user['email'].startswith(requested_user['username']+'@'):
                                    existing_user_ids.append(existing_user['email'])
                                    requested_user['email'] = existing_user['email']
                                    users_dict[existing_user['email']] = requested_user
                                    user_found = True
                                    break
                        if user_found:
                            break

                    if user_found == False:
                        fake_email = requested_user['username'] + '@geni.net'
                        requested_user['email'] = fake_email
                        users_dict[fake_email] = requested_user

        # requested slice users
        requested_user_ids = users_dict.keys()
        # existing slice users
        existing_slice_users_filter = {'person_id': slice_record.get('person_ids', [])}
        existing_slice_users = self.driver.shell.GetPersons(existing_slice_users_filter,
                                                            ['person_id', 'key_ids', 'email'])
        existing_slice_user_ids = [user['email'] for user in existing_slice_users]

        # users to be added, removed or updated
        added_user_ids = set(requested_user_ids).difference(existing_user_ids)
        added_slice_user_ids = set(requested_user_ids).difference(existing_slice_user_ids)
        removed_user_ids = set(existing_slice_user_ids).difference(requested_user_ids)
        updated_user_ids = set(existing_slice_user_ids).intersection(requested_user_ids)

        # Remove stale users (only if we are not appending).
        # Append by default.
        append = options.get('append', True)
        if append == False:
            for removed_user_id in removed_user_ids:
                self.driver.shell.DeletePersonFromSlice(removed_user_id, slice_record['name'])
        # update_existing users
        updated_users_list = [user for user in users_dict.values() if user['email'] in \
                              updated_user_ids]
        self.verify_keys(existing_slice_users, updated_users_list, peer, options)

        added_persons = []
        # add new users
        for added_user_id in added_user_ids:
            added_user = users_dict[added_user_id]
            hrn, type = urn_to_hrn(added_user['urn'])
            person = {
                'first_name': added_user.get('first_name', hrn),
                'last_name': added_user.get('last_name', hrn),
                'email': added_user_id,
                'peer_person_id': None,
                'keys': [],
                'key_ids': added_user.get('key_ids', []),
            }
            person['person_id'] = self.driver.shell.AddPerson(person)
            if peer:
                person['peer_person_id'] = added_user['person_id']
            added_persons.append(person)

            # enable the account
            self.driver.shell.UpdatePerson(person['person_id'], {'enabled': True})

            # add person to site
            self.driver.shell.AddPersonToSite(added_user_id, added_user['site'])

            for key_string in added_user.get('keys', []):
                key = {'key': key_string, 'key_type': 'ssh'}
                key['key_id'] = self.driver.shell.AddPersonKey(person['person_id'], key)
                person['keys'].append(key)

            # add the registry record
#            if sfa_peer:
#                peer_dict = {'type': 'user', 'hrn': hrn, 'peer_authority': sfa_peer, \
#                             'pointer': person['person_id']}
#                self.registry.register_peer_object(self.credential, peer_dict)

        for added_slice_user_id in added_slice_user_ids.union(added_user_ids):
            # add person to the slice
            self.driver.shell.AddPersonToSlice(added_slice_user_id, slice_record['name'])
            # if this is a peer record then it should already be bound to a peer.
            # no need to return worry about it getting bound later

        return added_persons

    def verify_keys(self, old_users, new_users, peer, options=None):
        """Add the keys of *new_users* that are missing from *old_users*;
        when options['append'] is False, also delete keys no longer requested."""
        options = options or {}
        # flatten the existing keys
        # bugfix: was appending whole key *lists*, which broke the membership
        # test below and made set(existing_keys) raise on unhashable lists
        existing_keys = []
        for user in old_users:
            existing_keys.extend(user['keys'])
        userdict = {}
        for user in old_users:
            userdict[user['email']] = user

        # add new keys
        requested_keys = []
        updated_users = []
        for user in new_users:
            user_keys = user.get('keys', [])
            updated_users.append(user)
            for key_string in user_keys:
                requested_keys.append(key_string)
                if key_string not in existing_keys:
                    key = key_string
                    try:
                        self.driver.shell.AddUserKey({'user_id': user['user_id'], 'key': key})
                    except:
                        # best effort -- keep the original silent-skip behaviour
                        pass
        # remove old keys (only if we are not appending)
        append = options.get('append', True)
        if append == False:
            removed_keys = set(existing_keys).difference(requested_keys)
            for key in removed_keys:
                try:
                    self.driver.shell.DeleteKey({'key': key})
                except:
                    pass
+
--- /dev/null
+# specialized Xrn class for Dummy TB
+import re
+from sfa.util.xrn import Xrn
+
+# temporary helper functions to use this module instead of namespace
def hostname_to_hrn (auth, testbed_name, hostname):
    """hrn of a node: <auth>.<testbed_name>.<escaped hostname>."""
    site_auth = auth + '.' + testbed_name
    return DummyXrn(auth=site_auth, hostname=hostname).get_hrn()
def hostname_to_urn(auth, testbed_name, hostname):
    """urn of a node: same naming as hostname_to_hrn, rendered as a urn."""
    site_auth = auth + '.' + testbed_name
    return DummyXrn(auth=site_auth, hostname=hostname).get_urn()
def slicename_to_hrn (auth_hrn, slicename):
    """hrn of a slice under authority *auth_hrn*."""
    xrn = DummyXrn(auth=auth_hrn, slicename=slicename)
    return xrn.get_hrn()
def email_to_hrn (auth_hrn, email):
    """hrn of a user derived from the local part of *email*."""
    xrn = DummyXrn(auth=auth_hrn, email=email)
    return xrn.get_hrn()
def hrn_to_dummy_slicename (hrn):
    """Testbed-local slice name encoded in a slice *hrn*."""
    xrn = DummyXrn(xrn=hrn, type='slice')
    return xrn.dummy_slicename()
def hrn_to_dummy_authname (hrn):
    """Testbed-local authority name encoded in *hrn*."""
    xrn = DummyXrn(xrn=hrn, type='any')
    return xrn.dummy_authname()
def xrn_to_hostname(hrn):
    """Hostname encoded in a node xrn.

    Bugfix: the original referenced PlXrn, which is not defined in this
    module; DummyXrn provides the same leaf/unescape behaviour.
    """
    return Xrn.unescape(DummyXrn(xrn=hrn, type='node').get_leaf())
+
class DummyXrn (Xrn):
    """Xrn specialization for the dummy testbed: builds hrns from hostnames,
    slice names, emails or interface names, and converts them back to
    testbed-local identifiers."""

    @staticmethod
    def site_hrn (auth, testbed_name):
        """hrn of the (single) dummy site: <auth>.<testbed_name>."""
        return '.'.join([auth, testbed_name])

    def __init__ (self, auth=None, hostname=None, slicename=None, email=None, interface=None, **kwargs):
        #def hostname_to_hrn(auth_hrn, login_base, hostname):
        if hostname is not None:
            self.type = 'node'
            # escape the '.' in the hostname so it stays a single hrn leaf
            self.hrn = '.'.join([auth, Xrn.escape(hostname)])
            self.hrn_to_urn()
        #def slicename_to_hrn(auth_hrn, slicename):
        elif slicename is not None:
            self.type = 'slice'
            # split at the first _
            parts = slicename.split("_", 1)
            self.hrn = ".".join([auth] + parts)
            self.hrn_to_urn()
        #def email_to_hrn(auth_hrn, email):
        elif email is not None:
            self.type = 'person'
            # keep only the part before '@' and map special chars to _
            self.hrn = '.'.join([auth, email.split('@')[0].replace(".", "_").replace("+", "_")])
            self.hrn_to_urn()
        elif interface is not None:
            self.type = 'interface'
            self.hrn = auth + '.' + interface
            self.hrn_to_urn()
        else:
            Xrn.__init__(self, **kwargs)

    #def hrn_to_pl_slicename(hrn):
    def dummy_slicename (self):
        """Testbed-local slice name: <login_base>_<sanitized leaf>."""
        self._normalize()
        leaf = self.leaf
        # drop any sliver-id suffix after ':'
        sliver_id_parts = leaf.split(':')
        name = sliver_id_parts[0]
        name = re.sub('[^a-zA-Z0-9_]', '', name)
        return self.pl_login_base() + '_' + name

    #def hrn_to_pl_authname(hrn):
    def dummy_authname (self):
        """Last component of the authority."""
        self._normalize()
        return self.authority[-1]

    def interface_name(self):
        self._normalize()
        return self.leaf

    def pl_login_base (self):
        """login_base-style prefix derived from the authority (or the leaf
        for authority records): lowercase, alphanumeric only, max 20 chars."""
        self._normalize()
        if self.type and self.type.startswith('authority'):
            base = self.leaf
        else:
            base = self.authority[-1]

        # Fix up names of GENI Federates
        base = base.lower()
        # bugfix: the original pattern matched a literal backslash followed
        # by a non-alphanumeric; the intent is to strip non-alphanumerics
        base = re.sub(r'[^a-zA-Z0-9]', '', base)

        if len(base) > 20:
            base = base[len(base) - 20:]

        return base
--- /dev/null
from sfa.generic import Generic

class dummy (Generic):
    """Binds the 'dummy' SFA flavour to its importer, API, managers and driver."""

    # the importer class
    def importer_class (self):
        import sfa.importer.dummyimporter
        # bugfix: the dummy importer module defines DummyImporter, not PlImporter
        return sfa.importer.dummyimporter.DummyImporter

    # use the standard api class
    def api_class (self):
        import sfa.server.sfaapi
        return sfa.server.sfaapi.SfaApi

    # the manager classes for the server-side services
    def registry_manager_class (self) :
        import sfa.managers.registry_manager
        return sfa.managers.registry_manager.RegistryManager

    def slicemgr_manager_class (self) :
        import sfa.managers.slice_manager
        return sfa.managers.slice_manager.SliceManager

    def aggregate_manager_class (self) :
        import sfa.managers.aggregate_manager
        return sfa.managers.aggregate_manager.AggregateManager

    # driver class for server-side services, talks to the whole testbed
    def driver_class (self):
        import sfa.dummy.dummydriver
        return sfa.dummy.dummydriver.DummyDriver
--- /dev/null
+#
+# Dummy importer
+#
+# requirements
+#
+# read the planetlab database and update the local registry database accordingly
+# so we update the following collections
+# . authorities (from pl sites)
+# . node (from pl nodes)
+# . users+keys (from pl persons and attached keys)
+# known limitation : *one* of the ssh keys is chosen at random here
+# xxx todo/check xxx at the very least, when a key is known to the registry
+# and is still current in plc
+# then we should definitely make sure to keep that one in sfa...
+# . slice+researchers (from pl slices and attached users)
+#
+
import os

from sfa.util.config import Config
from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn

from sfa.trust.gid import create_uuid
from sfa.trust.certificate import convert_public_key, Keypair

from sfa.storage.alchemy import dbsession
from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey

# bugfix: this module used to import PlShell, which sfa.dummy.dummyshell
# does not define; run() instantiates DummyShell
from sfa.dummy.dummyshell import DummyShell
from sfa.dummy.dummyxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_dummy_slicename
+
+def _get_site_hrn(interface_hrn, site):
+ hrn = ".".join([interface_hrn, site['name']])
+ return hrn
+
+
+class DummyImporter:
+
+ def __init__ (self, auth_hierarchy, logger):
+ self.auth_hierarchy = auth_hierarchy
+ self.logger=logger
+
+ def add_options (self, parser):
+ # we don't have any options for now
+ pass
+
+ # hrn hash is initialized from current db
+ # remember just-created records as we go
+ # xxx might make sense to add a UNIQUE constraint in the db itself
+ def remember_record_by_hrn (self, record):
+ tuple = (record.type, record.hrn)
+ if tuple in self.records_by_type_hrn:
+ self.logger.warning ("DummyImporter.remember_record_by_hrn: duplicate (%s,%s)"%tuple)
+ return
+ self.records_by_type_hrn [ tuple ] = record
+
+ # ditto for pointer hash
+ def remember_record_by_pointer (self, record):
+ if record.pointer == -1:
+ self.logger.warning ("DummyImporter.remember_record_by_pointer: pointer is void")
+ return
+ tuple = (record.type, record.pointer)
+ if tuple in self.records_by_type_pointer:
+ self.logger.warning ("DummyImporter.remember_record_by_pointer: duplicate (%s,%s)"%tuple)
+ return
+ self.records_by_type_pointer [ ( record.type, record.pointer,) ] = record
+
+ def remember_record (self, record):
+ self.remember_record_by_hrn (record)
+ self.remember_record_by_pointer (record)
+
+ def locate_by_type_hrn (self, type, hrn):
+ return self.records_by_type_hrn.get ( (type, hrn), None)
+
+ def locate_by_type_pointer (self, type, pointer):
+ return self.records_by_type_pointer.get ( (type, pointer), None)
+
+ # a convenience/helper function to see if a record is already known
+ # a former, broken, attempt (in 2.1-9) had been made
+ # to try and use 'pointer' as a first, most significant attempt
+ # the idea being to preserve stuff as much as possible, and thus
+ # to avoid creating a new gid in the case of a simple hrn rename
+ # however this of course doesn't work as the gid depends on the hrn...
+ #def locate (self, type, hrn=None, pointer=-1):
+ # if pointer!=-1:
+ # attempt = self.locate_by_type_pointer (type, pointer)
+ # if attempt : return attempt
+ # if hrn is not None:
+ # attempt = self.locate_by_type_hrn (type, hrn,)
+ # if attempt : return attempt
+ # return None
+
+ # this makes the run method a bit abtruse - out of the way
+
+ def run (self, options):
+ config = Config ()
+ interface_hrn = config.SFA_INTERFACE_HRN
+ root_auth = config.SFA_REGISTRY_ROOT_AUTH
+ shell = DummyShell (config)
+
+ ######## retrieve all existing SFA objects
+ all_records = dbsession.query(RegRecord).all()
+
+ # create hash by (type,hrn)
+ # we essentially use this to know if a given record is already known to SFA
+ self.records_by_type_hrn = \
+ dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
+ # create hash by (type,pointer)
+ self.records_by_type_pointer = \
+ dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
+ if record.pointer != -1] )
+
+ # initialize record.stale to True by default, then mark stale=False on the ones that are in use
+ for record in all_records: record.stale=True
+
+ ######## retrieve Dummy TB data
+ # Get all plc sites
+ # retrieve only required stuf
+ sites = [shell.GetTestbedInfo()]
+ # create a hash of sites by login_base
+# sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
+ # Get all dummy TB users
+ users = shell.GetUsers()
+ # create a hash of users by user_id
+ users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
+ # Get all dummy TB public keys
+ keys = []
+ for user in users:
+ keys.extend(user['keys'])
+ # create a dict user_id -> [ keys ]
+ keys_by_person_id = {}
+ for user in users:
+ keys_by_person_id[user['user_id']] = user['keys']
+ # Get all dummy TB nodes
+ nodes = shell.GetNodes()
+ # create hash by node_id
+ nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
+ # Get all dummy TB slices
+ slices = shell.GetSlices()
+ # create hash by slice_id
+ slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )
+
+
+ # start importing
+ for site in sites:
+ site_hrn = _get_site_hrn(interface_hrn, site)
+ # import if hrn is not in list of existing hrns or if the hrn exists
+ # but its not a site record
+ site_record=self.locate_by_type_hrn ('authority', site_hrn)
+ if not site_record:
+ try:
+ urn = hrn_to_urn(site_hrn, 'authority')
+ if not self.auth_hierarchy.auth_exists(urn):
+ self.auth_hierarchy.create_auth(urn)
+ auth_info = self.auth_hierarchy.get_auth_info(urn)
+ site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
+ pointer= -1,
+ authority=get_authority(site_hrn))
+ site_record.just_created()
+ dbsession.add(site_record)
+ dbsession.commit()
+ self.logger.info("DummyImporter: imported authority (site) : %s" % site_record)
+ self.remember_record (site_record)
+ except:
+ # if the site import fails then there is no point in trying to import the
+ # site's child records (node, slices, persons), so skip them.
+ self.logger.log_exc("DummyImporter: failed to import site. Skipping child records")
+ continue
+ else:
+ # xxx update the record ...
+ pass
+ site_record.stale=False
+
+ # import node records
+ for node in nodes:
+ site_auth = get_authority(site_hrn)
+ site_name = site['name']
+ node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
+ # xxx this sounds suspicious
+ if len(node_hrn) > 64: node_hrn = node_hrn[:64]
+ node_record = self.locate_by_type_hrn ( 'node', node_hrn )
+ if not node_record:
+ try:
+ pkey = Keypair(create=True)
+ urn = hrn_to_urn(node_hrn, 'node')
+ node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
+ node_record = RegNode (hrn=node_hrn, gid=node_gid,
+ pointer =node['node_id'],
+ authority=get_authority(node_hrn))
+ node_record.just_created()
+ dbsession.add(node_record)
+ dbsession.commit()
+ self.logger.info("DummyImporter: imported node: %s" % node_record)
+ self.remember_record (node_record)
+ except:
+ self.logger.log_exc("DummyImporter: failed to import node")
+ else:
+ # xxx update the record ...
+ pass
+ node_record.stale=False
+
+ site_pis=[]
+ # import users
+ for user in users:
+ user_hrn = email_to_hrn(site_hrn, user['email'])
+ # xxx suspicious again
+ if len(person_hrn) > 64: person_hrn = person_hrn[:64]
+ user_urn = hrn_to_urn(user_hrn, 'user')
+
+ user_record = self.locate_by_type_hrn ( 'user', user_hrn)
+
+ # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)
+
+ def init_user_key (user):
+ pubkey = None
+ pkey = None
+ if user['keys']:
+ # randomly pick first key in set
+ for key in user['keys']:
+ pubkey = key
+ try:
+ pkey = convert_public_key(pubkey)
+ break
+ except:
+ continue
+ if not pkey:
+ self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
+ pkey = Keypair(create=True)
+ else:
+ # the user has no keys. Creating a random keypair for the user's gid
+ self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn)
+ pkey = Keypair(create=True)
+ return (pubkey, pkey)
+
+ # new user
+ try:
+ if not user_record:
+ (pubkey,pkey) = init_user_key (user)
+ user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
+ user_gid.set_email(user['email'])
+ user_record = RegUser (hrn=user_hrn, gid=user_gid,
+ pointer=user['user_id'],
+ authority=get_authority(user_hrn),
+ email=user['email'])
+ if pubkey:
+ user_record.reg_keys=[RegKey (pubkey)]
+ else:
+ self.logger.warning("No key found for user %s"%user_record)
+ user_record.just_created()
+ dbsession.add (user_record)
+ dbsession.commit()
+ self.logger.info("DummyImporter: imported person: %s" % user_record)
+ self.remember_record ( user_record )
+
+ else:
+ # update the record ?
+ # if user's primary key has changed then we need to update the
+ # users gid by forcing an update here
+ sfa_keys = user_record.reg_keys
+ def key_in_list (key,sfa_keys):
+ for reg_key in sfa_keys:
+ if reg_key.key==key: return True
+ return False
+ # is there a new key in Dummy TB ?
+ new_keys=False
+ for key in user['keys']:
+ if not key_in_list (key,sfa_keys):
+ new_keys = True
+ if new_keys:
+ (pubkey,pkey) = init_user_key (user)
+ user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
+ if not pubkey:
+ user_record.reg_keys=[]
+ else:
+ user_record.reg_keys=[ RegKey (pubkey)]
+ self.logger.info("DummyImporter: updated person: %s" % user_record)
+ user_record.email = person['email']
+ dbsession.commit()
+ user_record.stale=False
+ except:
+ self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
+
+
+ # import slices
+ for slice in slices:
+ slice_hrn = slicename_to_hrn(interface_hrn, slice['slice_ame'])
+ slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
+ if not slice_record:
+ try:
+ pkey = Keypair(create=True)
+ urn = hrn_to_urn(slice_hrn, 'slice')
+ slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
+ slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
+ pointer=slice['slice_id'],
+ authority=get_authority(slice_hrn))
+ slice_record.just_created()
+ dbsession.add(slice_record)
+ dbsession.commit()
+ self.logger.info("DummyImporter: imported slice: %s" % slice_record)
+ self.remember_record ( slice_record )
+ except:
+ self.logger.log_exc("DummyImporter: failed to import slice")
+ else:
+ # xxx update the record ...
+ self.logger.warning ("Slice update not yet implemented")
+ pass
+ # record current users affiliated with the slice
+ slice_record.reg_researchers = \
+ [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
+ dbsession.commit()
+ slice_record.stale=False
+
+ ### remove stale records
+ # special records must be preserved
+ system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
+ for record in all_records:
+ if record.hrn in system_hrns:
+ record.stale=False
+ if record.peer_authority:
+ record.stale=False
+
+ for record in all_records:
+ try: stale=record.stale
+ except:
+ stale=True
+ self.logger.warning("stale not found with %s"%record)
+ if stale:
+ self.logger.info("DummyImporter: deleting stale record: %s" % record)
+ dbsession.delete(record)
+ dbsession.commit()