from sfa.util.version import version_core
from sfa.rspecs.rspec_version import RSpecVersion
from sfa.rspecs.sfa_rspec import sfa_rspec_version
-from sfa.rspecs.pg_rspec import pg_rspec_version
+from sfa.rspecs.pl_rspec_version import *
+#from sfa.rspecs.pg_rspec import pg_rspec_version
+from sfa.rspecs.pg_rspec import pg_rspec_ad_version, pg_rspec_request_version
from sfa.rspecs.rspec_parser import parse_rspec
from sfa.util.sfatime import utcparse
from sfa.util.callids import Callids
-
-
-from sfa.plc.OARrspec import *
+from sfa.senslab.OARrspec import *
#from sfa.plc.aggregate import Aggregate
def GetVersion(api):
+ print>>sys.stderr, "\r\n AGGREGATE GET_VERSION "
xrn=Xrn(api.hrn)
- supported_rspecs = [dict(pg_rspec__request_version), dict(sfa_rspec_version)]
+ supported_rspecs = [dict(pg_rspec_request_version), dict(sfa_rspec_version)]
ad_rspec_versions = [dict(pg_rspec_ad_version), dict(sfa_rspec_version)]
version_more = {'interface':'aggregate',
'testbed':'senslab',
'ad_rspec_versions': ad_rspec_versions,
'default_ad_rspec': dict(sfa_rspec_version)
}
+ print>>sys.stderr, "\r\n AGGREGATE GET_VERSION : %s \r\n \r\n" %(version_core(version_more))
return version_core(version_more)
def __get_registry_objects(slice_xrn, creds, users):
slice = {}
- extime = Credential(string=creds[0]).get_expiration()
- # If the expiration time is > 60 days from now, set the expiration time to 60 days from now
- if extime > datetime.datetime.utcnow() + datetime.timedelta(days=60):
- extime = datetime.datetime.utcnow() + datetime.timedelta(days=60)
+ #extime = Credential(string=creds[0]).get_expiration()
+ ## If the expiration time is > 60 days from now, set the expiration time to 60 days from now
+ #if extime > datetime.datetime.utcnow() + datetime.timedelta(days=60):
+ extime = datetime.datetime.utcnow() + datetime.timedelta(days=60)
slice['expires'] = int(time.mktime(extime.timetuple()))
slice['hrn'] = hrn
slice['name'] = hrn_to_pl_slicename(hrn)
Create the sliver[s] (slice) at this aggregate.
Verify HRN and initialize the slice record in PLC if necessary.
"""
+ print>>sys.stderr, " \r\n AGGREGATE CreateSliver-----------------> "
if Callids().already_handled(call_id): return ""
reg_objects = __get_registry_objects(slice_xrn, creds, users)
slice = slices.verify_slice(registry, credential, hrn, site_id,
remote_site_id, peer, sfa_peer, reg_objects)
- nodes = api.plshell.GetNodes(api.plauth, slice['node_ids'], ['hostname'])
+ nodes = api.oar.GetNodes(slice['node_ids'], ['hostname'])
current_slivers = [node['hostname'] for node in nodes]
rspec = parse_rspec(rspec_string)
requested_slivers = [str(host) for host in rspec.get_nodes_with_slivers()]
from sfa.rspecs.rspec_parser import parse_rspec
from sfa.rspecs.rspec_version import RSpecVersion
from sfa.rspecs.sfa_rspec import sfa_rspec_version
-from sfa.rspecs.pg_rspec import pg_rspec_version
+from sfa.rspecs.pg_rspec import pg_rspec_ad_version, pg_rspec_request_version
from sfa.util.policy import Policy
from sfa.util.prefixTree import prefixTree
from sfa.util.sfaticket import *
try:
return server.url
except:
- sfa_logger().warning("ParseVersion, falling back to xmlrpclib.ServerProxy internals")
+ sfa_logger().warning("GetVersion, falling back to xmlrpclib.ServerProxy internals")
return server._ServerProxy__host + server._ServerProxy__handler
-def ParseVersion(api):
+def GetVersion(api):
# peers explicitly in aggregates.xml
peers =dict ([ (peername,get_serverproxy_url(v)) for (peername,v) in api.aggregates.iteritems()
if peername != api.hrn])
xrn=Xrn (api.hrn)
- supported_rspecs = [dict(pg_rspec_version), dict(sfa_rspec_version)]
+ request_rspec_versions = [dict(pg_rspec_request_version), dict(sfa_rspec_version)]
+ ad_rspec_versions = [dict(pg_rspec_ad_version), dict(sfa_rspec_version)]
version_more = {'interface':'slicemgr',
'hrn' : xrn.get_hrn(),
'urn' : xrn.get_urn(),
'peers': peers,
- 'request_rspec_versions': supported_rspecs,
- 'ad_rspec_versions': supported_rspecs,
+ 'request_rspec_versions': request_rspec_versions,
+ 'ad_rspec_versions': ad_rspec_versions,
'default_ad_rspec': dict(sfa_rspec_version)
}
sm_version=version_core(version_more)
local_am_url=get_serverproxy_url(api.aggregates[api.hrn])
sm_version['peers'][api.hrn]=local_am_url.replace('localhost',sm_version['hostname'])
return sm_version
-
+
def CreateSliver(api, xrn, creds, rspec_str, users, call_id):
def _CreateSliver(aggregate, xrn, credential, rspec, users, call_id):
# Need to call ParseVersion at an aggregate to determine the supported
# rspec type/format beofre calling CreateSliver at an Aggregate.
# The Aggregate's verion info is cached
+ print>>sys.stderr, " \r\n \t\t =======SLICE MANAGER _CreateSliver "
+
server = api.aggregates[aggregate]
# get cached aggregate version
aggregate_version_key = 'version_'+ aggregate
aggregate_version = api.cache.get(aggregate_version_key)
- if not aggregate_version:
- # get current aggregate version anc cache it for 24 hours
- aggregate_version = server.ParseVersion()
+ print>>sys.stderr, " \r\n \t\t =======SLICE MANAGER _CreateSliver aggregate_version WTF ? %s"%(aggregate_version )
+ if aggregate_version is None:
+          # get current aggregate version and cache it for 24 hours
+ print>>sys.stderr, " \r\n \t\t =======SLICE MANAGER It s browwwwwn server"
+ aggregate_version = server.GetVersion()
+ print>>sys.stderr, " \r\n \t\t =======SLICE MANAGER _CreateSliver GET aggregate_version %s"%(aggregate_version )
api.cache.add(aggregate_version_key, aggregate_version, 60 * 60 * 24)
if 'sfa' not in aggregate_version and 'geni_api' in aggregate_version:
import sys
import httplib
import json
+from sfa.senslab.parsing import *
+from sfa.senslab.SenslabImportUsers import *
-OARIP='192.168.0.109'
+OARIP='10.127.255.254'
OARrequests_list = ["GET_version", "GET_timezone", "GET_jobs", "GET_jobs_table", "GET_jobs_details",
"GET_resources_full", "GET_resources"]
def AddMobility(self,tuplelist,value):
- tuplelist.append(('mobile',int(value)))
+ if value :
+ tuplelist.append(('mobile',int(value)))
return 0
def ParseVersion(self) :
print self.raw_json
- self.version_json_dict.update(api_version=self.raw_json['oar'] ,
- apilib_version=self.raw_json['apilib'],
+ self.version_json_dict.update(api_version=self.raw_json['oar_version'] ,
+ apilib_version=self.raw_json['apilib_version'],
api_timezone=self.raw_json['api_timezone'],
api_timestamp=self.raw_json['api_timestamp'],
- oar_version=self.raw_json['oar'] )
+ oar_version=self.raw_json['oar_version'] )
print self.version_json_dict['apilib_version']
def ParseTimezone(self) :
def ParseJobsDetails (self):
print "ParseJobsDetails"
- def ParseResources(self) :
+ def ParseResources(self) :
+ print>>sys.stderr, " \r\n \t\t\t ParseResources__________________________ "
#resources are listed inside the 'items' list from the json
self.raw_json = self.raw_json['items']
self.ParseNodes()
def ParseResourcesFull(self ) :
- print self.raw_json[1]
+ print>>sys.stderr, " \r\n \t\t\t ParseResourcesFull_____________________________ "
+ #print self.raw_json[1]
#resources are listed inside the 'items' list from the json
if self.version_json_dict['apilib_version'] != "0.2.10" :
self.raw_json = self.raw_json['items']
('login_base', node['site_login_base']),\
('node_ids',nodes_per_site[node['site_id']]),\
('latitude',"48.83726"),\
- ('longitude',"- 2.10336")]
+ ('longitude',"- 2.10336"),('name',"demolab"),\
+ ('pcu_ids', []), ('max_slices', None), ('ext_consortium_id', None),\
+ ('max_slivers', None), ('is_public', True), ('peer_site_id', None),\
+ ('abbreviated_name', "demolab"), ('address_ids', []),\
+            ('url', "http://www.sdemolab.fr"), ('person_ids', []),\
+ ('site_tag_ids', []), ('enabled', True), ('slice_ids', []),\
+ ('date_created', None), ('peer_id', None),]
self.site_dict[node['site_login_base']] = dict(self.site_dict[node['site_login_base']])
- print self.site_dict
+
+ print>>sys.stderr, "\r\n \r\n =============\t\t ParseSites site dict %s \r\n"%(self.site_dict)
def GetNodesFromOARParse(self):
def GetSites(self, site_filter= None, return_fields=None):
- print>>sys.stderr, " \r\n GetSites"
+ print>>sys.stderr, " \r\n GetSites+++++++++++++++++"
self.parser.SendRequest("GET_resources_full")
site_dict = self.parser.GetSitesFromOARParse()
return_site_list = []
- print>>sys.stderr, " \r\n GetSites sites_dict %s" %(site_dict)
+ site = site_dict.values()[0]
+ Users = SenslabImportUsers()
+
+ print>>sys.stderr, " \r\n GetSites sites_dict %s site_filter %s \r\n \r\n \r\n \r\n------site %s" %(site_dict,site_filter,site )
+ admins_dict ={'person_ids': Users.GetPIs(site['site_id'])}
+ site.update(admins_dict)
+
+ slice_list = Users.GetSlices()
+ for sl in slice_list:
+ print>>sys.stderr, " \r\n GetSites sl %s" %(sl)
+ if sl['site_id'] == site['site_id']:
+ site['slice_ids'].append(sl['slice_id'])
+ print>>sys.stderr, " \r\n GetSites -site['site_id'] %s --slice_list %s" %(site['site_id'],slice_list )
+
+
+ print>>sys.stderr, " \r\n GetSites -site['site_id'] %s --admins_dict %s---site %s" %(site['site_id'],admins_dict,site )
if not (site_filter or return_fields):
return_site_list = site_dict.values()
return return_site_list
#panos new user options variable
user_options = {}
- def __init__(self):
+ def __init__(self ,api, user_options={}):
self.OARImporter = OARapi()
print >>sys.stderr,'\r\n \r\n \t\t__INIT OARRSPEC__'
+ self.user_options = user_options
def prepare_sites(self, force=False):
- print >>sys.stderr,'\r\n \r\n ++++++++++++++\t\t', self.OARImporter.GetSites()
+ print >>sys.stderr,'\r\n \r\n ++++++++++++++\t\t prepare_sites'
if not self.sites or force:
for site in self.OARImporter.GetSites():
print >>sys.stderr,'prepare_sites : site ', site
self.nodes[node['node_id']] = node
print >>sys.stderr,'prepare_nodes:node', node
-
+ #def prepare_interfaces(self, force=False):
+ #if not self.interfaces or force:
+ #for interface in self.api.plshell.GetInterfaces(self.api.plauth):
+ #self.interfaces[interface['interface_id']] = interface
#def prepare_node_tags(self, force=False):
#if not self.node_tags or force:
#for node_tag in self.api.plshell.GetNodeTags(self.api.plauth):
#self.node_tags[node_tag['node_tag_id']] = node_tag
-
+
+ def prepare_links(self, force=False):
+ if not self.links or force:
+ pass
def prepare(self, force=False):
if not self.prepared or force:
self.prepare_sites(force)
self.prepare_nodes(force)
-
+ self.prepare_links(force)
+ #self.prepare_interfaces(force)
+ #self.prepare_node_tags(force)
# add site/interface info to nodes
for node_id in self.nodes:
node = self.nodes[node_id]
site = self.sites[node['site_id']]
-
- node['network'] = "grenoble-senslab"
- node['network_urn'] = hrn_to_urn(node['network'], 'authority+sa')
+ #interfaces = [self.interfaces[interface_id] for interface_id in node['interface_ids']]
+ #tags = [self.node_tags[tag_id] for tag_id in node['node_tag_ids']]
+ node['network'] = "senslab"
+ node['network_urn'] = hrn_to_urn(node['network'], 'authority+am')
node['urn'] = hostname_to_urn(node['network'], site['login_base'], node['hostname'])
- node['site_urn'] = hrn_to_urn(PlXrn.site_hrn(node['network'], site['login_base']), 'authority')
+ node['site_urn'] = hrn_to_urn(PlXrn.site_hrn(node['network'], site['login_base']), 'authority+sa')
node['site'] = site
-
+ #node['interfaces'] = interfaces
#node['tags'] = tags
+
print >>sys.stderr, "\r\n OAR prepare ", node
self.prepared = True
+
#
# The import tool assumes that the existing PLC hierarchy should all be part
# of "planetlab.us" (see the root_auth and level1_auth variables below).
from sfa.trust.gid import create_uuid
+
def _un_unicode(str):
if isinstance(str, unicode):
return str.encode("ascii", "ignore")
self.TrustedRoots = TrustedRootList(Config.get_trustedroots_dir(self.config))
print>>sys.stderr, "\r\n ========= \t\t SenslabImport TrustedRoots\r\n" , self.TrustedRoots
self.plc_auth = self.config.get_plc_auth()
+ print>>sys.stderr, "\r\n ========= \t\t SenslabImport self.plc_auth %s \r\n" %(self.plc_auth )
self.root_auth = self.config.SFA_REGISTRY_ROOT_AUTH
table.update(person_record)
def import_slice(self, parent_hrn, slice):
- slicename = slice['name'].split("_",1)[-1]
- slicename = _cleanup_string(slicename)
+ #slicename = slice['name'].split("_",1)[-1]
+
+ slicename = _cleanup_string(slice['name'])
if not slicename:
self.logger.error("Import: failed to parse slice name %s" %slice['name'])
import sys
import httplib
import json
+import datetime
+import time
+from sfa.senslab.parsing import *
-
-
-def strip_dictionnary (dict_to_strip):
- stripped_filter = []
- stripped_filterdict = {}
- for f in dict_to_strip :
- stripped_filter.append(str(f).strip('|'))
-
- stripped_filterdict = dict(zip(stripped_filter, dict_to_strip.values()))
-
- return stripped_filterdict
-
-
-def filter_return_fields( dict_to_filter, return_fields):
- filtered_dict = {}
- #print>>sys.stderr, " \r\n \t \tfilter_return_fields return fields %s " %(return_fields)
- for field in return_fields:
- #print>>sys.stderr, " \r\n \t \tfield %s " %(field)
- if field in dict_to_filter:
- filtered_dict[field] = dict_to_filter[field]
- #print>>sys.stderr, " \r\n \t\t filter_return_fields filtered_dict %s " %(filtered_dict)
- return filtered_dict
-
-
-
-def parse_filter(list_to_filter, param_filter, type_of_list, return_fields=None) :
- list_type = { 'persons': {'str': 'email','int':'person_id'}, 'keys':{'int':'key_id'}}
- if type_of_list not in list_type:
- print>>sys.stderr, " \r\n type_of_list Error parse_filter %s " %(type_of_list)
- return []
-
- print>>sys.stderr, " \r\n ____FIRST ENTRY parse_filter param_filter %s type %s " %(param_filter, type(param_filter))
- return_filtered_list= []
-
- for item in list_to_filter:
- tmp_item = {}
-
- if type(param_filter) is list :
- #print>>sys.stderr, " \r\n p_filter LIST %s " %(param_filter)
-
- for p_filter in param_filter:
- #print>>sys.stderr, " \r\n p_filter %s \t item %s " %(p_filter,item)
- if type(p_filter) is int:
- if item[list_type[type_of_list]['int']] == p_filter :
- if return_fields:
- tmp_item = filter_return_fields(item,return_fields)
- else:
- tmp_item = item
- return_filtered_list.append(tmp_item)
- #print>>sys.stderr, " \r\n 1tmp_item",tmp_item
-
- if type(p_filter) is str:
- if item[list_type[type_of_list]['str']] == p_filter :
- if return_fields:
- tmp_item = filter_return_fields(item,return_fields)
- else:
- tmp_item = item
- return_filtered_list.append(tmp_item)
- #print>>sys.stderr, " \r\n 2tmp_item",tmp_item
-
- elif type(param_filter) is dict:
- stripped_filterdict = strip_dictionnary(param_filter)
-
- tmp_copy = {}
- tmp_copy = item.copy()
- #print>>sys.stderr, " \r\n \t\t ________tmp_copy %s " %(tmp_copy)
- key_list = tmp_copy.keys()
- for key in key_list:
- print>>sys.stderr, " \r\n \t\t key %s " %(key)
- if key not in stripped_filterdict.keys():
- del tmp_copy[key]
-
-
- print>>sys.stderr, " \r\n tmp_copy %s param_filter %s cmp = %s " %(tmp_copy, param_filter,cmp(tmp_copy, stripped_filterdict))
-
- if cmp(tmp_copy, stripped_filterdict) == 0:
- if return_fields:
- tmp_item = filter_return_fields(item,return_fields)
- else:
-
- tmp_item = item
- return_filtered_list.append(tmp_item)
-
- return return_filtered_list
class SenslabImportUsers:
def __init__(self):
self.person_list = []
self.keys_list = []
+ self.slices_list= []
#self.resources_fulldict['keys'] = []
self.InitPersons()
self.InitKeys()
-
+ self.InitSlices()
+
+
+ def InitSlices(self):
+ slices_per_site = {}
+ dflt_slice = { 'instantiation': None, 'description': "Senslab Slice Test", 'node_ids': [], 'url': "http://localhost.localdomain/", 'max_nodes': 256, 'site_id': 3,'peer_slice_id': None, 'slice_tag_ids': [], 'peer_id': None, 'hrn' :None}
+ for person in self.person_list:
+        if 'user' in person['roles'] or 'pi' in person['roles']:
+ def_slice = {}
+ #print>>sys.stderr, "\r\n \rn \t\t _____-----------************def_slice person %s \r\n \rn " %(person['person_id'])
+ def_slice['person_ids'] = []
+ def_slice['person_ids'].append(person['person_id'])
+ def_slice['slice_id'] = person['person_id']
+ def_slice['creator_person_id'] = person['person_id']
+ extime = datetime.datetime.utcnow()
+ def_slice['created'] = int(time.mktime(extime.timetuple()))
+ extime = extime + datetime.timedelta(days=365)
+ def_slice['expires'] = int(time.mktime(extime.timetuple()))
+ #print>>sys.stderr, "\r\n \rn \t\t _____-----------************def_slice expires %s \r\n \r\n "%(def_slice['expires'])
+ def_slice['name'] = person['email'].replace('@','_',1)
+ #print>>sys.stderr, "\r\n \rn \t\t _____-----------************def_slice %s \r\n \r\n " %(def_slice['name'])
+ def_slice.update(dflt_slice)
+ self.slices_list.append(def_slice)
+
+ print>>sys.stderr, "InitSlices SliceLIST", self.slices_list
+
def InitPersons(self):
persons_per_site = {}
person_id = 7
persons_per_site[person_id] = {'person_id': person_id,'site_ids': [3],'email': 'a_rioot@senslab.fr', 'key_ids':[1], 'roles': ['pi'], 'role_ids':[20]}
person_id = 8
persons_per_site[person_id] = {'person_id': person_id,'site_ids': [3],'email': 'lost@senslab.fr','key_ids':[1],'roles': ['pi'], 'role_ids':[20]}
+ person_id = 9
+ persons_per_site[person_id] = {'person_id': person_id,'site_ids': [3],'email': 'user@senslab.fr','key_ids':[1],'roles': ['user'], 'role_ids':[1]}
for person_id in persons_per_site.keys():
person = persons_per_site[person_id]
if person['person_id'] not in self.person_list:
def InitKeys(self):
print>>sys.stderr, " InitKeys HEYYYYYYY\r\n"
- self.keys_list = [{'peer_key_id': None, 'key_type': 'ssh', 'key' :"ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArcdW0X2la754SoFE+URbDsYP07AZJjrspMlvUc6u+4o6JpGRkqiv7XdkgOMIn6w3DF3cYCcA1Mc6XSG7gSD7eQx614cjlLmXzHpxSeidSs/LgZaAQpq9aQ0KhEiFxg0gp8TPeB5Z37YOPUumvcJr1ArwL/8tAOx3ClwgRhccr2HOe10YtZbMEboCarTlzNHiGolo7RYIJjGuG2RBSeAg6SMZrtnn0OdKBwp3iUlOfkS98eirVtWUp+G5+SZggip3fS3k5Oj7OPr1qauva8Rizt02Shz30DN9ikFNqV2KuPg54nC27/DQsQ6gtycARRVY91VvchmOk0HxFiW/9kS2GQ== root@FlabFedora2",'person_id': 7, 'key_id':1, 'peer_id':None }, {'peer_key_id': None, 'key_type': 'ssh', 'key' :"ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArcdW0X2la754SoFE+URbDsYP07AZJjrspMlvUc6u+4o6JpGRkqiv7XdkgOMIn6w3DF3cYCcA1Mc6XSG7gSD7eQx614cjlLmXzHpxSeidSs/LgZaAQpq9aQ0KhEiFxg0gp8TPeB5Z37YOPUumvcJr1ArwL/8tAOx3ClwgRhccr2HOe10YtZbMEboCarTlzNHiGolo7RYIJjGuG2RBSeAg6SMZrtnn0OdKBwp3iUlOfkS98eirVtWUp+G5+SZggip3fS3k5Oj7OPr1qauva8Rizt02Shz30DN9ikFNqV2KuPg54nC27/DQsQ6gtycARRVY91VvchmOk0HxFiW/9kS2GQ== root@FlabFedora2",'person_id': 8, 'key_id':1, 'peer_id':None }]
+ self.keys_list = [{'peer_key_id': None, 'key_type': 'ssh', 'key' :"ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArcdW0X2la754SoFE+URbDsYP07AZJjrspMlvUc6u+4o6JpGRkqiv7XdkgOMIn6w3DF3cYCcA1Mc6XSG7gSD7eQx614cjlLmXzHpxSeidSs/LgZaAQpq9aQ0KhEiFxg0gp8TPeB5Z37YOPUumvcJr1ArwL/8tAOx3ClwgRhccr2HOe10YtZbMEboCarTlzNHiGolo7RYIJjGuG2RBSeAg6SMZrtnn0OdKBwp3iUlOfkS98eirVtWUp+G5+SZggip3fS3k5Oj7OPr1qauva8Rizt02Shz30DN9ikFNqV2KuPg54nC27/DQsQ6gtycARRVY91VvchmOk0HxFiW/9kS2GQ== root@FlabFedora2",'person_id': 7, 'key_id':1, 'peer_id':None },
+ {'peer_key_id': None, 'key_type': 'ssh', 'key' :"ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArcdW0X2la754SoFE+URbDsYP07AZJjrspMlvUc6u+4o6JpGRkqiv7XdkgOMIn6w3DF3cYCcA1Mc6XSG7gSD7eQx614cjlLmXzHpxSeidSs/LgZaAQpq9aQ0KhEiFxg0gp8TPeB5Z37YOPUumvcJr1ArwL/8tAOx3ClwgRhccr2HOe10YtZbMEboCarTlzNHiGolo7RYIJjGuG2RBSeAg6SMZrtnn0OdKBwp3iUlOfkS98eirVtWUp+G5+SZggip3fS3k5Oj7OPr1qauva8Rizt02Shz30DN9ikFNqV2KuPg54nC27/DQsQ6gtycARRVY91VvchmOk0HxFiW/9kS2GQ== root@FlabFedora2",'person_id': 8, 'key_id':1, 'peer_id':None },
+ {'peer_key_id': None, 'key_type': 'ssh', 'key' :"ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArcdW0X2la754SoFE+URbDsYP07AZJjrspMlvUc6u+4o6JpGRkqiv7XdkgOMIn6w3DF3cYCcA1Mc6XSG7gSD7eQx614cjlLmXzHpxSeidSs/LgZaAQpq9aQ0KhEiFxg0gp8TPeB5Z37YOPUumvcJr1ArwL/8tAOx3ClwgRhccr2HOe10YtZbMEboCarTlzNHiGolo7RYIJjGuG2RBSeAg6SMZrtnn0OdKBwp3iUlOfkS98eirVtWUp+G5+SZggip3fS3k5Oj7OPr1qauva8Rizt02Shz30DN9ikFNqV2KuPg54nC27/DQsQ6gtycARRVY91VvchmOk0HxFiW/9kS2GQ== root@FlabFedora2",'person_id': 9, 'key_id':1, 'peer_id':None }]
def GetPersons(self, person_filter=None, return_fields=None):
- print>>sys.stderr, " \r\n GetPersons person_filter %s return_fields %s" %(person_filter,return_fields)
+ print>>sys.stderr, " \r\n GetPersons person_filter %s return_fields %s list: %s" %(person_filter,return_fields, self.person_list)
if not self.person_list :
print>>sys.stderr, " \r\n ========>GetPersons NO PERSON LIST DAMMIT<========== \r\n"
return return_person_list
+ def GetPIs(self,site_id):
+ return_person_list= []
+ for person in self.person_list :
+ if site_id in person['site_ids'] and 'pi' in person['roles'] :
+ return_person_list.append(person['person_id'])
+ print>>sys.stderr, " \r\n GetPIs return_person_list %s :" %(return_person_list)
+ return return_person_list
+
+
def GetKeys(self,key_filter=None, return_fields=None):
return_key_list= []
print>>sys.stderr, " \r\n GetKeys"
#return_key_list.append(tmp_key)
#print>>sys.stderr," \r\n End GetKeys with filter ", return_key_list
#return return_key_list
-
\ No newline at end of file
+
+ def GetSlices( self,slice_filter=None, return_fields=None):
+ return_slice_list= []
+ print>>sys.stderr, "\r\n\r\n\t =======================GetSlices "
+ if not (slice_filter or return_fields):
+ return self.slices_list
+ return_slice_list= parse_filter(self.slices_list, slice_filter,'slice', return_fields)
+ return return_slice_list
+
+
+ def AddSlice(self, slice_fields):
+ print>>sys.stderr, " \r\n \r\nAddSlice "
+
+
+ def AddPersonToSlice(self,person_id_or_email, slice_id_or_name):
+ print>>sys.stderr, " \r\n \r\n AddPersonToSlice"
+
+ def DeletePersonFromSlice(self,person_id_or_email, slice_id_or_name):
+ print>>sys.stderr, " \r\n \r\n DeletePersonFromSlice "
existing_records[(result['hrn'], result['type'])] = result
existing_hrns.append(result['hrn'])
- # Get all Senslab sites
- sites_dict = OARImporter.GetSites()
- print "\r\n sSITES_DICT" , sites_dict
+
#Get Senslab nodes
keys_list = SenslabUsers.GetKeys()
print "\r\n KEYSS_LIST ",keys_list
+ slices_list = SenslabUsers.GetSlices()
+ print "\r\n SLICES_LIST ",slices_list
+
+ # Get all Senslab sites
+ sites_dict = OARImporter.GetSites()
+ print "\r\n sSITES_DICT" , sites_dict
+
# start importing
for site in sites_dict:
site_hrn = interface_hrn + "." + site['login_base']
print "SITE HRN UNKNOWN" , site, site_hrn
site_hrn = sfaImporter.import_site(interface_hrn, site)
- print "\r\n \r\n ===========IMPORT NODE8RECORDS ==========\r\n site %s \r\n \t nodes_dict %s" %(site,nodes_dict)
+ print "\r\n \r\n ===========IMPORT NODE_RECORDS ==========\r\n site %s \r\n \t nodes_dict %s" %(site,nodes_dict)
# import node records
for node_id in site['node_ids']:
#for[node['node_id'] for node in nodes_dict]:
print "\t\t NODE HRN NOT in existing records!" ,hrn
sfaImporter.import_node(hrn, node)
-
+ # import persons
for person in persons_list:
hrn = email_to_hrn(site_hrn, person['email'])
print >>sys.stderr, "\r\n\r\n^^^^^^^^^^^^^PERSON hrn %s person %s site hrn %s" %(hrn,person,site_hrn)
sfaImporter.import_person( site_hrn, person,keys_list)
-
+
+# import slices
+ for slice_id in site['slice_ids']:
+ print >>sys.stderr, "\r\n\r\n \t ^^^^^^^\\\\\\\\\\\\\\\^^^^^^ slice_id %s " %(slice_id)
+ for sl in slices_list:
+            if slice_id == sl['slice_id']:
+ #hrn = slicename_to_hrn(interface_hrn, sl['name'])
+ hrn = email_to_hrn(site_hrn, sl['name'])
+ print >>sys.stderr, "\r\n\r\n^^^^^^^^^^^^^SLICE ID hrn %s site_hrn %s" %(hrn,site_hrn)
+ if hrn not in existing_hrns or \
+ (hrn, 'slice') not in existing_records:
+ sfaImporter.import_slice(site_hrn, sl)
+
+
+
# remove stale records
for (record_hrn, type) in existing_records.keys():
record = existing_records[(record_hrn, type)]