login_base=site['login_base']
for node_id in site.get('node_ids', []):
try: node=nodes_by_id[node_id]
- except: print 'cannot find node %s'%node_id; continue
+ except: print('cannot find node %s'%node_id); continue
hrn=hostname_to_hrn (toplevel, login_base, node['hostname'])
if node['hrn'] != hrn:
if verbose:
- print "Node %s - current hrn %s, should be %s"%(node['hostname'], node['hrn'], hrn)
+ print("Node %s - current hrn %s, should be %s"%(node['hostname'], node['hrn'], hrn))
if dry_run: continue
SetNodeHrn (node['node_id'],hrn)
else:
- if verbose: print "Node %s OK"%node['hostname']
+ if verbose: print("Node %s OK"%node['hostname'])
def handle_persons (sites,sites_by_id, dry_run,verbose):
persons=GetPersons ({'peer_id':None},['person_id','email','hrn','site_ids'])
login_base=site['login_base']
for person_id in site.get('person_ids', []):
try: person=persons_by_id[person_id]
- except: print 'cannot find person %s'%person_id; continue
+ except: print('cannot find person %s'%person_id); continue
how_many=len(person['site_ids'])
if how_many !=1:
- if verbose: print "Checking persons in exactly one site -- person %s in %s site(s) -- ignored"%(person['email'],how_many)
+ if verbose: print("Checking persons in exactly one site -- person %s in %s site(s) -- ignored"%(person['email'],how_many))
continue
hrn=email_to_hrn ("%s.%s"%(toplevel,login_base),person['email'])
if person['hrn'] != hrn:
if verbose:
- print "Person %s - current hrn %s, should be %s"%(person['email'], person['hrn'], hrn)
+ print("Person %s - current hrn %s, should be %s"%(person['email'], person['hrn'], hrn))
if dry_run: continue
SetPersonHrn (person['person_id'],hrn)
else:
- if verbose: print "Person %s OK"%person['email']
+ if verbose: print("Person %s OK"%person['email'])
def handle_slices (sites,sites_by_id, dry_run,verbose):
login_base=site['login_base']
for slice_id in site.get('slice_ids', []):
try: slice=slices_by_id[slice_id]
- except: print 'cannot find slice %s'%slice_id; continue
+ except: print('cannot find slice %s'%slice_id); continue
hrn=slicename_to_hrn (toplevel, slice['name'])
if slice['hrn'] != hrn:
if verbose:
- print "Slice %s - current hrn %s, should be %s"%(slice['name'], slice['hrn'], hrn)
+ print("Slice %s - current hrn %s, should be %s"%(slice['name'], slice['hrn'], hrn))
if dry_run: continue
SetSliceHrn (slice['slice_id'],hrn)
else:
- if verbose: print "Slice %s OK"%slice['name']
+ if verbose: print("Slice %s OK"%slice['name'])
def handle_sites (sites,sites_by_id, dry_run,verbose):
hrn='.'.join([toplevel, site['login_base']])
if site['hrn'] != hrn:
if verbose:
- print "Site %s - current hrn %s, should be %s"%(site['name'].encode('ascii', 'ignore'), site['hrn'], hrn)
+ print("Site %s - current hrn %s, should be %s"%(site['name'].encode('ascii', 'ignore'), site['hrn'], hrn))
if dry_run: continue
SetSiteHrn (site['site_id'],hrn)
else:
- if verbose: print "Site %s OK"%site['name']
+ if verbose: print("Site %s OK"%site['name'])
try:
vsys_tag_type=GetSliceTags({'tagname':'vsys'})[0]
except:
- print "Can't find tag vsys - exiting"
+ print("Can't find tag vsys - exiting")
sys.exit(1)
def add_value (slice, value, options):
(slice_id, slice_name ) = (slice['slice_id'], slice['name'])
if options.dry_run:
- print "Would add vsys=%s to slice %s (%d)"%(value,slice_name,slice_id)
+ print("Would add vsys=%s to slice %s (%d)"%(value,slice_name,slice_id))
return
if options.verbose:
- print "Adding vsys=%s to slice %s (%d)"%(value,slice_name,slice_id)
+ print("Adding vsys=%s to slice %s (%d)"%(value,slice_name,slice_id))
AddSliceTag (slice_id, 'vsys', value)
add_value (slice,value,options)
counter+=1
if options.verbose:
- print "Found %d slices for which %s is missing"%(counter,value)
+ print("Found %d slices for which %s is missing"%(counter,value))
def main ():
usage="""Usage: %prog
-#!/usr/bin/python
+#!/usr/bin/python3
###
### utility script for cleaning empty directories
### useful to clean up /var/tmp/bootmedium
for dir in dirs:
try:
if dir.index("/") != 0:
- print "%s: args must be absolute paths"%(sys.argv[0])
- print "%s: %s ignored"%(sys.argv[0],dir)
+ print("%s: args must be absolute paths"%(sys.argv[0]))
+ print("%s: %s ignored"%(sys.argv[0],dir))
else:
clean_root(dir)
- except OSError, (errno, strerror):
- print ERROR_STR % {'path' : path, 'error': strerror }
+ except OSError as err:
+ (errno, strerror) = err.args
+ print(ERROR_STR % {'path' : dir, 'error': strerror })
if __name__ == '__main__':
main (sys.argv[1:])
if os.path.exists(directory):
try:
filenames = os.listdir(directory)
- except OSError, e:
- raise Exception, "Error when opening %s (%s)" % \
- (os.path.join(dir, file), e)
+ except OSError as e:
+ raise Exception("Error when opening %s (%s)" %
+ (directory, e))
# ignore files that contain either ~ or .
ignore_tokens = ("~",".")
ignore = True
if ignore:
- print "db-config: ignored %s snippet" % filename
+ print("db-config: ignored %s snippet" % filename)
filenames = []
- keys = numberedfiles.keys()
+ keys = list(numberedfiles.keys())
keys.sort()
for k in keys:
for filename in numberedfiles[k]:
(options,steps) = parser.parse_args()
# Load variables into dictionaries
- for category_id, (category, variablelist) in variables.iteritems():
- globals()[category_id] = dict(zip(variablelist.keys(),
- [variable['value'] for variable in variablelist.values()]))
+ for category_id, (category, variablelist) in variables.items():
+ globals()[category_id] = dict(list(zip(list(variablelist.keys()),
+ [variable['value'] for variable in list(variablelist.values())])))
directory="/etc/planetlab/db-config.d"
snippets = GetSnippets(directory)
if options.list_steps:
if not options.verbose:
- print snippet
+ print(snippet)
else:
- print "Found step %s/%s"%(directory,snippet)
+ print("Found step %s/%s"%(directory,snippet))
os.system("rpm -qf %s/%s"%(directory,snippet))
continue
fullpath = os.path.join(directory, snippet)
if options.verbose:
- print "Running step %s"%fullpath
- execfile(fullpath)
+ print("Running step %s"%fullpath)
+ exec(compile(open(fullpath).read(), fullpath, 'exec'))
if __name__ == '__main__':
main()
pid = pidfile.readline().strip()
pidfile.close()
if os.path.isdir("/proc/" + pid):
- print "Error: Another copy of %s is still running (%s)" % (prog, pid)
+ print("Error: Another copy of %s is still running (%s)" % (prog, pid))
sys.exit(1)
except IOError:
pass
variables = cfg.variables()
(category, variablelist) = variables['plc_dns']
- plc_dns = dict(zip(variablelist.keys(),
- [variable['value'] for variable in variablelist.values()]))
+ plc_dns = dict(list(zip(list(variablelist.keys()),
+ [variable['value'] for variable in list(variablelist.values())])))
if plc_dns['enabled'] != "true":
return 0
nodes[node['node_id']] = node
interface_ids = set()
- for node in nodes.values():
+ for node in list(nodes.values()):
interface_ids.update(node['interface_ids'])
for interface in GetInterfaces(list(interface_ids)):
else:
hostname = nodes[interface['node_id']]['hostname']
- if hosts.has_key(interface['ip']):
+ if interface['ip'] in hosts:
if hostname not in hosts[interface['ip']]:
hosts[interface['ip']].append(hostname)
else:
# Write /etc/plc_hosts
plc_hosts = open("/etc/plc_hosts", "w")
plc_hosts.write("# DO NOT EDIT; File is writen and removed by automatic scripts\n")
- for ip, hostnames in hosts.iteritems():
+ for ip, hostnames in hosts.items():
plc_hosts.write(ip + "\t" + " ".join(hostnames) + "\n")
plc_hosts.close()
self.args.append("/var/log/httpd/sfa_access_log")
if self.options.verbose:
- print 'Options:',self.options
- print 'Arguments:',self.args
+ print('Options:',self.options)
+ print('Arguments:',self.args)
def file_size (self,filename):
try:
return os.stat(filename)[6]
except:
- print "WARNING: file %s has vanished"%filename
+ print("WARNING: file %s has vanished"%filename)
return 0
def number_files (self):
def scan_files (self) :
if self.options.verbose:
- print 'entering scan_files, files=',self.files
+ print('entering scan_files, files=',self.files)
# mark entries in files as pre-existing
for key in self.files:
filenames = []
for arg in self.args:
if self.options.verbose:
- print 'scan_files -- Considering arg',arg
+ print('scan_files -- Considering arg',arg)
if os.path.isfile (arg):
filenames += [ arg ]
elif os.path.isdir (arg) :
filenames += self.walk (arg)
else:
- print "mtail : no such file or directory %s -- ignored"%arg
+ print("mtail : no such file or directory %s -- ignored"%arg)
# updates files
for filename in filenames :
# known file
- if self.files.has_key(filename):
+ if filename in self.files:
size = self.file_size(filename)
offset = self.files[filename]['size']
if size > offset:
try:
self.format
self.show_now()
- print self.format%filename,"new file"
+ print(self.format%filename,"new file")
self.show_file_end(filename,0,self.file_size(filename))
except:
pass
# cleanup
# avoid side-effects on the current loop basis
- read_filenames = self.files.keys()
+ read_filenames = list(self.files.keys())
for filename in read_filenames:
- if self.files[filename].has_key('old-file'):
+ if 'old-file' in self.files[filename]:
self.show_now()
- print self.format%filename,"file has gone"
+ print(self.format%filename,"file has gone")
del self.files[filename]
# compute margin and format
if not filenames:
- print sys.argv[0],": WARNING : no file in scope"
+ print(sys.argv[0],": WARNING : no file in scope")
self.format="%s"
else:
if len(filenames)==1:
self.margin=max(*[len(f) for f in filenames])
self.format="%%%ds"%self.margin
if self.options.verbose:
- print 'Current set of files:',filenames
+ print('Current set of files:',filenames)
def tail_files (self):
if self.options.verbose:
- print 'tail_files'
+ print('tail_files')
for filename in self.files:
size = self.file_size(filename)
offset = self.files[filename]['size']
def show_now (self):
if self.options.show_time:
label=time.strftime(self.options.time_format,time.localtime())
- print label,
+ print(label, end=' ')
def show_file_end (self, filename, offset, size):
try:
file.seek(offset)
line=file.read(size-offset)
self.show_now()
- print self.format%filename,'----------------------------------------'
- print line
+ print(self.format%filename,'----------------------------------------')
+ print(line)
file.close()
def show_file_when_size_decreased (self, filename, offset, size):
- print self.format%filename,'---------- file size decreased ---------',
+ print(self.format%filename,'---------- file size decreased ---------', end=' ')
if self.options.verbose:
- print 'size during last check',offset,'current size',size
+ print('size during last check',offset,'current size',size)
else:
- print ''
+ print('')
# get all files under a directory
def walk ( self, root ):
for char in typed:
if char.lower() in ['l']: os.system("clear")
elif char.lower() in ['m']:
- for i in range(3) : print 60 * '='
+ for i in range(3) : print(60 * '=')
elif char.lower() in ['q']: sys.exit(0)
elif char.lower() in ['h']:
- print """l: refresh page
+ print("""l: refresh page
m: mark
q: quit
-h: help"""
+h: help""")
###
if __name__ == '__main__':
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.POSTFIELDS, self.getpeername_post_request(local_peername))
- import StringIO
- b = StringIO.StringIO()
+ import io
+ b = io.BytesIO()
curl.setopt(pycurl.WRITEFUNCTION, b.write)
try:
curl.perform()
errcode = curl.getinfo(pycurl.HTTP_CODE)
- response = b.getvalue()
+ response = b.getvalue().decode()
- print 'xmlrpc answer',response
+ print('xmlrpc answer',response)
if response.find('Failed') >= 0:
- print 'FAILURE : failed to authenticate ?'
+ print('FAILURE : failed to authenticate ?')
return False
elif response.find(remote_peername) <0:
- print 'FAILURE : xmlrpc round trip OK but peername does not match'
+ print('FAILURE : xmlrpc round trip OK but peername does not match')
return False
else:
- print 'SUCCESS'
+ print('SUCCESS')
return True
- except pycurl.error, err:
- (errcode, errmsg) = err
+ except pycurl.error as err:
+ (errcode, errmsg) = err.args
if errcode == 60:
- print 'FAILURE', "SSL certificate validation failed, %r"%(errmsg)
+ print('FAILURE', "SSL certificate validation failed, %r"%(errmsg))
elif errcode != 200:
- print 'FAILURE', "HTTP error %d, errmsg %r" % (errcode,errmsg)
+ print('FAILURE', "HTTP error %d, errmsg %r" % (errcode,errmsg))
return False
def main (self):
# for url_format in [ 'https://%s:443/PLCAPI/' , 'https://%s/PLCAPI/' ]:
for url_format in [ 'https://%s/PLCAPI/' ]:
url=url_format%hostname
- print '============================== Checking url=',url
+ print('============================== Checking url=',url)
if self.check_url(url,local_peername,remote_peername,cacert):
ok=True
if ok:
-#!/usr/bin/python
+#!/usr/bin/python3
#
# Script for basic access to the PlanetLab Central (PLC) configuration
# file store.
def usage():
- print """
+ print("""
Script to access the PLC configuration file store.
Usage: %s [OPTION]... [FILES]
-h, --help This message
-s, --save Save changes to first configuration file
-""".lstrip() % sys.argv[0]
+""".lstrip() % sys.argv[0])
sys.exit(1)
def deprecated (message):
- print "%s: deprecated usage"%sys.argv[0]
- print message
+ print("%s: deprecated usage"%sys.argv[0])
+ print(message)
sys.exit(1)
def main():
try:
(opts, argv) = getopt.gnu_getopt(sys.argv[1:], shortopts, longopts)
- except Exception, err:
+ except Exception as err:
sys.stderr.write("Error: " + str(err) + os.linesep)
sys.exit(1)
elif opt == '-s' or opt == "--save":
if not optval:
usage()
- print 'parsed save option',optval
+ print('parsed save option',optval)
save = optval
elif opt == '-h' or opt == "--help":
usage()
plc.load(file)
except IOError:
pass
- except Exception, err:
+ except Exception as err:
sys.stderr.write("Error: %s: %s" % (file, str(err)) + os.linesep)
sys.exit(1)
# --category, --variable, --value
- if category.has_key('id') and variable.has_key('id'):
- if variable.has_key('value'):
+ if 'id' in category and 'id' in variable:
+ if 'value' in variable:
plc.set(category, variable)
else:
(category, variable) = plc.get(category['id'], variable['id'])
- if variable.has_key('value'):
- print variable['value']
+ if 'value' in variable:
+ print(variable['value'])
# --shell, --php, --xml
if output is not None:
# so that plc.d/{api,postgres} can create configs/site.xml
dirname = os.path.dirname (save)
if (not os.path.exists (dirname)):
- os.makedirs(dirname,0755)
+ os.makedirs(dirname,0o755)
if (not os.path.exists (dirname)):
- print "Cannot create dir %s - exiting" % dirname
+ print("Cannot create dir %s - exiting" % dirname)
sys.exit(1)
plc.save(save)
try:
nodegroup=GetNodeGroups({'groupname':self.options.nodegroup})[0]
except:
- print "No such nodegroup %s - ignored"%self.options.nodegroup
+ print("No such nodegroup %s - ignored"%self.options.nodegroup)
return
nodegroup_node_ids=nodegroup['node_ids']
if len(nodegroup_node_ids)==0:
- print "Empty nodegroup %s - ignored"%self.options.nodegroup
+ print("Empty nodegroup %s - ignored"%self.options.nodegroup)
return
# let's go
self.open()
return orphans
def list_person (margin,p):
- print margin,'%6d'%p['person_id'], time.asctime(time.gmtime(p['date_created'])),
- if not p['peer_id']: print 'LOCAL',
- else: print 'pr=',p['peer_id'],
- if p['enabled']: print 'ENB',
- else: print 'DIS',
- print p['email']
+ print(margin,'%6d'%p['person_id'], time.asctime(time.gmtime(p['date_created'])), end=' ')
+ if not p['peer_id']: print('LOCAL', end=' ')
+ else: print('pr=',p['peer_id'], end=' ')
+ if p['enabled']: print('ENB', end=' ')
+ else: print('DIS', end=' ')
+ print(p['email'])
date_keys=['date_created','last_updated']
def details_person (p):
- keys=p.keys()
+ keys=list(p.keys())
keys.sort()
for key in keys:
- print key,'->',
+ print(key,'->', end=' ')
value=p[key]
- if key in date_keys: print time.asctime(time.gmtime(value))
- else: print value
+ if key in date_keys: print(time.asctime(time.gmtime(value)))
+ else: print(value)
def get_related(email):
return GetPersons ({'email':email,'~peer_id':None})
def header (message):
- print '--------------------'
- print GetPeerName(),
- print time.asctime(time.gmtime())
- print 'Listing orphan accounts and any similar remote'
- print '--------------------'
+ print('--------------------')
+ print(GetPeerName(), end=' ')
+ print(time.asctime(time.gmtime()))
+ print('Listing orphan accounts and any similar remote')
+ print('--------------------')
def delete_local (person,default_bool,options):
# just in case
if person['peer_id'] != None:
- print 'ERROR: cannot delete non-local person',person['email']
+ print('ERROR: cannot delete non-local person',person['email'])
return
prompt = 'want to delete '+person['email']
while not done:
done=True
try:
- answer = raw_input(prompt).strip()
+ answer = input(prompt).strip()
except EOFError :
- print 'bailing out'
+ print('bailing out')
sys.exit(1)
if answer=='':
id=person['person_id']
email=person['email']
if options.dry_run:
- if do_delete: print 'Would delete',id,'->',email
- else: print 'Would preserve',id,'->',email
+ if do_delete: print('Would delete',id,'->',email)
+ else: print('Would preserve',id,'->',email)
elif do_delete:
- print 'Deleting',id,'->',email,
- if DeletePerson(id) == 1: print 'OK',id,'deleted'
- else: print 'Deletion failed'
+ print('Deleting',id,'->',email, end=' ')
+ if DeletePerson(id) == 1: print('OK',id,'deleted')
+ else: print('Deletion failed')
def main_orphans (options):
orphans = get_orphans()
def Run (peername):
timestring=time.strftime("%Y-%m-%d-%H-%M-%S")
- print 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',peername
- print 'RefreshPeer on %s - starting on %s'%(peername,timestring)
- print 'xxxxxxxxxx'
+ print('xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',peername)
+ print('RefreshPeer on %s - starting on %s'%(peername,timestring))
+ print('xxxxxxxxxx')
sys.stdout.flush()
start=time.time()
result=RefreshPeer(peername)
finish=time.time()
- print 'Total duration',finish-start
- print 'xxxxxxxxxx timers:'
- keys=result.keys()
+ print('Total duration',finish-start)
+ print('xxxxxxxxxx timers:')
+ keys=list(result.keys())
keys.sort()
for key in keys:
- print key,result[key]
+ print(key,result[key])
sys.stdout.flush()
sys.stderr.flush()
class SpotAliens:
def __init__(self,verbose):
- print "==================== initializing ..",
+ print("==================== initializing ..", end=' ')
self.verbose=verbose
self.all_sites=GetSites({},['peer_id','site_id','login_base','name'])
self.all_nodes=GetNodes({},['peer_id','node_id','site_id','hostname'])
self.site_hash=dict ( [ (site['site_id'],site) for site in self.all_sites ] )
self.key_hash=dict ( [ (key['key_id'],key) for key in self.all_keys ] )
self.slice_hash=dict ( [ (slice['slice_id'],slice) for slice in self.all_slices ] )
- print "done"
+ print("done")
def spot_nodes (self):
"nodes are expected to be in the same peer as their owning site"
site=self.site_hash[node['site_id']]
if node['peer_id'] != site['peer_id']:
counter+=1
- if self.verbose: print "NODE-SITE mismatch %r IN SITE %r"%(node,site)
- print '==================== Found %d inconsistent nodes'%counter
+ if self.verbose: print("NODE-SITE mismatch %r IN SITE %r"%(node,site))
+ print('==================== Found %d inconsistent nodes'%counter)
def spot_slices (self):
"slices are expected to be in the same peer as their owning site"
site=self.site_hash[slice['site_id']]
if slice['peer_id'] != site['peer_id']:
counter+=1
- if self.verbose: print "SLICE-SITE mismatch %r IN SITE %r"%(slice,site)
- print '==================== Found %d inconsistent slices'%counter
+ if self.verbose: print("SLICE-SITE mismatch %r IN SITE %r"%(slice,site))
+ print('==================== Found %d inconsistent slices'%counter)
def spot_persons (self):
"persons are expected to be in the same peer as their owning site"
site=self.site_hash[site_id]
if person['peer_id'] != site['peer_id']:
counter+=1
- if self.verbose: print "PERSON-SITE mismatch %r IN SITE %r"%(person,site)
- print '==================== Found %d inconsistent persons'%counter
+ if self.verbose: print("PERSON-SITE mismatch %r IN SITE %r"%(person,site))
+ print('==================== Found %d inconsistent persons'%counter)
def spot_keys (self):
"persons are expected to be in the same peer as their attached keys"
key=self.key_hash[key_id]
if person['peer_id'] != key['peer_id']:
counter+=1
- if self.verbose: print "PERSON-KEY mismatch %r & KEY %r"%(person,key)
- print '==================== Found %d inconsistent keys'%counter
+ if self.verbose: print("PERSON-KEY mismatch %r & KEY %r"%(person,key))
+ print('==================== Found %d inconsistent keys'%counter)
def spot_foreign (self):
"foreign persons should not have a site"
counter=1
for person in self.all_persons:
if person['peer_id'] and person['site_ids']:
- if self.verbose: print "WARNING Foreign person %r attached on sites:"%person
+ if self.verbose: print("WARNING Foreign person %r attached on sites:"%person)
for site_id in person['site_ids']:
counter+=1
- if self.verbose: print " %r"%self.site_hash[site_id]
- print '==================== Found %d foreign persons with a site'%counter
+ if self.verbose: print(" %r"%self.site_hash[site_id])
+ print('==================== Found %d foreign persons with a site'%counter)
def main ():
usage="""Usage: %prog [-- options]
-#!/usr/bin/python
+#!/usr/bin/python3
"""The Sirius Calendar Service.
if not run:
print("---- You should run")
for command in commands:
- print(" ".join(command))
+ print((" ".join(command)))
else:
for command in commands:
- print("Running {}".format(" ".join(command)))
+ print(("Running {}".format(" ".join(command))))
retcod = subprocess.call(command)
if retcod != 0:
- print("Warning: failed with retcod = {}".format(retcod))
+ print(("Warning: failed with retcod = {}".format(retcod)))
def main():
parser = ArgumentParser()
if args.deep:
commands = []
- print("Found {} containers that are known but not running".format(len(not_running_containers)))
+ print(("Found {} containers that are known but not running".format(len(not_running_containers))))
for not_running_container in not_running_containers:
commands.append(['userdel', not_running_container])
commands.append(['virsh', '-c', 'lxc:///', 'undefine', not_running_container])
# we need to call 'btrfs subvolume delete' on these remainings
# instead of just 'rm'
if zombies_containers:
- print("-------- Found {} existing, but not running, containers".format(len(zombies_containers)))
+ print(("-------- Found {} existing, but not running, containers".format(len(zombies_containers))))
commands = []
zombie_dirs = ["/vservers/"+z for z in zombies_containers]
if args.verbose:
- print("zombie_dirs='{}'".format(" ".join(zombie_dirs)))
+ print(("zombie_dirs='{}'".format(" ".join(zombie_dirs))))
subvolumes = [ path
for z in zombies_containers
for prefix in flavour_prefixes
for path in glob.glob("/vservers/{z}/{prefix}*".format(z=z, prefix=prefix))]
if subvolumes:
if args.verbose:
- print("zombie_subvolumes='{}'".format(" ".join(subvolumes)))
+ print(("zombie_subvolumes='{}'".format(" ".join(subvolumes))))
for subvolume in subvolumes:
commands.append([ 'btrfs', 'subvolume', 'delete', subvolume])
for zombie_dir in zombie_dirs:
#### should happen much less frequently
weirdos_containers = running_containers - existing_containers
if weirdos_containers:
- print("-------- Found {} running but non existing".format(len(weirdos_containers)))
+ print(("-------- Found {} running but non existing".format(len(weirdos_containers))))
for w in weirdos_containers:
- print("/vservers/{}".format(w))
+ print(("/vservers/{}".format(w)))
- print("{} slices are currently running".format(len(running_containers)))
+ print(("{} slices are currently running".format(len(running_containers))))
main()
-#!/usr/bin/python
+#!/usr/bin/python3
#
# Write out sites.xml
#
# Parse options
def usage():
- print "Usage: %s [OPTION]..." % sys.argv[0]
- print "Options:"
- print " -n, --dryrun Dry run, do not write files (default: %s)" % dryrun
- print " -d, --docroot=DIR Document root (default: %s)" % DOCROOT
- print " -h, --help This message"
+ print("Usage: %s [OPTION]..." % sys.argv[0])
+ print("Options:")
+ print(" -n, --dryrun Dry run, do not write files (default: %s)" % dryrun)
+ print(" -d, --docroot=DIR Document root (default: %s)" % DOCROOT)
+ print(" -h, --help This message")
sys.exit(1)
# Get options
try:
(opts, argv) = getopt.getopt(sys.argv[1:], "nd:h", ["dryrun", "docroot=", "help"])
-except getopt.GetoptError, err:
- print "Error: " + err.msg
+except getopt.GetoptError as err:
+ print("Error: " + err.msg)
usage()
for (opt, optval) in opts:
if not dryrun:
if os.access(PID_FILE, os.R_OK):
- pid= file(PID_FILE).readline().strip()
+ pid= open(PID_FILE).readline().strip()
- if pid <> "":
+ if pid != "":
if os.system("/bin/kill -0 %s > /dev/null 2>&1" % pid) == 0:
sys.exit(0)
# remove whitelisted nodes
remove_whitelisted = lambda node: not node['slice_ids_whitelist']
-nodes = filter(remove_whitelisted, nodes)
+nodes = list(filter(remove_whitelisted, nodes))
nodes = dict([(node['node_id'], node) for node in nodes])
for nodenetwork in nodenetworks:
- if nodes.has_key(nodenetwork['node_id']):
+ if nodenetwork['node_id'] in nodes:
node = nodes[nodenetwork['node_id']]
- for key, value in nodenetwork.iteritems():
+ for key, value in nodenetwork.items():
node[key] = value
group_node_ids = dict([(group['groupname'], group['node_ids']) for group in groups])
self.ignorableWhitespace("".join(self.indents))
self.write('<' + name)
- for (name, value) in attrs.items():
+ for (name, value) in list(attrs.items()):
self.write(' %s=%s' % (name, quoteattr(value.strip())))
self.write('/>')
# <SITE ...>
attrs = {}
for attr in ['name', 'latitude', 'longitude', 'url', 'site_id', 'login_base']:
- attrs[attr.upper()] = unicode(site[attr])
- attrs['FULL_SITE_NAME'] = unicode(site['name'])
- attrs['SHORT_SITE_NAME'] = unicode(site['abbreviated_name'])
+ attrs[attr.upper()] = str(site[attr])
+ attrs['FULL_SITE_NAME'] = str(site['name'])
+ attrs['SHORT_SITE_NAME'] = str(site['abbreviated_name'])
xml.startElement('SITE', attrs)
for node_id in site['node_ids']:
- if nodes.has_key(node_id):
+ if node_id in nodes:
node = nodes[node_id]
# <HOST ...>
attrs = {}
- attrs['NAME'] = unicode(node['hostname'])
+ attrs['NAME'] = str(node['hostname'])
attrs['VERSION'] = "2.0"
for attr in ['model', 'node_id', 'boot_state']:
- attrs[attr.upper()] = unicode(node[attr]).strip()
+ attrs[attr.upper()] = str(node[attr]).strip()
# If the node is in Alpha, Beta, or Rollout, otherwise Production
for group in ['Alpha', 'Beta', 'Rollout', 'Production']:
- if group_node_ids.has_key(group) and \
+ if group in group_node_ids and \
node_id in group_node_ids[group]:
break
attrs['STATUS'] = group
if node['version']:
- attrs['BOOT_VERSION'] = unicode(node['version'].splitlines()[0])
+ attrs['BOOT_VERSION'] = str(node['version'].splitlines()[0])
if node['ssh_rsa_key']:
- attrs['RSA_KEY'] = unicode(node['ssh_rsa_key'].splitlines()[0])
-
- if node.has_key('ip') and node['ip']:
- attrs['IP'] = unicode(node['ip'])
- if node.has_key('mac') and node['mac']:
- attrs['MAC'] = unicode(node['mac'])
- if node.has_key('bwlimit') and node['bwlimit']:
- attrs['BWLIMIT'] = unicode(format_tc_rate(node['bwlimit']))
+ attrs['RSA_KEY'] = str(node['ssh_rsa_key'].splitlines()[0])
+
+ if 'ip' in node and node['ip']:
+ attrs['IP'] = str(node['ip'])
+ if 'mac' in node and node['mac']:
+ attrs['MAC'] = str(node['mac'])
+ if 'bwlimit' in node and node['bwlimit']:
+ attrs['BWLIMIT'] = str(format_tc_rate(node['bwlimit']))
xml.simpleElement('HOST', attrs)
import string
import codecs
import socket
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import csv
SCRIPT_PID_FILE= "/var/run/gen-static-content.pid"
f.write( "$node_list_generated_time= '%s';\n" % time_generated )
# Nodes with primary IP addresses in boot state
- nodes_in_boot = filter(lambda node: node['boot_state'] == "boot" and node['ip'],
- all_nodes)
+ nodes_in_boot = [node for node in all_nodes if node['boot_state'] == "boot" and node['ip']]
# Hostnames
all_hosts = [node['hostname'] for node in nodes_in_boot]
f.write( "}\n" )
for group in ['Alpha', 'Beta']:
- if not node_group_nodes.has_key(group):
+ if group not in node_group_nodes:
node_group_nodes[group] = []
# Group nodes with primary IP addresses in boot state
- group_nodes_in_boot = filter(lambda node: node['boot_state'] == "boot" and node['ip'],
- node_group_nodes[group])
+ group_nodes_in_boot = [node for node in node_group_nodes[group] if node['boot_state'] == "boot" and node['ip']]
# Group hostnames
group_hosts = [node['hostname'] for node in group_nodes_in_boot]
f.write( "}\n" )
# All production nodes (nodes not in Alpha or Beta)
- production_nodes_in_boot = filter(lambda node: node not in node_group_nodes['Alpha'] and \
- node not in node_group_nodes['Beta'],
- nodes_in_boot)
+ production_nodes_in_boot = [node for node in nodes_in_boot if node not in node_group_nodes['Alpha'] and \
+ node not in node_group_nodes['Beta']]
production_hosts = [node['hostname'] for node in production_nodes_in_boot]
f.write( "elseif( $which_node_list == 'production_hosts' )\n" )
url = url + '/slice.php?csv=1&start_time=2+days+ago'
if slices:
url = url + '&slices[]=' + '&slices[]='.join(slices)
- stats = urllib2.urlopen(url)
+ stats = codecs.getreader('utf-8')(urllib.request.urlopen(url))
fields = ['slice', 'flows', 'packets', 'bytes', 'src_ips',
'dst_ips', 'top_dst_ip', 'top_dst_ip_bytes']
rows = csv.DictReader(stats, fields)
f.write("<?php\n")
f.write("$planetflow = array(\n")
for row in rows:
- if row.has_key('slice'):
+ if 'slice' in row:
f.write("'%s' => array(\n" % row['slice'])
for field in fields:
- if row.has_key(field) and \
+ if field in row and \
row[field] is not None and \
row[field] != "":
if type(row[field]) == type(0):
if os.access(SCRIPT_PID_FILE, os.R_OK):
- pid= string.strip(file(SCRIPT_PID_FILE).readline())
+ pid= open(SCRIPT_PID_FILE).readline().strip()
- if pid <> "":
+ if pid != "":
if os.system("/bin/kill -0 %s > /dev/null 2>&1" % pid) == 0:
sys.exit(0)
if nodenetwork['is_primary']:
node['ip'] = nodenetwork['ip']
break
- except IndexError, KeyError:
+ except (IndexError, KeyError):
continue
# Get list of nodes in each node group
for group in all_groups:
- nodes_in_group = filter(lambda node: node['node_id'] in group['node_ids'], all_nodes)
+ nodes_in_group = [node for node in all_nodes if node['node_id'] in group['node_ids']]
node_group_nodes[group['tagname']] = nodes_in_group
# generate the static content files
output_file_path= "%s/%s" % (GENERATED_OUTPUT_PATH,file_name)
tmp_output_file_path= output_file_path + '.tmp'
tmp_output_file= codecs.open( tmp_output_file_path, encoding = 'utf-8', mode = "w" )
- except IOError, err:
- print( "Unable to open file %s for writing." % output_file_path )
+ except IOError as err:
+ print(( "Unable to open file %s for writing." % output_file_path ))
continue
try:
func(tmp_output_file)
tmp_output_file.flush()
shutil.copyfile( tmp_output_file_path, output_file_path )
- except Exception, e:
- print "Unable to get content for file: %s" % file_name, e
+ except Exception as e:
+ print("Unable to get content for file: %s" % file_name, e)
import traceback
traceback.print_exc()
-#!/usr/bin/python
+#!/usr/bin/python3
#
# Generates:
#
dryrun = False
(plcapi, moreopts, argv) = plcapilib.plcapi(globals(), sys.argv, shortopts, longopts, moreusage)
-for opt, optval in moreopts.iteritems():
+for opt, optval in moreopts.items():
if opt == "-n" or opt == "--dryrun":
dryrun = True
if len(fields) < len(required):
continue
# {'account': 'princeton_mlh', 'password': 'x', ...}
- entries.append(dict(zip(required + optional, fields)))
+ entries.append(dict(zip(required + optional, fields)))
return entries
pis = []
for site in GetSites([site_id_or_login_base], ['person_ids']):
persons = GetPersons(site['person_ids'], ['email', 'roles', 'enabled'])
- pis += filter(lambda person: 'pi' in person['roles'] and person['enabled'], persons)
+ pis += [person for person in persons if 'pi' in person['roles'] and person['enabled']]
if pis:
return [pi['email'] for pi in pis]
return ["NO-pi"]
techs = []
for site in GetSites([login_base], ['person_ids']):
persons = GetPersons(site['person_ids'], ['email', 'roles', 'enabled'])
- techs += filter(lambda person: 'tech' in person['roles'] and person['enabled'], persons)
+ techs += [person for person in persons if 'tech' in person['roles'] and person['enabled']]
if techs:
return [tech['email'] for tech in techs]
return ["NO-tech"]
users = []
for slice in GetSlices([name], ['site_id', 'person_ids']):
persons = GetPersons(slice['person_ids'], ['email', 'enabled'])
- enabledpersons += filter(lambda person: person['enabled'], persons)
+ enabledpersons += [person for person in persons if person['enabled']]
users += [person['email'] for person in enabledpersons]
# Add all the PIs for the site
users += GetPIs(slice['site_id'])
for pw in passwd('/usr/share/doc/plc/accounts.txt'):
# Only allow posts from those with implicit or explicit access to
# all servers or explicit access to the CVS server
- if pw.has_key('servers') and pw['servers'] not in ['*', 'cvs']:
+ if 'servers' in pw and pw['servers'] not in ['*', 'cvs']:
continue
# System users are those with UIDs greater than 2000 and less than 3000
if not dryrun:
config_list = os.popen("/var/mailman/bin/config_list -i %s cvs" % cvs_config.name)
if config_list.close() is not None:
- raise Exception, "/var/mailman/bin/config_list cvs failed"
+ raise Exception("/var/mailman/bin/config_list cvs failed")
# Get all emails
announce = []
# Merge in membership of announce-additions
list_members = os.popen("/var/mailman/bin/list_members announce-additions", 'r')
- announce += map(lambda line: line.strip(), list_members.readlines())
+ announce += [line.strip() for line in list_members.readlines()]
list_members.close()
# Remove duplicates and sort
- announce = list(Set(announce))
+ announce = list(set(announce))
sync_members = os.popen("/var/mailman/bin/sync_members %s -w=yes -g=yes -f - announce" % flags, 'w')
sync_members.write("\n".join(announce))
if sync_members.close() is not None:
- raise Exception, "/var/mailman/bin/sync_members announce failed"
+ raise Exception("/var/mailman/bin/sync_members announce failed")
# Generate {pis,techs}@lists.planet-lab.org membership
sync_members = os.popen("/var/mailman/bin/sync_members %s -w=no -g=no -f - pis" % flags, 'w')
sync_members.write("\n".join(pis))
if sync_members.close() is not None:
- raise Exception, "/var/mailman/bin/sync_members pis failed"
+ raise Exception("/var/mailman/bin/sync_members pis failed")
sync_members = os.popen("/var/mailman/bin/sync_members %s -w=no -g=no -f - techs" % flags, 'w')
sync_members.write("\n".join(techs))
if sync_members.close() is not None:
- raise Exception, "/var/mailman/bin/sync_members techs failed"
+ raise Exception("/var/mailman/bin/sync_members techs failed")
# Generate local-host-names file
local_host_names.write("planet-lab.org\n")
# Otherwise, print space-separated list of aliases
elif len(argv) == 2:
if argv[0] == "slice":
- print " ".join(GetSliceUsers(argv[1]))
+ print(" ".join(GetSliceUsers(argv[1])))
elif argv[0] == "pi":
- print " ".join(GetPIs(argv[1]))
+ print(" ".join(GetPIs(argv[1])))
elif argv[0] == "tech":
- print " ".join(GetTechs(argv[1]))
+ print(" ".join(GetTechs(argv[1])))
else:
plcapi.usage(moreusage)
-#!/usr/bin/python
+#!/usr/bin/python3
#
# Notify users of slices that are about to expire
#
self.filename = filename
def write(self, data):
try:
- fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_APPEND, 0644)
+ fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_APPEND, 0o644)
- os.write(fd, '{}'.format(data))
+ os.write(fd, '{}'.format(data).encode())
os.close(fd)
except OSError:
expires = now + (options.expires * 24 * 60 * 60)
if options.verbose:
- print "Checking for slices that expire before " + time.ctime(expires)
+ print("Checking for slices that expire before " + time.ctime(expires))
slice_filter = {'peer_id': None}
if options.slices:
persons = GetPersons ({'peer_id': None}, ['person_id', 'email', 'sfa_created'])
persons_by_id = { p['person_id'] : p for p in persons }
if options.verbose:
- print "retrieved {} persons".format(len(persons))
+ print("retrieved {} persons".format(len(persons)))
slices = GetSlices(slice_filter, ['slice_id', 'name', 'expires', 'description', 'url', 'person_ids'])
if options.verbose:
- print "scanning {} slices".format(len(slices))
+ print("scanning {} slices".format(len(slices)))
for slice in slices:
# See if slice expires before the specified warning date
if not recipient_emails:
if options.verbose:
- print """{slice_name} has no recipient
-({nb_in_slice} in slice, {nb_not_sfa} not sfa_created)""".format(**locals())
+ print("""{slice_name} has no recipient
+({nb_in_slice} in slice, {nb_not_sfa} not sfa_created)""".format(**locals()))
continue
log_details = [time.ctime(now), slice_name, time.ctime(slice['expires'])]
log_data = "{}\t{}".format("\t".join(log_details), ",".join(recipient_emails))
if options.dryrun:
- print "-------------------- Found slice to renew {slice_name}".format(**locals())
- print message_format.format(**locals())
- print "log >> {}".format(log_data)
+ print("-------------------- Found slice to renew {slice_name}".format(**locals()))
+ print(message_format.format(**locals()))
+ print("log >> {}".format(log_data))
else:
NotifyPersons(slice['person_ids'],
"{PLC_NAME} slice {slice_name} expires in {days}".format(**locals()),
message_format.format(**locals()))
- print >> log, log_data
+ print(log_data, file=log)