# In spring 2010, an attempt was made to use pycurl instead of forking curl.
# It turned out, however, that after around 10 cycles of the nodemanager,
# attempts to call GetSlivers were failing with a curl error 60.
# We are thus reverting to the version from tag curlwrapper.py-NodeManager-2.0-8;
# the (broken) pycurl version can be found in tags 2.0-9 and 2.0-10.
8 from subprocess import PIPE, Popen
9 from select import select
def kill(self, signal = signal.SIGTERM):
    """Send a signal (SIGTERM by default) to the forked curl child.

    NOTE(review): the parameter deliberately shadows the ``signal``
    module; the default ``signal.SIGTERM`` is evaluated at def time,
    so the shadowing is harmless inside this body, where ``signal``
    is the integer signal number passed by the caller.
    """
    # self.pid is presumably set by the enclosing Popen subclass
    # (its definition is outside this chunk) -- confirm against full file.
    os.kill(self.pid, signal)
def retrieve(url, cacert=None, postdata=None, timeout=90):
    """Fetch ``url`` by forking /usr/bin/curl and return the response body.

    Parameters:
        url      -- the URL handed to curl on its command line
        cacert   -- optional CA certificate file, passed as ``--cacert``
        postdata -- optional request body; when given, curl reads it from
                    stdin (``--data @-``) and we pipe it in
        timeout  -- seconds used both for curl's own ``--max-time`` /
                    ``--connect-timeout`` and for the local select() wait

    Raises xmlrpclib.ProtocolError when curl exits non-zero (the
    nonzero-exit guard and the ``err``/``rc`` assignments live on lines
    not visible in this chunk).
    """
    # command = ('/usr/bin/curl', '--fail', '--silent')
    # --fail makes curl exit non-zero on HTTP errors instead of
    # printing the error page to stdout.
    command = ('/usr/bin/curl', '--fail', )
    if cacert: command += ('--cacert', cacert)
    if postdata: command += ('--data', '@-')
    command += ('--max-time', str(timeout))
    command += ('--connect-timeout', str(timeout))
    # (a verbosity guard around these prints appears to be on lines
    # omitted from this chunk -- confirm against full file)
    print 'Invoking ',command
    if postdata: print 'with postdata=',postdata
    # Sopen is a Popen subclass defined earlier in this file (outside
    # this chunk) that records the child's pid for kill().
    p = Sopen(command , stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
    # NOTE(review): a large postdata with no stdin close/flush here can
    # deadlock against a full pipe; presumably p.stdin is closed on a
    # line omitted from this chunk -- confirm against full file.
    if postdata: p.stdin.write(postdata)
    # Wait until curl produces output on stdout or stderr, or give up
    # after `timeout` seconds (select returns three empty lists then).
    sout, sin, serr = select([p.stdout,p.stderr],[],[], timeout)
    if len(sout) == 0 and len(sin) == 0 and len(serr) == 0:
        logger.verbose("curlwrapper: timed out after %s" % timeout)
        # kill the whole child so the read() below does not block forever
        p.kill(signal.SIGKILL)
    data = p.stdout.read()
    # when this triggers, the error sometimes doesn't get printed
    # NOTE(review): ``err`` and ``rc`` (curl's stderr text and exit
    # status) are assigned on lines missing from this chunk, and the
    # two lines below presumably sit under an ``if rc != 0:`` guard
    # there -- confirm against full file.
    logger.log ("curlwrapper: retrieve, got stderr <%s>"%err)
    raise xmlrpclib.ProtocolError(url, rc, err, postdata)