2 # in spring 2010, an attempt was made to use pycurl instead of forking curl
3 # it turned out, however, that after around 10 cycles of the nodemanager,
4 # attempts to call GetSlivers were failing with a curl error 60
5 # we are thus reverting to the version from tag curlwrapper.py-NodeManager-2.0-8
6 # the (broken) pycurl version can be found in tags 2.0-9 and 2.0-10
8 from subprocess import PIPE, Popen
9 from select import select
    def kill(self, signal = signal.SIGTERM):
        # Send the given signal (default SIGTERM) to the forked curl child.
        # NOTE(review): the parameter name shadows the `signal` module, but the
        # default value is evaluated at def time, so `signal.SIGTERM` here still
        # refers to the module; inside the body, `signal` is the int argument.
        os.kill(self.pid, signal)
def retrieve(url, cacert=None, postdata=None, timeout=90):
    """Fetch *url* by forking /usr/bin/curl and return the response body.

    cacert   -- optional CA certificate file, passed to curl as --cacert
    postdata -- optional data written to curl's stdin (sent via --data @-)
    timeout  -- seconds; used both for curl's own deadlines and for the
                select() guard below

    Raises xmlrpclib.ProtocolError when the transfer fails (the `rc`/`err`
    values it carries are computed on lines elided from this view).

    NOTE(review): several lines of this function are not visible here
    (e.g. closing curl's stdin and reading stderr / the exit code), so the
    comments below cover only what is shown.
    """
    # options = ('/usr/bin/curl', '--fail', '--silent')
    options = ('/usr/bin/curl', '--fail', )
    if cacert: options += ('--cacert', cacert)
    if postdata: options += ('--data', '@-')
    # have curl enforce the deadline itself: --max-time bounds the whole
    # transfer, --connect-timeout bounds the initial connection
    options += ('--max-time', str(timeout))
    options += ('--connect-timeout', str(timeout))
    p = Sopen(options + (url,), stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
    if postdata: p.stdin.write(postdata)
    # belt-and-braces guard on top of curl's own timeouts: if neither stdout
    # nor stderr becomes readable within `timeout` seconds, kill the child
    sout, sin, serr = select([p.stdout,p.stderr],[],[], timeout)
    if len(sout) == 0 and len(sin) == 0 and len(serr) == 0:
        logger.verbose("curlwrapper: timed out after %s" % timeout)
        p.kill(signal.SIGKILL)
    data = p.stdout.read()
    # NOTE(review): the `if rc != 0:` guard and the assignments of `err` and
    # `rc` that the two lines below depend on are elided from this view.
    # when this triggers, the error sometimes doesn't get printed
    logger.log ("curlwrapper: retrieve, got stderr <%s>"%err)
    raise xmlrpclib.ProtocolError(url, rc, err, postdata)