##// END OF EJS Templates
keepalive: ensure `close_all()` actually closes all cached connections...
Matt Harbison -
r50436:8251f7cc stable
parent child Browse files
Show More
@@ -1,845 +1,847 b''
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, see
12 # License along with this library; if not, see
13 # <http://www.gnu.org/licenses/>.
13 # <http://www.gnu.org/licenses/>.
14
14
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
17
17
18 # Modified by Benoit Boissinot:
18 # Modified by Benoit Boissinot:
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
20 # Modified by Dirkjan Ochtman:
20 # Modified by Dirkjan Ochtman:
21 # - import md5 function from a local util module
21 # - import md5 function from a local util module
22 # Modified by Augie Fackler:
22 # Modified by Augie Fackler:
23 # - add safesend method and use it to prevent broken pipe errors
23 # - add safesend method and use it to prevent broken pipe errors
24 # on large POST requests
24 # on large POST requests
25
25
26 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
26 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
27
27
28 >>> import urllib2
28 >>> import urllib2
29 >>> from keepalive import HTTPHandler
29 >>> from keepalive import HTTPHandler
30 >>> keepalive_handler = HTTPHandler()
30 >>> keepalive_handler = HTTPHandler()
31 >>> opener = urlreq.buildopener(keepalive_handler)
31 >>> opener = urlreq.buildopener(keepalive_handler)
32 >>> urlreq.installopener(opener)
32 >>> urlreq.installopener(opener)
33 >>>
33 >>>
34 >>> fo = urlreq.urlopen('http://www.python.org')
34 >>> fo = urlreq.urlopen('http://www.python.org')
35
35
36 If a connection to a given host is requested, and all of the existing
36 If a connection to a given host is requested, and all of the existing
37 connections are still in use, another connection will be opened. If
37 connections are still in use, another connection will be opened. If
38 the handler tries to use an existing connection but it fails in some
38 the handler tries to use an existing connection but it fails in some
39 way, it will be closed and removed from the pool.
39 way, it will be closed and removed from the pool.
40
40
41 To remove the handler, simply re-run build_opener with no arguments, and
41 To remove the handler, simply re-run build_opener with no arguments, and
42 install that opener.
42 install that opener.
43
43
44 You can explicitly close connections by using the close_connection()
44 You can explicitly close connections by using the close_connection()
45 method of the returned file-like object (described below) or you can
45 method of the returned file-like object (described below) or you can
46 use the handler methods:
46 use the handler methods:
47
47
48 close_connection(host)
48 close_connection(host)
49 close_all()
49 close_all()
50 open_connections()
50 open_connections()
51
51
52 NOTE: using the close_connection and close_all methods of the handler
52 NOTE: using the close_connection and close_all methods of the handler
53 should be done with care when using multiple threads.
53 should be done with care when using multiple threads.
54 * there is nothing that prevents another thread from creating new
54 * there is nothing that prevents another thread from creating new
55 connections immediately after connections are closed
55 connections immediately after connections are closed
56 * no checks are done to prevent in-use connections from being closed
56 * no checks are done to prevent in-use connections from being closed
57
57
58 >>> keepalive_handler.close_all()
58 >>> keepalive_handler.close_all()
59
59
60 EXTRA ATTRIBUTES AND METHODS
60 EXTRA ATTRIBUTES AND METHODS
61
61
62 Upon a status of 200, the object returned has a few additional
62 Upon a status of 200, the object returned has a few additional
63 attributes and methods, which should not be used if you want to
63 attributes and methods, which should not be used if you want to
64 remain consistent with the normal urllib2-returned objects:
64 remain consistent with the normal urllib2-returned objects:
65
65
66 close_connection() - close the connection to the host
66 close_connection() - close the connection to the host
67 readlines() - you know, readlines()
67 readlines() - you know, readlines()
68 status - the return status (i.e. 404)
68 status - the return status (i.e. 404)
69 reason - english translation of status (i.e. 'File not found')
69 reason - english translation of status (i.e. 'File not found')
70
70
71 If you want the best of both worlds, use this inside an
71 If you want the best of both worlds, use this inside an
72 AttributeError-catching try:
72 AttributeError-catching try:
73
73
74 >>> try: status = fo.status
74 >>> try: status = fo.status
75 >>> except AttributeError: status = None
75 >>> except AttributeError: status = None
76
76
77 Unfortunately, these are ONLY there if status == 200, so it's not
77 Unfortunately, these are ONLY there if status == 200, so it's not
78 easy to distinguish between non-200 responses. The reason is that
78 easy to distinguish between non-200 responses. The reason is that
79 urllib2 tries to do clever things with error codes 301, 302, 401,
79 urllib2 tries to do clever things with error codes 301, 302, 401,
80 and 407, and it wraps the object upon return.
80 and 407, and it wraps the object upon return.
81 """
81 """
82
82
83 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
83 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
84
84
85
85
86 import collections
86 import collections
87 import hashlib
87 import hashlib
88 import socket
88 import socket
89 import sys
89 import sys
90 import threading
90 import threading
91
91
92 from .i18n import _
92 from .i18n import _
93 from .pycompat import getattr
93 from .pycompat import getattr
94 from .node import hex
94 from .node import hex
95 from . import (
95 from . import (
96 pycompat,
96 pycompat,
97 urllibcompat,
97 urllibcompat,
98 util,
98 util,
99 )
99 )
100 from .utils import procutil
100 from .utils import procutil
101
101
102 httplib = util.httplib
102 httplib = util.httplib
103 urlerr = util.urlerr
103 urlerr = util.urlerr
104 urlreq = util.urlreq
104 urlreq = util.urlreq
105
105
106 DEBUG = None
106 DEBUG = None
107
107
108
108
class ConnectionManager:
    """Thread-safe registry of keep-alive connections.

    Tracks every cached connection, the host it belongs to, and whether
    it is ready (idle) for reuse.
    """

    def __init__(self):
        self._lock = threading.Lock()
        self._hostmap = collections.defaultdict(list)  # host -> [connection]
        self._connmap = {}  # map connections to host
        self._readymap = {}  # map connection to ready state

    def add(self, host, connection, ready):
        """Register *connection* for *host* with the given ready state."""
        with self._lock:
            self._hostmap[host].append(connection)
            self._connmap[connection] = host
            self._readymap[connection] = ready

    def remove(self, connection):
        """Forget *connection*; a no-op if it is not registered."""
        with self._lock:
            try:
                host = self._connmap[connection]
            except KeyError:
                pass
            else:
                del self._connmap[connection]
                del self._readymap[connection]
                self._hostmap[host].remove(connection)
                if not self._hostmap[host]:
                    del self._hostmap[host]

    def set_ready(self, connection, ready):
        """Mark *connection* ready (idle) or busy; unknown connections ignored."""
        try:
            self._readymap[connection] = ready
        except KeyError:
            pass

    def get_ready_conn(self, host):
        """Return an idle connection to *host*, marking it busy, or None."""
        with self._lock:
            for candidate in self._hostmap[host]:
                if self._readymap[candidate]:
                    self._readymap[candidate] = False
                    return candidate
        return None

    def get_all(self, host=None):
        """Return connections for *host* as a list, or a host -> [connection]
        dict for all hosts.

        Copies are returned so that callers may mutate the manager (e.g.
        call remove()) while iterating without skipping entries.
        """
        if host:
            return list(self._hostmap[host])
        else:
            return {h: list(conns) for (h, conns) in self._hostmap.items()}
170
172
171
173
class KeepAliveHandler:
    """urllib-style handler that pools HTTP connections for reuse.

    Connections are cached in a ConnectionManager and handed back out for
    subsequent requests to the same host.  Not fully thread-safe: see the
    module docstring for the caveats around close_connection()/close_all().
    """

    def __init__(self, timeout=None):
        self._cm = ConnectionManager()
        self._timeout = timeout  # socket timeout applied to new connections
        # running totals; also mirrored onto self.parent when it has them
        self.requestscount = 0
        self.sentbytescount = 0

    #### Connection Management
    def open_connections(self):
        """return a list of connected hosts and the number of connections
        to each.  [('foo.com:80', 2), ('bar.org', 1)]"""
        return [(host, len(li)) for (host, li) in self._cm.get_all().items()]

    def close_connection(self, host):
        """close connection(s) to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
        no error occurs if there is no connection to that host."""
        for h in self._cm.get_all(host):
            self._cm.remove(h)
            h.close()

    def close_all(self):
        """close all open connections"""
        # get_all() returns copies of the internal lists, so calling
        # remove() while iterating cannot skip connections.
        for host, conns in self._cm.get_all().items():
            for h in conns:
                self._cm.remove(h)
                h.close()

    def _request_closed(self, request, host, connection):
        """tells us that this request is now closed and that the
        connection is ready for another request"""
        self._cm.set_ready(connection, True)

    def _remove_connection(self, host, connection, close=0):
        # drop (and optionally close) a connection from the pool
        if close:
            connection.close()
        self._cm.remove(connection)

    #### Transaction Execution
    def http_open(self, req):
        return self.do_open(HTTPConnection, req)

    def do_open(self, http_class, req):
        """Open *req*, reusing a pooled connection to the host when one is
        usable, otherwise creating a new *http_class* connection.

        Returns the response object, decorated with handler/host/url/
        connection attributes.  Raises urlerr.urlerror on protocol or
        socket failures.
        """
        host = urllibcompat.gethost(req)
        if not host:
            raise urlerr.urlerror(b'no host given')

        try:
            h = self._cm.get_ready_conn(host)
            # NOTE: the while/else relies on for/while-else semantics: the
            # else runs when the loop ends without break, including when
            # no ready connection existed at all (h is None initially).
            while h:
                r = self._reuse_connection(h, req, host)

                # if this response is non-None, then it worked and we're
                # done.  Break out, skipping the else block.
                if r:
                    break

                # connection is bad - possibly closed by server
                # discard it and ask for the next free connection
                h.close()
                self._cm.remove(h)
                h = self._cm.get_ready_conn(host)
            else:
                # no (working) free connections were found.  Create a new one.
                h = http_class(host, timeout=self._timeout)
                if DEBUG:
                    DEBUG.info(
                        b"creating new connection to %s (%d)", host, id(h)
                    )
                self._cm.add(host, h, False)
                self._start_transaction(h, req)
                r = h.getresponse()
        # The string form of BadStatusLine is the status line. Add some context
        # to make the error message slightly more useful.
        except httplib.BadStatusLine as err:
            raise urlerr.urlerror(
                _(b'bad HTTP status line: %s') % pycompat.sysbytes(err.line)
            )
        except (socket.error, httplib.HTTPException) as err:
            raise urlerr.urlerror(err)

        # If not a persistent connection, don't try to reuse it. Look
        # for this using getattr() since vcr doesn't define this
        # attribute, and in that case always close the connection.
        if getattr(r, 'will_close', True):
            self._cm.remove(h)

        if DEBUG:
            DEBUG.info(b"STATUS: %s, %s", r.status, r.reason)
        # decorate the response so close()/close_connection() on it can
        # find their way back to this handler and connection
        r._handler = self
        r._host = host
        r._url = req.get_full_url()
        r._connection = h
        r.code = r.status
        r.headers = r.msg
        r.msg = r.reason

        return r

    def _reuse_connection(self, h, req, host):
        """start the transaction with a re-used connection
        return a response object (r) upon success or None on failure.
        This DOES not close or remove bad connections in cases where
        it returns.  However, if an unexpected exception occurs, it
        will close and remove the connection before re-raising.
        """
        try:
            self._start_transaction(h, req)
            r = h.getresponse()
            # note: just because we got something back doesn't mean it
            # worked.  We'll check the version below, too.
        except (socket.error, httplib.HTTPException):
            r = None
        except:  # re-raises
            # adding this block just in case we've missed
            # something we will still raise the exception, but
            # lets try and close the connection and remove it
            # first.  We previously got into a nasty loop
            # where an exception was uncaught, and so the
            # connection stayed open.  On the next try, the
            # same exception was raised, etc.  The trade-off is
            # that it's now possible this call will raise
            # a DIFFERENT exception
            if DEBUG:
                DEBUG.error(
                    b"unexpected exception - closing connection to %s (%d)",
                    host,
                    id(h),
                )
            self._cm.remove(h)
            h.close()
            raise

        if r is None or r.version == 9:
            # httplib falls back to assuming HTTP 0.9 if it gets a
            # bad header back.  This is most likely to happen if
            # the socket has been closed by the server since we
            # last used the connection.
            if DEBUG:
                DEBUG.info(
                    b"failed to re-use connection to %s (%d)", host, id(h)
                )
            r = None
        else:
            if DEBUG:
                DEBUG.info(b"re-using connection to %s (%d)", host, id(h))

        return r

    def _start_transaction(self, h, req):
        """Send the request line, headers and body of *req* on connection *h*,
        and account the bytes sent on self (and self.parent when present).

        Raises urlerr.urlerror if the socket fails while sending.
        """
        oldbytescount = getattr(h, 'sentbytescount', 0)

        # What follows mostly reimplements HTTPConnection.request()
        # except it adds self.parent.addheaders in the mix and sends headers
        # in a deterministic order (to make testing easier).
        headers = util.sortdict(self.parent.addheaders)
        headers.update(sorted(req.headers.items()))
        headers.update(sorted(req.unredirected_hdrs.items()))
        headers = util.sortdict((n.lower(), v) for n, v in headers.items())
        skipheaders = {}
        for n in ('host', 'accept-encoding'):
            # these are emitted via the headers dict below, so tell
            # putrequest() not to add its own copies
            if n in headers:
                skipheaders['skip_' + n.replace('-', '_')] = 1
        try:
            if urllibcompat.hasdata(req):
                data = urllibcompat.getdata(req)
                h.putrequest(
                    req.get_method(),
                    urllibcompat.getselector(req),
                    **skipheaders
                )
                if 'content-type' not in headers:
                    h.putheader(
                        'Content-type', 'application/x-www-form-urlencoded'
                    )
                if 'content-length' not in headers:
                    h.putheader('Content-length', '%d' % len(data))
            else:
                h.putrequest(
                    req.get_method(),
                    urllibcompat.getselector(req),
                    **skipheaders
                )
        except socket.error as err:
            raise urlerr.urlerror(err)
        for k, v in headers.items():
            h.putheader(k, v)
        h.endheaders()
        if urllibcompat.hasdata(req):
            h.send(data)

        # This will fail to record events in case of I/O failure.  That's OK.
        self.requestscount += 1
        self.sentbytescount += getattr(h, 'sentbytescount', 0) - oldbytescount

        try:
            self.parent.requestscount += 1
            self.parent.sentbytescount += (
                getattr(h, 'sentbytescount', 0) - oldbytescount
            )
        except AttributeError:
            pass
374
376
375
377
class HTTPHandler(KeepAliveHandler, urlreq.httphandler):
    # urllib-compatible HTTP handler with keep-alive support; all of the
    # behavior lives in KeepAliveHandler.
    pass
378
380
379
381
380 class HTTPResponse(httplib.HTTPResponse):
382 class HTTPResponse(httplib.HTTPResponse):
381 # we need to subclass HTTPResponse in order to
383 # we need to subclass HTTPResponse in order to
382 # 1) add readline(), readlines(), and readinto() methods
384 # 1) add readline(), readlines(), and readinto() methods
383 # 2) add close_connection() methods
385 # 2) add close_connection() methods
384 # 3) add info() and geturl() methods
386 # 3) add info() and geturl() methods
385
387
386 # in order to add readline(), read must be modified to deal with a
388 # in order to add readline(), read must be modified to deal with a
387 # buffer. example: readline must read a buffer and then spit back
389 # buffer. example: readline must read a buffer and then spit back
388 # one line at a time. The only real alternative is to read one
390 # one line at a time. The only real alternative is to read one
389 # BYTE at a time (ick). Once something has been read, it can't be
391 # BYTE at a time (ick). Once something has been read, it can't be
390 # put back (ok, maybe it can, but that's even uglier than this),
392 # put back (ok, maybe it can, but that's even uglier than this),
391 # so if you THEN do a normal read, you must first take stuff from
393 # so if you THEN do a normal read, you must first take stuff from
392 # the buffer.
394 # the buffer.
393
395
394 # the read method wraps the original to accommodate buffering,
396 # the read method wraps the original to accommodate buffering,
395 # although read() never adds to the buffer.
397 # although read() never adds to the buffer.
396 # Both readline and readlines have been stolen with almost no
398 # Both readline and readlines have been stolen with almost no
397 # modification from socket.py
399 # modification from socket.py
398
400
    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        # 'strict' is accepted for API compatibility but not forwarded;
        # the base-class __init__ here takes only debuglevel and method
        httplib.HTTPResponse.__init__(
            self, sock, debuglevel=debuglevel, method=method
        )
        self.fileno = sock.fileno
        self.code = None
        self.receivedbytescount = 0  # total body bytes seen by read()
        self._rbuf = b''  # local read buffer backing readline()/readlines()
        self._rbufsize = 8096  # chunk size for buffered reads
        self._handler = None  # inserted by the handler later
        self._host = None  # (same)
        self._url = None  # (same)
        self._connection = None  # (same)
412
414
    # Keep references to the base-class implementations so read()/readline()
    # below can layer buffering on top of them.  readinto may be absent on
    # some implementations, hence the getattr default of None.
    _raw_read = httplib.HTTPResponse.read
    _raw_readinto = getattr(httplib.HTTPResponse, 'readinto', None)
415
417
416 # Python 2.7 has a single close() which closes the socket handle.
418 # Python 2.7 has a single close() which closes the socket handle.
417 # This method was effectively renamed to _close_conn() in Python 3. But
419 # This method was effectively renamed to _close_conn() in Python 3. But
418 # there is also a close(). _close_conn() is called by methods like
420 # there is also a close(). _close_conn() is called by methods like
419 # read().
421 # read().
420
422
    def close(self):
        """Close the response body and notify the handler so the underlying
        connection is marked ready for reuse."""
        if self.fp:
            self.fp.close()
            self.fp = None
        if self._handler:
            # hand the connection back to the pool
            self._handler._request_closed(
                self, self._host, self._connection
            )
429
431
    def _close_conn(self):
        # Python 3's HTTPResponse calls _close_conn() internally (e.g. from
        # read() at end of body); route it to close() so the keep-alive
        # bookkeeping above still runs.
        self.close()
432
434
    def close_connection(self):
        """Close the underlying connection and drop it from the pool."""
        self._handler._remove_connection(self._host, self._connection, close=1)
        self.close()
436
438
    def info(self):
        """Return the response headers (urllib file-object compatibility)."""
        return self.headers
439
441
    def geturl(self):
        """Return the request URL (urllib file-object compatibility)."""
        return self._url
442
444
    def read(self, amt=None):
        """Read up to *amt* bytes (everything if None), consuming the local
        readline buffer first and accounting received bytes."""
        # the _rbuf test is only in this first if for speed.  It's not
        # logically necessary
        if self._rbuf and amt is not None:
            L = len(self._rbuf)
            if amt > L:
                # buffer only partially satisfies the request; fall
                # through and read the remainder from the socket
                amt -= L
            else:
                # fully satisfied from the buffer; no socket read needed
                s = self._rbuf[:amt]
                self._rbuf = self._rbuf[amt:]
                return s
        # Careful! http.client.HTTPResponse.read() on Python 3 is
        # implemented using readinto(), which can duplicate self._rbuf
        # if it's not empty.
        s = self._rbuf
        self._rbuf = b''
        data = self._raw_read(amt)

        # byte accounting on the response, the connection, and the parent
        # opener; any of these may lack the counter, in which case skip it
        self.receivedbytescount += len(data)
        try:
            self._connection.receivedbytescount += len(data)
        except AttributeError:
            pass
        try:
            self._handler.parent.receivedbytescount += len(data)
        except AttributeError:
            pass

        s += data
        return s
473
475
    # stolen from Python SVN #68532 to fix issue1088
    def _read_chunked(self, amt):
        """Read up to *amt* bytes (everything if None) from a chunked
        transfer-encoded body.

        Raises httplib.IncompleteRead (after closing) if a chunk-size
        line cannot be parsed.
        """
        chunk_left = self.chunk_left
        parts = []

        while True:
            if chunk_left is None:
                # start of a new chunk: parse the "<hex-size>[;ext]\r\n" line
                line = self.fp.readline()
                i = line.find(b';')
                if i >= 0:
                    line = line[:i]  # strip chunk-extensions
                try:
                    chunk_left = int(line, 16)
                except ValueError:
                    # close the connection as protocol synchronization is
                    # probably lost
                    self.close()
                    raise httplib.IncompleteRead(b''.join(parts))
                if chunk_left == 0:
                    # zero-sized chunk terminates the body; trailers follow
                    break
            if amt is None:
                parts.append(self._safe_read(chunk_left))
            elif amt < chunk_left:
                # caller is satisfied mid-chunk; remember what's left
                parts.append(self._safe_read(amt))
                self.chunk_left = chunk_left - amt
                return b''.join(parts)
            elif amt == chunk_left:
                parts.append(self._safe_read(amt))
                self._safe_read(2)  # toss the CRLF at the end of the chunk
                self.chunk_left = None
                return b''.join(parts)
            else:
                parts.append(self._safe_read(chunk_left))
                amt -= chunk_left

            # we read the whole chunk, get another
            self._safe_read(2)  # toss the CRLF at the end of the chunk
            chunk_left = None

        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline()
            if not line:
                # a vanishingly small number of sites EOF without
                # sending the trailer
                break
            if line == b'\r\n':
                break

        # we read everything; close the "file"
        self.close()

        return b''.join(parts)
528
530
529 def readline(self):
531 def readline(self):
530 # Fast path for a line is already available in read buffer.
532 # Fast path for a line is already available in read buffer.
531 i = self._rbuf.find(b'\n')
533 i = self._rbuf.find(b'\n')
532 if i >= 0:
534 if i >= 0:
533 i += 1
535 i += 1
534 line = self._rbuf[:i]
536 line = self._rbuf[:i]
535 self._rbuf = self._rbuf[i:]
537 self._rbuf = self._rbuf[i:]
536 return line
538 return line
537
539
538 # No newline in local buffer. Read until we find one.
540 # No newline in local buffer. Read until we find one.
539 # readinto read via readinto will already return _rbuf
541 # readinto read via readinto will already return _rbuf
540 if self._raw_readinto is None:
542 if self._raw_readinto is None:
541 chunks = [self._rbuf]
543 chunks = [self._rbuf]
542 else:
544 else:
543 chunks = []
545 chunks = []
544 i = -1
546 i = -1
545 readsize = self._rbufsize
547 readsize = self._rbufsize
546 while True:
548 while True:
547 new = self._raw_read(readsize)
549 new = self._raw_read(readsize)
548 if not new:
550 if not new:
549 break
551 break
550
552
551 self.receivedbytescount += len(new)
553 self.receivedbytescount += len(new)
552 self._connection.receivedbytescount += len(new)
554 self._connection.receivedbytescount += len(new)
553 try:
555 try:
554 self._handler.parent.receivedbytescount += len(new)
556 self._handler.parent.receivedbytescount += len(new)
555 except AttributeError:
557 except AttributeError:
556 pass
558 pass
557
559
558 chunks.append(new)
560 chunks.append(new)
559 i = new.find(b'\n')
561 i = new.find(b'\n')
560 if i >= 0:
562 if i >= 0:
561 break
563 break
562
564
563 # We either have exhausted the stream or have a newline in chunks[-1].
565 # We either have exhausted the stream or have a newline in chunks[-1].
564
566
565 # EOF
567 # EOF
566 if i == -1:
568 if i == -1:
567 self._rbuf = b''
569 self._rbuf = b''
568 return b''.join(chunks)
570 return b''.join(chunks)
569
571
570 i += 1
572 i += 1
571 self._rbuf = chunks[-1][i:]
573 self._rbuf = chunks[-1][i:]
572 chunks[-1] = chunks[-1][:i]
574 chunks[-1] = chunks[-1][:i]
573 return b''.join(chunks)
575 return b''.join(chunks)
574
576
575 def readlines(self, sizehint=0):
577 def readlines(self, sizehint=0):
576 total = 0
578 total = 0
577 list = []
579 list = []
578 while True:
580 while True:
579 line = self.readline()
581 line = self.readline()
580 if not line:
582 if not line:
581 break
583 break
582 list.append(line)
584 list.append(line)
583 total += len(line)
585 total += len(line)
584 if sizehint and total >= sizehint:
586 if sizehint and total >= sizehint:
585 break
587 break
586 return list
588 return list
587
589
588 def readinto(self, dest):
590 def readinto(self, dest):
589 if self._raw_readinto is None:
591 if self._raw_readinto is None:
590 res = self.read(len(dest))
592 res = self.read(len(dest))
591 if not res:
593 if not res:
592 return 0
594 return 0
593 dest[0 : len(res)] = res
595 dest[0 : len(res)] = res
594 return len(res)
596 return len(res)
595 total = len(dest)
597 total = len(dest)
596 have = len(self._rbuf)
598 have = len(self._rbuf)
597 if have >= total:
599 if have >= total:
598 dest[0:total] = self._rbuf[:total]
600 dest[0:total] = self._rbuf[:total]
599 self._rbuf = self._rbuf[total:]
601 self._rbuf = self._rbuf[total:]
600 return total
602 return total
601 mv = memoryview(dest)
603 mv = memoryview(dest)
602 got = self._raw_readinto(mv[have:total])
604 got = self._raw_readinto(mv[have:total])
603
605
604 self.receivedbytescount += got
606 self.receivedbytescount += got
605 self._connection.receivedbytescount += got
607 self._connection.receivedbytescount += got
606 try:
608 try:
607 self._handler.receivedbytescount += got
609 self._handler.receivedbytescount += got
608 except AttributeError:
610 except AttributeError:
609 pass
611 pass
610
612
611 dest[0:have] = self._rbuf
613 dest[0:have] = self._rbuf
612 got += len(self._rbuf)
614 got += len(self._rbuf)
613 self._rbuf = b''
615 self._rbuf = b''
614 return got
616 return got
615
617
616
618
def safesend(self, str):
    """Send `str' to the server.

    Shamelessly ripped off from httplib to patch a bad behavior.
    """
    # NOTE: the parameter name 'str' shadows the builtin; kept to match
    # the httplib signature this function replaces.
    #
    # _broken_pipe_resp is an attribute we set in this function
    # if the socket is closed while we're sending data but
    # the server sent us a response before hanging up.
    # In that case, we want to pretend to send the rest of the
    # outgoing data, and then let the user use getresponse()
    # (which we wrap) to get this last response before
    # opening a new socket.
    if getattr(self, '_broken_pipe_resp', None) is not None:
        return

    # Lazily (re)connect if the connection object allows it.
    if self.sock is None:
        if self.auto_open:
            self.connect()
        else:
            raise httplib.NotConnected

    # send the data to the server. if we get a broken pipe, then close
    # the socket. we want to reconnect when somebody tries to send again.
    #
    # NOTE: we DO propagate the error, though, because we cannot simply
    # ignore the error... the caller will know if they can retry.
    if self.debuglevel > 0:
        print(b"send:", repr(str))
    try:
        blocksize = 8192
        read = getattr(str, 'read', None)
        if read is not None:
            # File-like payload: stream it out in blocks, accounting
            # the sent bytes as we go.
            if self.debuglevel > 0:
                print(b"sending a read()able")
            data = read(blocksize)
            while data:
                self.sock.sendall(data)
                self.sentbytescount += len(data)
                data = read(blocksize)
        else:
            self.sock.sendall(str)
            self.sentbytescount += len(str)
    except BrokenPipeError:
        if self._HTTPConnection__state == httplib._CS_REQ_SENT:
            # The full request went out before the pipe broke; the
            # server may already have answered, so capture that response
            # now for the wrapped getresponse() to hand back later.
            self._broken_pipe_resp = None
            self._broken_pipe_resp = self.getresponse()
            reraise = False
        else:
            reraise = True
        self.close()
        if reraise:
            raise
669
671
670
672
def wrapgetresponse(cls):
    """Wraps getresponse in cls with a broken-pipe sane version."""

    def safegetresponse(self):
        # safesend() may have stashed a response in _broken_pipe_resp
        # when the server answered just before dropping the connection
        # (the socket is already closed in that case).  Hand the cached
        # response back instead of touching the socket; otherwise defer
        # to the wrapped class's normal response path.
        cached = getattr(self, '_broken_pipe_resp', None)
        if cached is None:
            return cls.getresponse(self)
        return cached

    safegetresponse.__doc__ = cls.getresponse.__doc__
    return safegetresponse
686
688
687
689
class HTTPConnection(httplib.HTTPConnection):
    """httplib.HTTPConnection variant wired for keepalive handling."""

    # url.httpsconnection inherits from this. So when adding/removing
    # attributes, be sure to audit httpsconnection() for unintended
    # consequences.

    # use the modified response class
    response_class = HTTPResponse
    # swap in the broken-pipe-aware send/getresponse defined above at
    # module level (getresponse is wrapped once, at class-creation time)
    send = safesend
    getresponse = wrapgetresponse(httplib.HTTPConnection)

    def __init__(self, *args, **kwargs):
        httplib.HTTPConnection.__init__(self, *args, **kwargs)
        # running totals of traffic on this connection; read by the
        # response object and handler for accounting
        self.sentbytescount = 0
        self.receivedbytescount = 0
702
704
703
705
704 #########################################################################
706 #########################################################################
705 ##### TEST FUNCTIONS
707 ##### TEST FUNCTIONS
706 #########################################################################
708 #########################################################################
707
709
708
710
def continuity(url):
    """Fetch ``url`` three ways and print an MD5 digest of each result.

    Sanity-checks that the keepalive handler returns the same bytes as
    the plain urllib handler, for both read() and readline().
    """
    md5 = hashlib.md5
    fmt = b'%25s: %s'

    def digest(data):
        # hexdigest() renders the hash as text; the previous
        # hex(m.digest()) raised TypeError because hex() only accepts
        # integers while digest() returns bytes.
        return md5(data).hexdigest().encode('ascii')

    # first fetch the file with the normal http handler
    opener = urlreq.buildopener()
    urlreq.installopener(opener)
    fo = urlreq.urlopen(url)
    foo = fo.read()
    fo.close()
    print(fmt % (b'normal urllib', digest(foo)))

    # now install the keepalive handler and try again
    opener = urlreq.buildopener(HTTPHandler())
    urlreq.installopener(opener)

    fo = urlreq.urlopen(url)
    foo = fo.read()
    fo.close()
    print(fmt % (b'keepalive read', digest(foo)))

    # same again, but line by line through readline()
    fo = urlreq.urlopen(url)
    foo = b''
    while True:
        f = fo.readline()
        if f:
            foo = foo + f
        else:
            break
    fo.close()
    print(fmt % (b'keepalive readline', digest(foo)))
743
745
744
746
def comp(N, url):
    """Time ``N`` fetches of ``url`` with and without keepalive."""
    print(b' making %i connections to:\n %s' % (N, url))

    def timed_run(banner, *handlers):
        # Install a fresh opener built from the given handlers, then
        # time N sequential fetches through it.
        procutil.stdout.write(banner)
        urlreq.installopener(urlreq.buildopener(*handlers))
        elapsed = fetch(N, url)
        print(b' TIME: %.3f s' % elapsed)
        return elapsed

    # Baseline with the stock urllib handlers, then the same workload
    # through the keepalive handler.
    t1 = timed_run(b' first using the normal urllib handlers')
    t2 = timed_run(b' now using the keepalive handler ', HTTPHandler())
    print(b' improvement factor: %.2f' % (t1 / t2))
762
764
763
765
def fetch(N, url, delay=0):
    """Fetch ``url`` ``N`` times; return the elapsed wall-clock seconds.

    ``delay`` inserts a pause (in seconds) between successive fetches.
    Prints a warning when successive reads return different lengths,
    which would indicate a corrupted keepalive stream.
    """
    import time

    lens = []
    starttime = time.time()
    for i in range(N):
        if delay and i > 0:
            time.sleep(delay)
        fo = urlreq.urlopen(url)
        foo = fo.read()
        fo.close()
        lens.append(len(foo))
    diff = time.time() - starttime

    # Every read should match the length of the first one.
    # (enumerate replaces the previous hand-rolled counter.)
    for j, length in enumerate(lens[1:], 1):
        if length != lens[0]:
            print(b"WARNING: inconsistent length on read %i: %i" % (j, length))

    return diff
785
787
786
788
def test_timeout(url):
    """Check that a dropped idle connection is transparently reopened."""
    global DEBUG
    dbbackup = DEBUG

    class FakeLogger:
        # Route every log level to stdout so the reconnect is visible.
        def debug(self, msg, *args):
            print(msg % args)

        info = warning = error = debug

    # Temporarily swap in the chatty logger for this test.
    DEBUG = FakeLogger()
    print(b" fetching the file to establish a connection")
    fo = urlreq.urlopen(url)
    data1 = fo.read()
    fo.close()

    total_wait = 20
    print(
        b" waiting %i seconds for the server to close the connection"
        % total_wait
    )
    # Visible countdown while we let the server time the connection out.
    for remaining in range(total_wait, 0, -1):
        procutil.stdout.write(b'\r %2i' % remaining)
        procutil.stdout.flush()
        time.sleep(1)
    procutil.stderr.write(b'\r')

    print(b" fetching the file a second time")
    fo = urlreq.urlopen(url)
    data2 = fo.read()
    fo.close()

    print(b' data are identical' if data1 == data2 else b' ERROR: DATA DIFFER')

    DEBUG = dbbackup
823
825
824
826
def test(url, N=10):
    """Run the continuity, speed and dropped-connection checks in order."""
    checks = [
        (
            b"performing continuity test (making sure stuff isn't corrupted)",
            lambda: continuity(url),
        ),
        (b"performing speed comparison", lambda: comp(N, url)),
        (b"performing dropped-connection check", lambda: test_timeout(url)),
    ]
    for idx, (banner, run) in enumerate(checks):
        if idx:
            # blank separator line between the individual checks
            print(b'')
        print(banner)
        run()
834
836
835
837
if __name__ == '__main__':
    # Standalone test driver: fetch a URL N times and exercise the
    # keepalive handler via test() above.
    import time

    try:
        # usage: <script> <number-of-fetches> <url>
        N = int(sys.argv[1])
        url = sys.argv[2]
    except (IndexError, ValueError):
        print(b"%s <integer> <url>" % sys.argv[0])
    else:
        test(url, N)
General Comments 0
You need to be logged in to leave comments. Login now