##// END OF EJS Templates
keepalive: remove pycompat.iteritems()...
Gregory Szorc -
r49779:dea766fc default
parent child Browse files
Show More
@@ -1,846 +1,846
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, see
12 # License along with this library; if not, see
13 # <http://www.gnu.org/licenses/>.
13 # <http://www.gnu.org/licenses/>.
14
14
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
17
17
18 # Modified by Benoit Boissinot:
18 # Modified by Benoit Boissinot:
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
20 # Modified by Dirkjan Ochtman:
20 # Modified by Dirkjan Ochtman:
21 # - import md5 function from a local util module
21 # - import md5 function from a local util module
22 # Modified by Augie Fackler:
22 # Modified by Augie Fackler:
23 # - add safesend method and use it to prevent broken pipe errors
23 # - add safesend method and use it to prevent broken pipe errors
24 # on large POST requests
24 # on large POST requests
25
25
26 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
26 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
27
27
28 >>> import urllib2
28 >>> import urllib2
29 >>> from keepalive import HTTPHandler
29 >>> from keepalive import HTTPHandler
30 >>> keepalive_handler = HTTPHandler()
30 >>> keepalive_handler = HTTPHandler()
31 >>> opener = urlreq.buildopener(keepalive_handler)
31 >>> opener = urlreq.buildopener(keepalive_handler)
32 >>> urlreq.installopener(opener)
32 >>> urlreq.installopener(opener)
33 >>>
33 >>>
34 >>> fo = urlreq.urlopen('http://www.python.org')
34 >>> fo = urlreq.urlopen('http://www.python.org')
35
35
36 If a connection to a given host is requested, and all of the existing
36 If a connection to a given host is requested, and all of the existing
37 connections are still in use, another connection will be opened. If
37 connections are still in use, another connection will be opened. If
38 the handler tries to use an existing connection but it fails in some
38 the handler tries to use an existing connection but it fails in some
39 way, it will be closed and removed from the pool.
39 way, it will be closed and removed from the pool.
40
40
41 To remove the handler, simply re-run build_opener with no arguments, and
41 To remove the handler, simply re-run build_opener with no arguments, and
42 install that opener.
42 install that opener.
43
43
44 You can explicitly close connections by using the close_connection()
44 You can explicitly close connections by using the close_connection()
45 method of the returned file-like object (described below) or you can
45 method of the returned file-like object (described below) or you can
46 use the handler methods:
46 use the handler methods:
47
47
48 close_connection(host)
48 close_connection(host)
49 close_all()
49 close_all()
50 open_connections()
50 open_connections()
51
51
52 NOTE: using the close_connection and close_all methods of the handler
52 NOTE: using the close_connection and close_all methods of the handler
53 should be done with care when using multiple threads.
53 should be done with care when using multiple threads.
54 * there is nothing that prevents another thread from creating new
54 * there is nothing that prevents another thread from creating new
55 connections immediately after connections are closed
55 connections immediately after connections are closed
56 * no checks are done to prevent in-use connections from being closed
56 * no checks are done to prevent in-use connections from being closed
57
57
58 >>> keepalive_handler.close_all()
58 >>> keepalive_handler.close_all()
59
59
60 EXTRA ATTRIBUTES AND METHODS
60 EXTRA ATTRIBUTES AND METHODS
61
61
62 Upon a status of 200, the object returned has a few additional
62 Upon a status of 200, the object returned has a few additional
63 attributes and methods, which should not be used if you want to
63 attributes and methods, which should not be used if you want to
64 remain consistent with the normal urllib2-returned objects:
64 remain consistent with the normal urllib2-returned objects:
65
65
66 close_connection() - close the connection to the host
66 close_connection() - close the connection to the host
67 readlines() - you know, readlines()
67 readlines() - you know, readlines()
68 status - the return status (i.e. 404)
68 status - the return status (i.e. 404)
69 reason - english translation of status (i.e. 'File not found')
69 reason - english translation of status (i.e. 'File not found')
70
70
71 If you want the best of both worlds, use this inside an
71 If you want the best of both worlds, use this inside an
72 AttributeError-catching try:
72 AttributeError-catching try:
73
73
74 >>> try: status = fo.status
74 >>> try: status = fo.status
75 >>> except AttributeError: status = None
75 >>> except AttributeError: status = None
76
76
77 Unfortunately, these are ONLY there if status == 200, so it's not
77 Unfortunately, these are ONLY there if status == 200, so it's not
78 easy to distinguish between non-200 responses. The reason is that
78 easy to distinguish between non-200 responses. The reason is that
79 urllib2 tries to do clever things with error codes 301, 302, 401,
79 urllib2 tries to do clever things with error codes 301, 302, 401,
80 and 407, and it wraps the object upon return.
80 and 407, and it wraps the object upon return.
81 """
81 """
82
82
83 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
83 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
84
84
85
85
86 import collections
86 import collections
87 import errno
87 import errno
88 import hashlib
88 import hashlib
89 import socket
89 import socket
90 import sys
90 import sys
91 import threading
91 import threading
92
92
93 from .i18n import _
93 from .i18n import _
94 from .pycompat import getattr
94 from .pycompat import getattr
95 from .node import hex
95 from .node import hex
96 from . import (
96 from . import (
97 pycompat,
97 pycompat,
98 urllibcompat,
98 urllibcompat,
99 util,
99 util,
100 )
100 )
101 from .utils import procutil
101 from .utils import procutil
102
102
103 httplib = util.httplib
103 httplib = util.httplib
104 urlerr = util.urlerr
104 urlerr = util.urlerr
105 urlreq = util.urlreq
105 urlreq = util.urlreq
106
106
107 DEBUG = None
107 DEBUG = None
108
108
109
109
class ConnectionManager(object):
    """Thread-safe registry of keep-alive connections.

    Tracks, per host, every open connection and whether each one is
    currently free ("ready") to carry another request.
    """

    def __init__(self):
        self._lock = threading.Lock()
        # host -> [connection, ...]
        self._hostmap = collections.defaultdict(list)
        # connection -> host (reverse index of _hostmap)
        self._connmap = {}
        # connection -> ready state (True = idle, free for reuse)
        self._readymap = {}

    def add(self, host, connection, ready):
        """Register ``connection`` for ``host`` with the given ready state."""
        with self._lock:
            self._hostmap[host].append(connection)
            self._connmap[connection] = host
            self._readymap[connection] = ready

    def remove(self, connection):
        """Forget ``connection`` entirely; a no-op if it is not tracked."""
        with self._lock:
            try:
                host = self._connmap[connection]
            except KeyError:
                # never registered (or already removed) - nothing to do
                pass
            else:
                del self._connmap[connection]
                del self._readymap[connection]
                self._hostmap[host].remove(connection)
                if not self._hostmap[host]:
                    del self._hostmap[host]

    def set_ready(self, connection, ready):
        """Mark ``connection`` as ready (idle) or busy.

        Connections that are no longer tracked are silently ignored:
        the previous unconditional dict assignment (wrapped in a dead
        ``except KeyError`` - assignment cannot raise KeyError) would
        resurrect a stale ``_readymap`` entry for a connection that had
        already been removed, leaking it forever.
        """
        if connection in self._readymap:
            self._readymap[connection] = ready

    def get_ready_conn(self, host):
        """Return an idle connection for ``host`` and mark it busy.

        Returns None when no tracked connection for ``host`` is ready.
        """
        conn = None
        with self._lock:
            for candidate in self._hostmap[host]:
                if self._readymap[candidate]:
                    # claim it before releasing the lock
                    self._readymap[candidate] = False
                    conn = candidate
                    break
        return conn

    def get_all(self, host=None):
        """Return a copy of the connections for ``host``, or, with no
        host, a dict mapping every host to its connection list."""
        if host:
            return list(self._hostmap[host])
        else:
            return dict(self._hostmap)
171
171
172
172
class KeepAliveHandler(object):
    """urllib2-style handler that pools and re-uses HTTP connections."""

    def __init__(self, timeout=None):
        self._cm = ConnectionManager()
        self._timeout = timeout
        # running totals, also mirrored onto self.parent when present
        self.requestscount = 0
        self.sentbytescount = 0

    #### Connection Management
    def open_connections(self):
        """return a list of connected hosts and the number of connections
        to each.  [('foo.com:80', 2), ('bar.org', 1)]"""
        return [(h, len(conns)) for (h, conns) in self._cm.get_all().items()]

    def close_connection(self, host):
        """close connection(s) to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
        no error occurs if there is no connection to that host."""
        for conn in self._cm.get_all(host):
            self._cm.remove(conn)
            conn.close()

    def close_all(self):
        """close all open connections"""
        for host, conns in self._cm.get_all().items():
            for conn in conns:
                self._cm.remove(conn)
                conn.close()

    def _request_closed(self, request, host, connection):
        """tells us that this request is now closed and that the
        connection is ready for another request"""
        self._cm.set_ready(connection, True)

    def _remove_connection(self, host, connection, close=0):
        if close:
            connection.close()
        self._cm.remove(connection)

    #### Transaction Execution
    def http_open(self, req):
        return self.do_open(HTTPConnection, req)

    def do_open(self, http_class, req):
        host = urllibcompat.gethost(req)
        if not host:
            raise urlerr.urlerror(b'no host given')

        try:
            conn = self._cm.get_ready_conn(host)
            while conn:
                res = self._reuse_connection(conn, req, host)

                # A non-None response means the reuse worked; leaving
                # the loop via break skips the while/else branch below.
                if res:
                    break

                # Stale connection - most likely closed server-side.
                # Discard it and try the next idle one.
                conn.close()
                self._cm.remove(conn)
                conn = self._cm.get_ready_conn(host)
            else:
                # no (working) idle connection was found: open a new one
                conn = http_class(host, timeout=self._timeout)
                if DEBUG:
                    DEBUG.info(
                        b"creating new connection to %s (%d)", host, id(conn)
                    )
                self._cm.add(host, conn, False)
                self._start_transaction(conn, req)
                res = conn.getresponse()
        # The string form of BadStatusLine is the status line. Add some context
        # to make the error message slightly more useful.
        except httplib.BadStatusLine as err:
            raise urlerr.urlerror(
                _(b'bad HTTP status line: %s') % pycompat.sysbytes(err.line)
            )
        except (socket.error, httplib.HTTPException) as err:
            raise urlerr.urlerror(err)

        # If not a persistent connection, don't try to reuse it. Look
        # for this using getattr() since vcr doesn't define this
        # attribute, and in that case always close the connection.
        if getattr(res, 'will_close', True):
            self._cm.remove(conn)

        if DEBUG:
            DEBUG.info(b"STATUS: %s, %s", res.status, res.reason)
        # decorate the response the way urllib2 callers expect
        res._handler = self
        res._host = host
        res._url = req.get_full_url()
        res._connection = conn
        res.code = res.status
        res.headers = res.msg
        res.msg = res.reason

        return res

    def _reuse_connection(self, conn, req, host):
        """start the transaction with a re-used connection
        return a response object upon success or None on failure.
        This DOES not close or remove bad connections in cases where
        it returns.  However, if an unexpected exception occurs, it
        will close and remove the connection before re-raising.
        """
        try:
            self._start_transaction(conn, req)
            res = conn.getresponse()
            # Getting something back does not yet prove the connection
            # was healthy; the HTTP-version check below catches garbage.
        except (socket.error, httplib.HTTPException):
            res = None
        except:  # re-raises
            # Safety net: close and unregister the connection before
            # propagating an unexpected error.  Previously an uncaught
            # exception left the broken connection pooled, so every
            # retry hit the same failure in a loop.  The trade-off is
            # that this call may now raise a DIFFERENT exception.
            if DEBUG:
                DEBUG.error(
                    b"unexpected exception - closing connection to %s (%d)",
                    host,
                    id(conn),
                )
            self._cm.remove(conn)
            conn.close()
            raise

        if res is None or res.version == 9:
            # httplib falls back to HTTP 0.9 when it cannot parse the
            # status line, which usually means the server closed the
            # socket since we last used this connection.
            if DEBUG:
                DEBUG.info(
                    b"failed to re-use connection to %s (%d)", host, id(conn)
                )
            res = None
        else:
            if DEBUG:
                DEBUG.info(b"re-using connection to %s (%d)", host, id(conn))

        return res

    def _start_transaction(self, conn, req):
        oldbytescount = getattr(conn, 'sentbytescount', 0)

        # Mostly a reimplementation of HTTPConnection.request(), except
        # that self.parent.addheaders is folded in and headers go out in
        # a deterministic (sorted) order to make testing easier.
        headers = util.sortdict(self.parent.addheaders)
        headers.update(sorted(req.headers.items()))
        headers.update(sorted(req.unredirected_hdrs.items()))
        headers = util.sortdict((n.lower(), v) for n, v in headers.items())
        skipheaders = {}
        for n in ('host', 'accept-encoding'):
            if n in headers:
                skipheaders['skip_' + n.replace('-', '_')] = 1
        try:
            if urllibcompat.hasdata(req):
                data = urllibcompat.getdata(req)
                conn.putrequest(
                    req.get_method(),
                    urllibcompat.getselector(req),
                    **skipheaders
                )
                if 'content-type' not in headers:
                    conn.putheader(
                        'Content-type', 'application/x-www-form-urlencoded'
                    )
                if 'content-length' not in headers:
                    conn.putheader('Content-length', '%d' % len(data))
            else:
                conn.putrequest(
                    req.get_method(),
                    urllibcompat.getselector(req),
                    **skipheaders
                )
        except socket.error as err:
            raise urlerr.urlerror(err)
        for k, v in headers.items():
            conn.putheader(k, v)
        conn.endheaders()
        if urllibcompat.hasdata(req):
            conn.send(data)

        # This will fail to record events in case of I/O failure. That's OK.
        self.requestscount += 1
        self.sentbytescount += (
            getattr(conn, 'sentbytescount', 0) - oldbytescount
        )

        try:
            self.parent.requestscount += 1
            self.parent.sentbytescount += (
                getattr(conn, 'sentbytescount', 0) - oldbytescount
            )
        except AttributeError:
            pass
375
375
376
376
class HTTPHandler(KeepAliveHandler, urlreq.httphandler):
    """Plain-HTTP opener handler backed by the keep-alive pool."""
379
379
380
380
class HTTPResponse(httplib.HTTPResponse):
    # we need to subclass HTTPResponse in order to
    # 1) add readline(), readlines(), and readinto() methods
    # 2) add close_connection() methods
    # 3) add info() and geturl() methods

    # in order to add readline(), read must be modified to deal with a
    # buffer.  example: readline must read a buffer and then spit back
    # one line at a time.  The only real alternative is to read one
    # BYTE at a time (ick).  Once something has been read, it can't be
    # put back (ok, maybe it can, but that's even uglier than this),
    # so if you THEN do a normal read, you must first take stuff from
    # the buffer.

    # the read method wraps the original to accommodate buffering,
    # although read() never adds to the buffer.
    # Both readline and readlines have been stolen with almost no
    # modification from socket.py

    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        # note: ``strict`` is accepted for interface compatibility but
        # not forwarded (the base class dropped it on Python 3)
        httplib.HTTPResponse.__init__(
            self, sock, debuglevel=debuglevel, method=method
        )
        self.fileno = sock.fileno
        self.code = None
        self.receivedbytescount = 0
        self._rbuf = b''
        self._rbufsize = 8096
        # the four fields below are inserted by the handler later
        self._handler = None
        self._host = None
        self._url = None
        self._connection = None

    _raw_read = httplib.HTTPResponse.read
    _raw_readinto = getattr(httplib.HTTPResponse, 'readinto', None)

    # Python 2.7 has a single close() which closes the socket handle.
    # This method was effectively renamed to _close_conn() in Python 3. But
    # there is also a close(). _close_conn() is called by methods like
    # read().

    def close(self):
        if self.fp:
            self.fp.close()
            self.fp = None
            if self._handler:
                # give the connection back to the pool
                self._handler._request_closed(
                    self, self._host, self._connection
                )

    def _close_conn(self):
        self.close()

    def close_connection(self):
        self._handler._remove_connection(self._host, self._connection, close=1)
        self.close()

    def info(self):
        return self.headers

    def geturl(self):
        return self._url

    def read(self, amt=None):
        # The buffer test in this first `if` is only a fast path; it is
        # not logically necessary.
        if self._rbuf and amt is not None:
            buflen = len(self._rbuf)
            if amt > buflen:
                # consume the whole buffer, then read the remainder below
                amt -= buflen
            else:
                # entirely satisfiable from the buffer
                piece = self._rbuf[:amt]
                self._rbuf = self._rbuf[amt:]
                return piece
        # Careful! http.client.HTTPResponse.read() on Python 3 is
        # implemented using readinto(), which can duplicate self._rbuf
        # if it's not empty.
        out = self._rbuf
        self._rbuf = b''
        fresh = self._raw_read(amt)

        # byte accounting; missing a counter on I/O failure is fine
        self.receivedbytescount += len(fresh)
        try:
            self._connection.receivedbytescount += len(fresh)
        except AttributeError:
            pass
        try:
            self._handler.parent.receivedbytescount += len(fresh)
        except AttributeError:
            pass

        out += fresh
        return out

    # stolen from Python SVN #68532 to fix issue1088
    def _read_chunked(self, amt):
        chunk_left = self.chunk_left
        pieces = []

        while True:
            if chunk_left is None:
                # at a chunk boundary: parse the next chunk-size line
                line = self.fp.readline()
                i = line.find(b';')
                if i >= 0:
                    line = line[:i]  # strip chunk-extensions
                try:
                    chunk_left = int(line, 16)
                except ValueError:
                    # close the connection as protocol synchronization is
                    # probably lost
                    self.close()
                    raise httplib.IncompleteRead(b''.join(pieces))
                if chunk_left == 0:
                    # terminating zero-size chunk
                    break
            if amt is None:
                pieces.append(self._safe_read(chunk_left))
            elif amt < chunk_left:
                pieces.append(self._safe_read(amt))
                self.chunk_left = chunk_left - amt
                return b''.join(pieces)
            elif amt == chunk_left:
                pieces.append(self._safe_read(amt))
                self._safe_read(2)  # toss the CRLF at the end of the chunk
                self.chunk_left = None
                return b''.join(pieces)
            else:
                pieces.append(self._safe_read(chunk_left))
                amt -= chunk_left

            # we read the whole chunk, get another
            self._safe_read(2)  # toss the CRLF at the end of the chunk
            chunk_left = None

        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline()
            if not line:
                # a vanishingly small number of sites EOF without
                # sending the trailer
                break
            if line == b'\r\n':
                break

        # we read everything; close the "file"
        self.close()

        return b''.join(pieces)
529
529
def readline(self):
    """Return one line (including the trailing newline) from the response.

    At EOF a final partial line is returned without a newline; b''
    signals that the stream is exhausted.
    """
    # Fast path: a complete line is already sitting in the read buffer.
    pos = self._rbuf.find(b'\n')
    if pos >= 0:
        pos += 1
        head = self._rbuf[:pos]
        self._rbuf = self._rbuf[pos:]
        return head

    # Slow path: pull raw chunks until a newline shows up. When reads go
    # through readinto, _rbuf has already been drained by that path, so
    # only seed the accumulator with it otherwise.
    pieces = [] if self._raw_readinto is not None else [self._rbuf]
    newline_at = -1
    blocksize = self._rbufsize
    while True:
        segment = self._raw_read(blocksize)
        if not segment:
            break

        # Credit the received bytes at every aggregation level; the
        # handler's parent counter is best-effort only.
        nbytes = len(segment)
        self.receivedbytescount += nbytes
        self._connection.receivedbytescount += nbytes
        try:
            self._handler.parent.receivedbytescount += nbytes
        except AttributeError:
            pass

        pieces.append(segment)
        newline_at = segment.find(b'\n')
        if newline_at >= 0:
            break

    # Either the stream is exhausted or pieces[-1] contains a newline.
    if newline_at == -1:
        # EOF with no newline: hand back whatever was accumulated.
        self._rbuf = b''
        return b''.join(pieces)

    # Split the final chunk around the newline; the tail goes back into
    # the local buffer for the next read.
    newline_at += 1
    self._rbuf = pieces[-1][newline_at:]
    pieces[-1] = pieces[-1][:newline_at]
    return b''.join(pieces)
575
575
def readlines(self, sizehint=0):
    """Read lines until EOF and return them as a list.

    If ``sizehint`` is non-zero, stop after the total number of bytes
    read reaches or exceeds it (the line that crosses the threshold is
    still included). Fix: the accumulator was named ``list``, shadowing
    the builtin; renamed to ``lines``.
    """
    total = 0
    lines = []
    while True:
        line = self.readline()
        if not line:
            break
        lines.append(line)
        total += len(line)
        if sizehint and total >= sizehint:
            break
    return lines
588
588
def readinto(self, dest):
    """Read up to ``len(dest)`` bytes into the writable buffer ``dest``.

    Returns the number of bytes stored in ``dest`` (0 on EOF). Drains
    the local read buffer first, then reads the remainder directly into
    ``dest`` via the raw readinto when one is available.
    """
    if self._raw_readinto is None:
        # Underlying file object has no readinto(); emulate with a
        # plain read() plus a copy.
        res = self.read(len(dest))
        if not res:
            return 0
        dest[0 : len(res)] = res
        return len(res)
    total = len(dest)
    have = len(self._rbuf)
    if have >= total:
        # Request satisfied entirely from the local buffer.
        dest[0:total] = self._rbuf[:total]
        self._rbuf = self._rbuf[total:]
        return total
    mv = memoryview(dest)
    got = self._raw_readinto(mv[have:total])

    self.receivedbytescount += got
    self._connection.receivedbytescount += got
    try:
        # Credit the opener-level counter, matching readline() which
        # updates self._handler.parent.receivedbytescount. The old
        # spelling (self._handler.receivedbytescount) always raised
        # AttributeError here, so the bytes were silently never counted.
        self._handler.parent.receivedbytescount += got
    except AttributeError:
        pass

    dest[0:have] = self._rbuf
    got += len(self._rbuf)
    self._rbuf = b''
    return got
616
616
617
617
def safesend(self, str):
    """Send `str' to the server.

    Shamelessly ripped off from httplib to patch a bad behavior.

    Accepts either a bytes-like payload or an object with a ``read``
    method (streamed in 8k blocks). On EPIPE while a request is in
    flight, any response the server sent before hanging up is captured
    in ``_broken_pipe_resp`` for the wrapped getresponse() to return.
    """
    # _broken_pipe_resp is an attribute we set in this function
    # if the socket is closed while we're sending data but
    # the server sent us a response before hanging up.
    # In that case, we want to pretend to send the rest of the
    # outgoing data, and then let the user use getresponse()
    # (which we wrap) to get this last response before
    # opening a new socket.
    if getattr(self, '_broken_pipe_resp', None) is not None:
        return

    if self.sock is None:
        if self.auto_open:
            self.connect()
        else:
            raise httplib.NotConnected

    # send the data to the server. if we get a broken pipe, then close
    # the socket. we want to reconnect when somebody tries to send again.
    #
    # NOTE: we DO propagate the error, though, because we cannot simply
    # ignore the error... the caller will know if they can retry.
    if self.debuglevel > 0:
        print(b"send:", repr(str))
    try:
        blocksize = 8192
        read = getattr(str, 'read', None)
        if read is not None:
            # File-like payload: stream it in blocksize pieces.
            if self.debuglevel > 0:
                print(b"sending a read()able")
            data = read(blocksize)
            while data:
                self.sock.sendall(data)
                self.sentbytescount += len(data)
                data = read(blocksize)
        else:
            self.sock.sendall(str)
            self.sentbytescount += len(str)
    except socket.error as v:
        reraise = True
        if v.args[0] == errno.EPIPE:  # Broken pipe
            # Access the base class's private __state via its mangled
            # name to see whether the request had been fully sent.
            if self._HTTPConnection__state == httplib._CS_REQ_SENT:
                # Initialize the attribute first so the wrapped
                # getresponse() below takes its normal path.
                self._broken_pipe_resp = None
                self._broken_pipe_resp = self.getresponse()
                reraise = False
            self.close()
        if reraise:
            raise
670
670
671
671
def wrapgetresponse(cls):
    """Wraps getresponse in cls with a broken-pipe sane version."""

    def safegetresponse(self):
        # safesend() may have stashed a response harvested from a
        # broken-pipe socket; if so the socket is already closed and we
        # just hand that response back. Otherwise take the normal path.
        cached = getattr(self, '_broken_pipe_resp', None)
        if cached is None:
            return cls.getresponse(self)
        return cached

    safegetresponse.__doc__ = cls.getresponse.__doc__
    return safegetresponse
687
687
688
688
class HTTPConnection(httplib.HTTPConnection):
    """httplib.HTTPConnection wired up with the patched response class,
    the broken-pipe-safe send/getresponse pair, and byte counters."""

    # url.httpsconnection inherits from this. So when adding/removing
    # attributes, be sure to audit httpsconnection() for unintended
    # consequences.

    # use the modified response class
    response_class = HTTPResponse
    # send() that survives EPIPE mid-request (see safesend above)
    send = safesend
    # getresponse() that can return a response captured during EPIPE
    getresponse = wrapgetresponse(httplib.HTTPConnection)

    def __init__(self, *args, **kwargs):
        httplib.HTTPConnection.__init__(self, *args, **kwargs)
        # running totals of bytes sent/received through this connection
        self.sentbytescount = 0
        self.receivedbytescount = 0
704
704
705 #########################################################################
705 #########################################################################
706 ##### TEST FUNCTIONS
706 ##### TEST FUNCTIONS
707 #########################################################################
707 #########################################################################
708
708
709
709
def continuity(url):
    """Fetch ``url`` three ways and print the md5 of each payload.

    The three digests (plain urllib, keepalive read(), keepalive
    readline()) should be identical if nothing is corrupted.

    Fix: the original printed ``hex(m.digest())``, but the builtin
    ``hex()`` only accepts integers and raises TypeError on the bytes
    digest; use ``hexdigest()`` encoded for the bytes format string.
    (The local ``format`` also shadowed the builtin; renamed.)
    """
    md5 = hashlib.md5
    fmt = b'%25s: %s'

    def _digest(data):
        # hexdigest() returns str; encode it for bytes %-formatting.
        return md5(data).hexdigest().encode('ascii')

    # first fetch the file with the normal http handler
    opener = urlreq.buildopener()
    urlreq.installopener(opener)
    fo = urlreq.urlopen(url)
    foo = fo.read()
    fo.close()
    print(fmt % (b'normal urllib', _digest(foo)))

    # now install the keepalive handler and try again
    opener = urlreq.buildopener(HTTPHandler())
    urlreq.installopener(opener)

    fo = urlreq.urlopen(url)
    foo = fo.read()
    fo.close()
    print(fmt % (b'keepalive read', _digest(foo)))

    fo = urlreq.urlopen(url)
    foo = b''
    while True:
        f = fo.readline()
        if f:
            foo = foo + f
        else:
            break
    fo.close()
    print(fmt % (b'keepalive readline', _digest(foo)))
744
744
745
745
def comp(N, url):
    """Time N fetches of ``url`` with the stock urllib opener, then with
    the keepalive handler installed, and print the speedup factor."""
    print(b' making %i connections to:\n %s' % (N, url))

    # Baseline: the default urllib handler stack.
    procutil.stdout.write(b' first using the normal urllib handlers')
    vanilla = urlreq.buildopener()
    urlreq.installopener(vanilla)
    baseline = fetch(N, url)
    print(b' TIME: %.3f s' % baseline)

    # Same fetches, now routed through the keepalive handler.
    procutil.stdout.write(b' now using the keepalive handler ')
    reusing = urlreq.buildopener(HTTPHandler())
    urlreq.installopener(reusing)
    improved = fetch(N, url)
    print(b' TIME: %.3f s' % improved)
    print(b' improvement factor: %.2f' % (baseline / improved))
763
763
764
764
def fetch(N, url, delay=0):
    """Fetch ``url`` N times (sleeping ``delay`` seconds between
    requests) and return the total elapsed time in seconds.

    Warns about any fetch whose payload length differs from the first
    one. Idiom cleanup: use enumerate() instead of a hand-maintained
    counter, and ``!=`` instead of ``not ... ==``; behavior unchanged.
    """
    import time

    lens = []
    starttime = time.time()
    for i in range(N):
        if delay and i > 0:
            time.sleep(delay)
        fo = urlreq.urlopen(url)
        foo = fo.read()
        fo.close()
        lens.append(len(foo))
    diff = time.time() - starttime

    # Compare every subsequent read against the first; j is 1-based to
    # match the original warning numbering.
    for j, length in enumerate(lens[1:], 1):
        if length != lens[0]:
            print(b"WARNING: inconsistent length on read %i: %i" % (j, length))

    return diff
786
786
787
787
def test_timeout(url):
    # Fetch once, wait long enough for the server to drop the idle
    # connection, fetch again, and verify both payloads match. Swaps
    # the module-level DEBUG logger for a printing one for the duration.
    global DEBUG
    dbbackup = DEBUG

    class FakeLogger(object):
        # Minimal logger: every level prints the formatted message.
        def debug(self, msg, *args):
            print(msg % args)

        info = warning = error = debug

    DEBUG = FakeLogger()
    print(b" fetching the file to establish a connection")
    fo = urlreq.urlopen(url)
    data1 = fo.read()
    fo.close()

    i = 20
    print(b" waiting %i seconds for the server to close the connection" % i)
    while i > 0:
        # Countdown display, rewriting the same line via carriage return.
        procutil.stdout.write(b'\r %2i' % i)
        procutil.stdout.flush()
        time.sleep(1)
        i -= 1
    # NOTE(review): the countdown above goes to stdout but this final
    # carriage return goes to stderr -- looks unintentional; confirm.
    procutil.stderr.write(b'\r')

    print(b" fetching the file a second time")
    fo = urlreq.urlopen(url)
    data2 = fo.read()
    fo.close()

    if data1 == data2:
        print(b' data are identical')
    else:
        print(b' ERROR: DATA DIFFER')

    # Restore the real logger.
    DEBUG = dbbackup
824
824
825
825
def test(url, N=10):
    """Run the full keepalive test suite against ``url``.

    In order: the continuity (correctness) check, the speed comparison
    using ``N`` fetches, and the dropped-connection/timeout check.
    """
    print(b"performing continuity test (making sure stuff isn't corrupted)")
    continuity(url)
    print(b'')
    print(b"performing speed comparison")
    comp(N, url)
    print(b'')
    print(b"performing dropped-connection check")
    test_timeout(url)
835
835
836
836
if __name__ == '__main__':
    # time is used by test_timeout() via the module-level name.
    import time

    try:
        N = int(sys.argv[1])
        url = sys.argv[2]
    except (IndexError, ValueError):
        # sys.argv entries are str, and %s in a bytes format string only
        # accepts bytes-like arguments (PEP 461) -- the original
        # b"%s ..." % sys.argv[0] raised TypeError. Encode first.
        print(b"%s <integer> <url>" % sys.argv[0].encode('utf-8', 'replace'))
    else:
        test(url, N)
General Comments 0
You need to be logged in to leave comments. Login now