py3: handle keyword arguments correctly in keepalive.py...
Pulkit Goyal
r35365:03112a2c default
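For context on the change below: Mercurial's source transformer turns unprefixed string literals into byte strings on Python 3, so the skipheaders dict built in _start_transaction() ends up with bytes keys, and Python 3 refuses to **-expand a mapping whose keys are not str ("keywords must be strings"). pycompat.strkwargs() converts the keys to native strings before expansion; the r'' prefixes added to the extrakw keys in HTTPResponse.__init__() serve the same purpose, since prefixed literals are exempt from the bytes transformation. A minimal sketch of the failure and the fix, using an illustrative stand-in for strkwargs (not Mercurial's exact implementation):

    def putrequest_stub(method, selector, **kwargs):
        # stands in for httplib's HTTPConnection.putrequest()
        return kwargs

    skipheaders = {b'skip_host': 1, b'skip_accept_encoding': 1}

    try:
        putrequest_stub('GET', '/', **skipheaders)
    except TypeError as err:
        print(err)  # Python 3: keywords must be strings

    def strkwargs(dic):
        # illustrative stand-in for pycompat.strkwargs(): decode bytes keys
        return {k.decode('latin-1'): v for k, v in dic.items()}

    print(putrequest_stub('GET', '/', **strkwargs(skipheaders)))
    # -> {'skip_host': 1, 'skip_accept_encoding': 1}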
@@ -1,726 +1,726
1 1 # This library is free software; you can redistribute it and/or
2 2 # modify it under the terms of the GNU Lesser General Public
3 3 # License as published by the Free Software Foundation; either
4 4 # version 2.1 of the License, or (at your option) any later version.
5 5 #
6 6 # This library is distributed in the hope that it will be useful,
7 7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 9 # Lesser General Public License for more details.
10 10 #
11 11 # You should have received a copy of the GNU Lesser General Public
12 12 # License along with this library; if not, see
13 13 # <http://www.gnu.org/licenses/>.
14 14
15 15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
16 16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
17 17
18 18 # Modified by Benoit Boissinot:
19 19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
20 20 # Modified by Dirkjan Ochtman:
21 21 # - import md5 function from a local util module
22 22 # Modified by Augie Fackler:
23 23 # - add safesend method and use it to prevent broken pipe errors
24 24 # on large POST requests
25 25
26 26 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
27 27
28 28 >>> import urllib2
29 29 >>> from keepalive import HTTPHandler
30 30 >>> keepalive_handler = HTTPHandler()
31 31 >>> opener = urlreq.buildopener(keepalive_handler)
32 32 >>> urlreq.installopener(opener)
33 33 >>>
34 34 >>> fo = urlreq.urlopen('http://www.python.org')
35 35
36 36 If a connection to a given host is requested, and all of the existing
37 37 connections are still in use, another connection will be opened. If
38 38 the handler tries to use an existing connection but it fails in some
39 39 way, it will be closed and removed from the pool.
40 40
41 41 To remove the handler, simply re-run build_opener with no arguments, and
42 42 install that opener.
43 43
44 44 You can explicitly close connections by using the close_connection()
45 45 method of the returned file-like object (described below) or you can
46 46 use the handler methods:
47 47
48 48 close_connection(host)
49 49 close_all()
50 50 open_connections()
51 51
52 52 NOTE: using the close_connection and close_all methods of the handler
53 53 should be done with care when using multiple threads.
54 54 * there is nothing that prevents another thread from creating new
55 55 connections immediately after connections are closed
56 56 * no checks are done to prevent in-use connections from being closed
57 57
58 58 >>> keepalive_handler.close_all()
59 59
60 60 EXTRA ATTRIBUTES AND METHODS
61 61
62 62 Upon a status of 200, the object returned has a few additional
63 63 attributes and methods, which should not be used if you want to
64 64 remain consistent with the normal urllib2-returned objects:
65 65
66 66 close_connection() - close the connection to the host
67 67 readlines() - you know, readlines()
68 68 status - the return status (e.g. 404)
69 69 reason - English translation of status (e.g. 'File not found')
70 70
71 71 If you want the best of both worlds, use this inside an
72 72 AttributeError-catching try:
73 73
74 74 >>> try: status = fo.status
75 75 >>> except AttributeError: status = None
76 76
77 77 Unfortunately, these are ONLY there if status == 200, so it's not
78 78 easy to distinguish between non-200 responses. The reason is that
79 79 urllib2 tries to do clever things with error codes 301, 302, 401,
80 80 and 407, and it wraps the object upon return.
81 81 """
82 82
83 83 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
84 84
85 85 from __future__ import absolute_import, print_function
86 86
87 87 import errno
88 88 import hashlib
89 89 import socket
90 90 import sys
91 91 import threading
92 92
93 93 from .i18n import _
94 94 from . import (
95 95 pycompat,
96 96 urllibcompat,
97 97 util,
98 98 )
99 99
100 100 httplib = util.httplib
101 101 urlerr = util.urlerr
102 102 urlreq = util.urlreq
103 103
104 104 DEBUG = None
105 105
106 106 class ConnectionManager(object):
107 107 """
108 108 The connection manager must be able to:
109 109 * keep track of all existing connections
110 110 """
111 111 def __init__(self):
112 112 self._lock = threading.Lock()
113 113 self._hostmap = {} # map hosts to a list of connections
114 114 self._connmap = {} # map connections to host
115 115 self._readymap = {} # map connection to ready state
116 116
117 117 def add(self, host, connection, ready):
118 118 self._lock.acquire()
119 119 try:
120 120 if host not in self._hostmap:
121 121 self._hostmap[host] = []
122 122 self._hostmap[host].append(connection)
123 123 self._connmap[connection] = host
124 124 self._readymap[connection] = ready
125 125 finally:
126 126 self._lock.release()
127 127
128 128 def remove(self, connection):
129 129 self._lock.acquire()
130 130 try:
131 131 try:
132 132 host = self._connmap[connection]
133 133 except KeyError:
134 134 pass
135 135 else:
136 136 del self._connmap[connection]
137 137 del self._readymap[connection]
138 138 self._hostmap[host].remove(connection)
139 139 if not self._hostmap[host]:
140 140 del self._hostmap[host]
141 141 finally:
142 142 self._lock.release()
143 143
144 144 def set_ready(self, connection, ready):
145 145 try:
146 146 self._readymap[connection] = ready
147 147 except KeyError:
148 148 pass
149 149
150 150 def get_ready_conn(self, host):
151 151 conn = None
152 152 self._lock.acquire()
153 153 try:
154 154 if host in self._hostmap:
155 155 for c in self._hostmap[host]:
156 156 if self._readymap[c]:
157 157 self._readymap[c] = 0
158 158 conn = c
159 159 break
160 160 finally:
161 161 self._lock.release()
162 162 return conn
163 163
164 164 def get_all(self, host=None):
165 165 if host:
166 166 return list(self._hostmap.get(host, []))
167 167 else:
168 168 return dict(self._hostmap)
169 169
170 170 class KeepAliveHandler(object):
171 171 def __init__(self):
172 172 self._cm = ConnectionManager()
173 173
174 174 #### Connection Management
175 175 def open_connections(self):
176 176 """return a list of connected hosts and the number of connections
177 177 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
178 178 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
179 179
180 180 def close_connection(self, host):
181 181 """close connection(s) to <host>
182 182 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
183 183 no error occurs if there is no connection to that host."""
184 184 for h in self._cm.get_all(host):
185 185 self._cm.remove(h)
186 186 h.close()
187 187
188 188 def close_all(self):
189 189 """close all open connections"""
190 190 for host, conns in self._cm.get_all().iteritems():
191 191 for h in conns:
192 192 self._cm.remove(h)
193 193 h.close()
194 194
195 195 def _request_closed(self, request, host, connection):
196 196 """tells us that this request is now closed and that the
197 197 connection is ready for another request"""
198 198 self._cm.set_ready(connection, 1)
199 199
200 200 def _remove_connection(self, host, connection, close=0):
201 201 if close:
202 202 connection.close()
203 203 self._cm.remove(connection)
204 204
205 205 #### Transaction Execution
206 206 def http_open(self, req):
207 207 return self.do_open(HTTPConnection, req)
208 208
209 209 def do_open(self, http_class, req):
210 210 host = urllibcompat.gethost(req)
211 211 if not host:
212 212 raise urlerr.urlerror('no host given')
213 213
214 214 try:
215 215 h = self._cm.get_ready_conn(host)
216 216 while h:
217 217 r = self._reuse_connection(h, req, host)
218 218
219 219 # if this response is non-None, then it worked and we're
220 220 # done. Break out, skipping the else block.
221 221 if r:
222 222 break
223 223
224 224 # connection is bad - possibly closed by server
225 225 # discard it and ask for the next free connection
226 226 h.close()
227 227 self._cm.remove(h)
228 228 h = self._cm.get_ready_conn(host)
229 229 else:
230 230 # no (working) free connections were found. Create a new one.
231 231 h = http_class(host)
232 232 if DEBUG:
233 233 DEBUG.info("creating new connection to %s (%d)",
234 234 host, id(h))
235 235 self._cm.add(host, h, 0)
236 236 self._start_transaction(h, req)
237 237 r = h.getresponse()
238 238 # The string form of BadStatusLine is the status line. Add some context
239 239 # to make the error message slightly more useful.
240 240 except httplib.BadStatusLine as err:
241 241 raise urlerr.urlerror(
242 242 _('bad HTTP status line: %s') % pycompat.sysbytes(err.line))
243 243 except (socket.error, httplib.HTTPException) as err:
244 244 raise urlerr.urlerror(err)
245 245
246 246 # if not a persistent connection, don't try to reuse it
247 247 if r.will_close:
248 248 self._cm.remove(h)
249 249
250 250 if DEBUG:
251 251 DEBUG.info("STATUS: %s, %s", r.status, r.reason)
252 252 r._handler = self
253 253 r._host = host
254 254 r._url = req.get_full_url()
255 255 r._connection = h
256 256 r.code = r.status
257 257 r.headers = r.msg
258 258 r.msg = r.reason
259 259
260 260 return r
261 261
262 262 def _reuse_connection(self, h, req, host):
263 263 """start the transaction with a re-used connection
264 264 return a response object (r) upon success or None on failure.
265 265 This does NOT close or remove bad connections in cases where
266 266 it returns. However, if an unexpected exception occurs, it
267 267 will close and remove the connection before re-raising.
268 268 """
269 269 try:
270 270 self._start_transaction(h, req)
271 271 r = h.getresponse()
272 272 # note: just because we got something back doesn't mean it
273 273 # worked. We'll check the version below, too.
274 274 except (socket.error, httplib.HTTPException):
275 275 r = None
276 276 except: # re-raises
277 277 # adding this block just in case we've missed
278 278 # something; we will still raise the exception, but
279 279 # let's try to close the connection and remove it
280 280 # first. We previously got into a nasty loop
281 281 # where an exception was uncaught, and so the
282 282 # connection stayed open. On the next try, the
283 283 # same exception was raised, etc. The trade-off is
284 284 # that it's now possible this call will raise
285 285 # a DIFFERENT exception
286 286 if DEBUG:
287 287 DEBUG.error("unexpected exception - closing "
288 288 "connection to %s (%d)", host, id(h))
289 289 self._cm.remove(h)
290 290 h.close()
291 291 raise
292 292
293 293 if r is None or r.version == 9:
294 294 # httplib falls back to assuming HTTP 0.9 if it gets a
295 295 # bad header back. This is most likely to happen if
296 296 # the socket has been closed by the server since we
297 297 # last used the connection.
298 298 if DEBUG:
299 299 DEBUG.info("failed to re-use connection to %s (%d)",
300 300 host, id(h))
301 301 r = None
302 302 else:
303 303 if DEBUG:
304 304 DEBUG.info("re-using connection to %s (%d)", host, id(h))
305 305
306 306 return r
307 307
308 308 def _start_transaction(self, h, req):
309 309 # What follows mostly reimplements HTTPConnection.request()
310 310 # except it adds self.parent.addheaders in the mix and sends headers
311 311 # in a deterministic order (to make testing easier).
312 312 headers = util.sortdict(self.parent.addheaders)
313 313 headers.update(sorted(req.headers.items()))
314 314 headers.update(sorted(req.unredirected_hdrs.items()))
315 315 headers = util.sortdict((n.lower(), v) for n, v in headers.items())
316 316 skipheaders = {}
317 317 for n in ('host', 'accept-encoding'):
318 318 if n in headers:
319 319 skipheaders['skip_' + n.replace('-', '_')] = 1
320 320 try:
321 321 if urllibcompat.hasdata(req):
322 322 data = urllibcompat.getdata(req)
323 323 h.putrequest(
324 324 req.get_method(), urllibcompat.getselector(req),
325 **skipheaders)
325 **pycompat.strkwargs(skipheaders))
326 326 if 'content-type' not in headers:
327 327 h.putheader('Content-type',
328 328 'application/x-www-form-urlencoded')
329 329 if 'content-length' not in headers:
330 330 h.putheader('Content-length', '%d' % len(data))
331 331 else:
332 332 h.putrequest(
333 333 req.get_method(), urllibcompat.getselector(req),
334 **skipheaders)
334 **pycompat.strkwargs(skipheaders))
335 335 except socket.error as err:
336 336 raise urlerr.urlerror(err)
337 337 for k, v in headers.items():
338 338 h.putheader(k, v)
339 339 h.endheaders()
340 340 if urllibcompat.hasdata(req):
341 341 h.send(data)
342 342
343 343 class HTTPHandler(KeepAliveHandler, urlreq.httphandler):
344 344 pass
345 345
346 346 class HTTPResponse(httplib.HTTPResponse):
347 347 # we need to subclass HTTPResponse in order to
348 348 # 1) add readline() and readlines() methods
349 349 # 2) add close_connection() methods
350 350 # 3) add info() and geturl() methods
351 351
352 352 # in order to add readline(), read must be modified to deal with a
353 353 # buffer. example: readline must read a buffer and then spit back
354 354 # one line at a time. The only real alternative is to read one
355 355 # BYTE at a time (ick). Once something has been read, it can't be
356 356 # put back (ok, maybe it can, but that's even uglier than this),
357 357 # so if you THEN do a normal read, you must first take stuff from
358 358 # the buffer.
359 359
360 360 # the read method wraps the original to accommodate buffering,
361 361 # although read() never adds to the buffer.
362 362 # Both readline and readlines have been stolen with almost no
363 363 # modification from socket.py
364 364
365 365
366 366 def __init__(self, sock, debuglevel=0, strict=0, method=None):
367 367 extrakw = {}
368 368 if not pycompat.ispy3:
369 extrakw['strict'] = True
370 extrakw['buffering'] = True
369 extrakw[r'strict'] = True
370 extrakw[r'buffering'] = True
371 371 httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
372 372 method=method, **extrakw)
373 373 self.fileno = sock.fileno
374 374 self.code = None
375 375 self._rbuf = ''
376 376 self._rbufsize = 8096
377 377 self._handler = None # inserted by the handler later
378 378 self._host = None # (same)
379 379 self._url = None # (same)
380 380 self._connection = None # (same)
381 381
382 382 _raw_read = httplib.HTTPResponse.read
383 383
384 384 def close(self):
385 385 if self.fp:
386 386 self.fp.close()
387 387 self.fp = None
388 388 if self._handler:
389 389 self._handler._request_closed(self, self._host,
390 390 self._connection)
391 391
392 392 def close_connection(self):
393 393 self._handler._remove_connection(self._host, self._connection, close=1)
394 394 self.close()
395 395
396 396 def info(self):
397 397 return self.headers
398 398
399 399 def geturl(self):
400 400 return self._url
401 401
402 402 def read(self, amt=None):
403 403 # the _rbuf test is only in this first if for speed. It's not
404 404 # logically necessary
405 405 if self._rbuf and amt is not None:
406 406 L = len(self._rbuf)
407 407 if amt > L:
408 408 amt -= L
409 409 else:
410 410 s = self._rbuf[:amt]
411 411 self._rbuf = self._rbuf[amt:]
412 412 return s
413 413
414 414 s = self._rbuf + self._raw_read(amt)
415 415 self._rbuf = ''
416 416 return s
417 417
418 418 # stolen from Python SVN #68532 to fix issue1088
419 419 def _read_chunked(self, amt):
420 420 chunk_left = self.chunk_left
421 421 parts = []
422 422
423 423 while True:
424 424 if chunk_left is None:
425 425 line = self.fp.readline()
426 426 i = line.find(';')
427 427 if i >= 0:
428 428 line = line[:i] # strip chunk-extensions
429 429 try:
430 430 chunk_left = int(line, 16)
431 431 except ValueError:
432 432 # close the connection as protocol synchronization is
433 433 # probably lost
434 434 self.close()
435 435 raise httplib.IncompleteRead(''.join(parts))
436 436 if chunk_left == 0:
437 437 break
438 438 if amt is None:
439 439 parts.append(self._safe_read(chunk_left))
440 440 elif amt < chunk_left:
441 441 parts.append(self._safe_read(amt))
442 442 self.chunk_left = chunk_left - amt
443 443 return ''.join(parts)
444 444 elif amt == chunk_left:
445 445 parts.append(self._safe_read(amt))
446 446 self._safe_read(2) # toss the CRLF at the end of the chunk
447 447 self.chunk_left = None
448 448 return ''.join(parts)
449 449 else:
450 450 parts.append(self._safe_read(chunk_left))
451 451 amt -= chunk_left
452 452
453 453 # we read the whole chunk, get another
454 454 self._safe_read(2) # toss the CRLF at the end of the chunk
455 455 chunk_left = None
456 456
457 457 # read and discard trailer up to the CRLF terminator
458 458 ### note: we shouldn't have any trailers!
459 459 while True:
460 460 line = self.fp.readline()
461 461 if not line:
462 462 # a vanishingly small number of sites EOF without
463 463 # sending the trailer
464 464 break
465 465 if line == '\r\n':
466 466 break
467 467
468 468 # we read everything; close the "file"
469 469 self.close()
470 470
471 471 return ''.join(parts)
472 472
473 473 def readline(self):
474 474 # Fast path: a line is already available in the read buffer.
475 475 i = self._rbuf.find('\n')
476 476 if i >= 0:
477 477 i += 1
478 478 line = self._rbuf[:i]
479 479 self._rbuf = self._rbuf[i:]
480 480 return line
481 481
482 482 # No newline in local buffer. Read until we find one.
483 483 chunks = [self._rbuf]
484 484 i = -1
485 485 readsize = self._rbufsize
486 486 while True:
487 487 new = self._raw_read(readsize)
488 488 if not new:
489 489 break
490 490
491 491 chunks.append(new)
492 492 i = new.find('\n')
493 493 if i >= 0:
494 494 break
495 495
496 496 # We either have exhausted the stream or have a newline in chunks[-1].
497 497
498 498 # EOF
499 499 if i == -1:
500 500 self._rbuf = ''
501 501 return ''.join(chunks)
502 502
503 503 i += 1
504 504 self._rbuf = chunks[-1][i:]
505 505 chunks[-1] = chunks[-1][:i]
506 506 return ''.join(chunks)
507 507
508 508 def readlines(self, sizehint=0):
509 509 total = 0
510 510 list = []
511 511 while True:
512 512 line = self.readline()
513 513 if not line:
514 514 break
515 515 list.append(line)
516 516 total += len(line)
517 517 if sizehint and total >= sizehint:
518 518 break
519 519 return list
520 520
521 521 def safesend(self, str):
522 522 """Send `str' to the server.
523 523
524 524 Shamelessly ripped off from httplib to patch a bad behavior.
525 525 """
526 526 # _broken_pipe_resp is an attribute we set in this function
527 527 # if the socket is closed while we're sending data but
528 528 # the server sent us a response before hanging up.
529 529 # In that case, we want to pretend to send the rest of the
530 530 # outgoing data, and then let the user use getresponse()
531 531 # (which we wrap) to get this last response before
532 532 # opening a new socket.
533 533 if getattr(self, '_broken_pipe_resp', None) is not None:
534 534 return
535 535
536 536 if self.sock is None:
537 537 if self.auto_open:
538 538 self.connect()
539 539 else:
540 540 raise httplib.NotConnected
541 541
542 542 # send the data to the server. if we get a broken pipe, then close
543 543 # the socket. we want to reconnect when somebody tries to send again.
544 544 #
545 545 # NOTE: we DO propagate the error, though, because we cannot simply
546 546 # ignore the error... the caller will know if they can retry.
547 547 if self.debuglevel > 0:
548 548 print("send:", repr(str))
549 549 try:
550 550 blocksize = 8192
551 551 read = getattr(str, 'read', None)
552 552 if read is not None:
553 553 if self.debuglevel > 0:
554 554 print("sending a read()able")
555 555 data = read(blocksize)
556 556 while data:
557 557 self.sock.sendall(data)
558 558 data = read(blocksize)
559 559 else:
560 560 self.sock.sendall(str)
561 561 except socket.error as v:
562 562 reraise = True
563 563 if v[0] == errno.EPIPE: # Broken pipe
564 564 if self._HTTPConnection__state == httplib._CS_REQ_SENT:
565 565 self._broken_pipe_resp = None
566 566 self._broken_pipe_resp = self.getresponse()
567 567 reraise = False
568 568 self.close()
569 569 if reraise:
570 570 raise
571 571
572 572 def wrapgetresponse(cls):
573 573 """Wraps getresponse in cls with a broken-pipe sane version.
574 574 """
575 575 def safegetresponse(self):
576 576 # In safesend() we might set the _broken_pipe_resp
577 577 # attribute, in which case the socket has already
578 578 # been closed and we just need to give them the response
579 579 # back. Otherwise, we use the normal response path.
580 580 r = getattr(self, '_broken_pipe_resp', None)
581 581 if r is not None:
582 582 return r
583 583 return cls.getresponse(self)
584 584 safegetresponse.__doc__ = cls.getresponse.__doc__
585 585 return safegetresponse
586 586
587 587 class HTTPConnection(httplib.HTTPConnection):
588 588 # use the modified response class
589 589 response_class = HTTPResponse
590 590 send = safesend
591 591 getresponse = wrapgetresponse(httplib.HTTPConnection)
592 592
593 593
594 594 #########################################################################
595 595 ##### TEST FUNCTIONS
596 596 #########################################################################
597 597
598 598
599 599 def continuity(url):
600 600 md5 = hashlib.md5
601 601 format = '%25s: %s'
602 602
603 603 # first fetch the file with the normal http handler
604 604 opener = urlreq.buildopener()
605 605 urlreq.installopener(opener)
606 606 fo = urlreq.urlopen(url)
607 607 foo = fo.read()
608 608 fo.close()
609 609 m = md5(foo)
610 610 print(format % ('normal urllib', m.hexdigest()))
611 611
612 612 # now install the keepalive handler and try again
613 613 opener = urlreq.buildopener(HTTPHandler())
614 614 urlreq.installopener(opener)
615 615
616 616 fo = urlreq.urlopen(url)
617 617 foo = fo.read()
618 618 fo.close()
619 619 m = md5(foo)
620 620 print(format % ('keepalive read', m.hexdigest()))
621 621
622 622 fo = urlreq.urlopen(url)
623 623 foo = ''
624 624 while True:
625 625 f = fo.readline()
626 626 if f:
627 627 foo = foo + f
628 628 else:
629 629 break
630 630 fo.close()
631 631 m = md5(foo)
632 632 print(format % ('keepalive readline', m.hexdigest()))
633 633
634 634 def comp(N, url):
635 635 print(' making %i connections to:\n %s' % (N, url))
636 636
637 637 util.stdout.write(' first using the normal urllib handlers')
638 638 # first use normal opener
639 639 opener = urlreq.buildopener()
640 640 urlreq.installopener(opener)
641 641 t1 = fetch(N, url)
642 642 print(' TIME: %.3f s' % t1)
643 643
644 644 util.stdout.write(' now using the keepalive handler ')
645 645 # now install the keepalive handler and try again
646 646 opener = urlreq.buildopener(HTTPHandler())
647 647 urlreq.installopener(opener)
648 648 t2 = fetch(N, url)
649 649 print(' TIME: %.3f s' % t2)
650 650 print(' improvement factor: %.2f' % (t1 / t2))
651 651
652 652 def fetch(N, url, delay=0):
653 653 import time
654 654 lens = []
655 655 starttime = time.time()
656 656 for i in range(N):
657 657 if delay and i > 0:
658 658 time.sleep(delay)
659 659 fo = urlreq.urlopen(url)
660 660 foo = fo.read()
661 661 fo.close()
662 662 lens.append(len(foo))
663 663 diff = time.time() - starttime
664 664
665 665 j = 0
666 666 for i in lens[1:]:
667 667 j = j + 1
668 668 if not i == lens[0]:
669 669 print("WARNING: inconsistent length on read %i: %i" % (j, i))
670 670
671 671 return diff
672 672
673 673 def test_timeout(url):
674 674 global DEBUG
675 675 dbbackup = DEBUG
676 676 class FakeLogger(object):
677 677 def debug(self, msg, *args):
678 678 print(msg % args)
679 679 info = warning = error = debug
680 680 DEBUG = FakeLogger()
681 681 print(" fetching the file to establish a connection")
682 682 fo = urlreq.urlopen(url)
683 683 data1 = fo.read()
684 684 fo.close()
685 685
686 686 i = 20
687 687 print(" waiting %i seconds for the server to close the connection" % i)
688 688 while i > 0:
689 689 util.stdout.write('\r %2i' % i)
690 690 util.stdout.flush()
691 691 time.sleep(1)
692 692 i -= 1
693 693 util.stderr.write('\r')
694 694
695 695 print(" fetching the file a second time")
696 696 fo = urlreq.urlopen(url)
697 697 data2 = fo.read()
698 698 fo.close()
699 699
700 700 if data1 == data2:
701 701 print(' data are identical')
702 702 else:
703 703 print(' ERROR: DATA DIFFER')
704 704
705 705 DEBUG = dbbackup
706 706
707 707
708 708 def test(url, N=10):
709 709 print("performing continuity test (making sure stuff isn't corrupted)")
710 710 continuity(url)
711 711 print('')
712 712 print("performing speed comparison")
713 713 comp(N, url)
714 714 print('')
715 715 print("performing dropped-connection check")
716 716 test_timeout(url)
717 717
718 718 if __name__ == '__main__':
719 719 import time
720 720 try:
721 721 N = int(sys.argv[1])
722 722 url = sys.argv[2]
723 723 except (IndexError, ValueError):
724 724 print("%s <integer> <url>" % sys.argv[0])
725 725 else:
726 726 test(url, N)
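An aside on the safesend()/wrapgetresponse() pair defined above: their contract is that if the server hangs up mid-send (EPIPE) after already producing a response, that early response is stashed on the connection and handed out by the next getresponse() call instead of being lost. A self-contained toy model of that pattern (not Mercurial's code; the class and names here are invented for illustration):

    class FakeConnection(object):
        '''Toy model of the broken-pipe recovery in safesend()/safegetresponse().'''

        def __init__(self, early_response):
            self._early_response = early_response
            self._broken_pipe_resp = None

        def _sendall(self, data):
            # pretend the peer replied early, then closed the socket on us
            raise BrokenPipeError('peer closed connection')

        def send(self, data):
            if self._broken_pipe_resp is not None:
                return  # pretend to send; a response is already queued
            try:
                self._sendall(data)
            except BrokenPipeError:
                # the peer answered before hanging up: keep that response
                self._broken_pipe_resp = self._early_response

        def getresponse(self):
            if self._broken_pipe_resp is not None:
                return self._broken_pipe_resp
            raise RuntimeError('no response available in this toy model')

    conn = FakeConnection('413 Request Entity Too Large')
    conn.send(b'first half of a large POST body')
    conn.send(b'second half')   # silently dropped, as in safesend()
    print(conn.getresponse())   # 413 Request Entity Too Large

(BrokenPipeError is the Python 3 spelling; the real code checks errno.EPIPE, which also covers Python 2.)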