keepalive: implement readinto()...
Gregory Szorc
r37313:97eedbd5 default
@@ -1,730 +1,738 @@
1 1 # This library is free software; you can redistribute it and/or
2 2 # modify it under the terms of the GNU Lesser General Public
3 3 # License as published by the Free Software Foundation; either
4 4 # version 2.1 of the License, or (at your option) any later version.
5 5 #
6 6 # This library is distributed in the hope that it will be useful,
7 7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 9 # Lesser General Public License for more details.
10 10 #
11 11 # You should have received a copy of the GNU Lesser General Public
12 12 # License along with this library; if not, see
13 13 # <http://www.gnu.org/licenses/>.
14 14
15 15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
16 16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
17 17
18 18 # Modified by Benoit Boissinot:
19 19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
20 20 # Modified by Dirkjan Ochtman:
21 21 # - import md5 function from a local util module
22 22 # Modified by Augie Fackler:
23 23 # - add safesend method and use it to prevent broken pipe errors
24 24 # on large POST requests
25 25
26 26 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
27 27
28 28 >>> from mercurial import util; urlreq = util.urlreq
29 29 >>> from mercurial.keepalive import HTTPHandler
30 30 >>> keepalive_handler = HTTPHandler()
31 31 >>> opener = urlreq.buildopener(keepalive_handler)
32 32 >>> urlreq.installopener(opener)
33 33 >>>
34 34 >>> fo = urlreq.urlopen('http://www.python.org')
35 35
36 36 If a connection to a given host is requested, and all of the existing
37 37 connections are still in use, another connection will be opened. If
38 38 the handler tries to use an existing connection but it fails in some
39 39 way, it will be closed and removed from the pool.
40 40
41 41 To remove the handler, simply re-run build_opener with no arguments, and
42 42 install that opener.
43 43
44 44 You can explicitly close connections by using the close_connection()
45 45 method of the returned file-like object (described below) or you can
46 46 use the handler methods:
47 47
48 48 close_connection(host)
49 49 close_all()
50 50 open_connections()
51 51
52 52 NOTE: using the close_connection and close_all methods of the handler
53 53 should be done with care when using multiple threads.
54 54 * there is nothing that prevents another thread from creating new
55 55 connections immediately after connections are closed
56 56 * no checks are done to prevent in-use connections from being closed
57 57
58 58 >>> keepalive_handler.close_all()
59 59
60 60 EXTRA ATTRIBUTES AND METHODS
61 61
62 62 Upon a status of 200, the object returned has a few additional
63 63 attributes and methods, which should not be used if you want to
64 64 remain consistent with the normal urllib2-returned objects:
65 65
66 66 close_connection() - close the connection to the host
67 67 readlines() - you know, readlines()
68 68 status - the return status (e.g. 404)
69 69 reason - English translation of status (e.g. 'File not found')
70 70
71 71 If you want the best of both worlds, guard the attribute access,
72 72 for example:
73 73
74 74 >>> status = getattr(fo, 'status', None)
76 76
77 77 Unfortunately, these are ONLY there if status == 200, so it's not
78 78 easy to distinguish between non-200 responses. The reason is that
79 79 urllib2 tries to do clever things with error codes 301, 302, 401,
80 80 and 407, and it wraps the object upon return.
81 81 """
82 82
83 83 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
84 84
85 85 from __future__ import absolute_import, print_function
86 86
87 87 import errno
88 88 import hashlib
89 89 import socket
90 90 import sys
91 91 import threading
92 92
93 93 from .i18n import _
94 94 from . import (
95 95 node,
96 96 pycompat,
97 97 urllibcompat,
98 98 util,
99 99 )
100 100 from .utils import (
101 101 procutil,
102 102 )
103 103
104 104 httplib = util.httplib
105 105 urlerr = util.urlerr
106 106 urlreq = util.urlreq
107 107
108 108 DEBUG = None
109 109
110 110 class ConnectionManager(object):
111 111 """
112 112 The connection manager must be able to:
113 113 * keep track of all existing connections
114 114 """
115 115 def __init__(self):
116 116 self._lock = threading.Lock()
117 117 self._hostmap = {} # map hosts to a list of connections
118 118 self._connmap = {} # map connections to host
119 119 self._readymap = {} # map connection to ready state
120 120
121 121 def add(self, host, connection, ready):
122 122 self._lock.acquire()
123 123 try:
124 124 if host not in self._hostmap:
125 125 self._hostmap[host] = []
126 126 self._hostmap[host].append(connection)
127 127 self._connmap[connection] = host
128 128 self._readymap[connection] = ready
129 129 finally:
130 130 self._lock.release()
131 131
132 132 def remove(self, connection):
133 133 self._lock.acquire()
134 134 try:
135 135 try:
136 136 host = self._connmap[connection]
137 137 except KeyError:
138 138 pass
139 139 else:
140 140 del self._connmap[connection]
141 141 del self._readymap[connection]
142 142 self._hostmap[host].remove(connection)
143 143 if not self._hostmap[host]:
144 144 del self._hostmap[host]
145 145 finally:
146 146 self._lock.release()
147 147
148 148 def set_ready(self, connection, ready):
149 149 try:
150 150 self._readymap[connection] = ready
151 151 except KeyError:
152 152 pass
153 153
154 154 def get_ready_conn(self, host):
155 155 conn = None
156 156 self._lock.acquire()
157 157 try:
158 158 if host in self._hostmap:
159 159 for c in self._hostmap[host]:
160 160 if self._readymap[c]:
161 161 self._readymap[c] = 0
162 162 conn = c
163 163 break
164 164 finally:
165 165 self._lock.release()
166 166 return conn
167 167
168 168 def get_all(self, host=None):
169 169 if host:
170 170 return list(self._hostmap.get(host, []))
171 171 else:
172 172 return dict(self._hostmap)
173 173
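The three maps above act as one unit: _hostmap for lookup by host, _connmap and _readymap for lookup by connection. A minimal sketch of the intended life cycle, using a plain object() as a stand-in connection (illustrative only, not part of this file):

    cm = ConnectionManager()
    conn = object()                    # stand-in for an HTTPConnection
    cm.add('example.com:80', conn, 0)  # ready=0: busy with a request
    assert cm.get_ready_conn('example.com:80') is None
    cm.set_ready(conn, 1)              # response consumed; reusable now
    assert cm.get_ready_conn('example.com:80') is conn  # flips ready to 0
    cm.remove(conn)                    # dropped from all three maps
    assert cm.get_all() == {}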
174 174 class KeepAliveHandler(object):
175 175 def __init__(self):
176 176 self._cm = ConnectionManager()
177 177
178 178 #### Connection Management
179 179 def open_connections(self):
180 180 """return a list of connected hosts and the number of connections
181 181 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
182 182 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
183 183
184 184 def close_connection(self, host):
185 185 """close connection(s) to <host>
186 186 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
187 187 no error occurs if there is no connection to that host."""
188 188 for h in self._cm.get_all(host):
189 189 self._cm.remove(h)
190 190 h.close()
191 191
192 192 def close_all(self):
193 193 """close all open connections"""
194 194 for host, conns in self._cm.get_all().items():
195 195 for h in conns:
196 196 self._cm.remove(h)
197 197 h.close()
198 198
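These three methods are the public inspection/teardown API promised in the module docstring. A hedged sketch of how a caller might use them (the host name is hypothetical):

    handler = HTTPHandler()
    opener = urlreq.buildopener(handler)
    opener.open('http://example.com/')       # connection stays pooled
    print(handler.open_connections())        # e.g. [('example.com', 1)]
    handler.close_connection('example.com')  # drop one host's connections
    handler.close_all()                      # or drop everything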
199 199 def _request_closed(self, request, host, connection):
200 200 """tells us that this request is now closed and that the
201 201 connection is ready for another request"""
202 202 self._cm.set_ready(connection, 1)
203 203
204 204 def _remove_connection(self, host, connection, close=0):
205 205 if close:
206 206 connection.close()
207 207 self._cm.remove(connection)
208 208
209 209 #### Transaction Execution
210 210 def http_open(self, req):
211 211 return self.do_open(HTTPConnection, req)
212 212
213 213 def do_open(self, http_class, req):
214 214 host = urllibcompat.gethost(req)
215 215 if not host:
216 216 raise urlerr.urlerror('no host given')
217 217
218 218 try:
219 219 h = self._cm.get_ready_conn(host)
220 220 while h:
221 221 r = self._reuse_connection(h, req, host)
222 222
223 223 # if this response is non-None, then it worked and we're
224 224 # done. Break out, skipping the else block.
225 225 if r:
226 226 break
227 227
228 228 # connection is bad - possibly closed by server
229 229 # discard it and ask for the next free connection
230 230 h.close()
231 231 self._cm.remove(h)
232 232 h = self._cm.get_ready_conn(host)
233 233 else:
234 234 # no (working) free connections were found. Create a new one.
235 235 h = http_class(host)
236 236 if DEBUG:
237 237 DEBUG.info("creating new connection to %s (%d)",
238 238 host, id(h))
239 239 self._cm.add(host, h, 0)
240 240 self._start_transaction(h, req)
241 241 r = h.getresponse()
242 242 # The string form of BadStatusLine is the status line. Add some context
243 243 # to make the error message slightly more useful.
244 244 except httplib.BadStatusLine as err:
245 245 raise urlerr.urlerror(
246 246 _('bad HTTP status line: %s') % pycompat.sysbytes(err.line))
247 247 except (socket.error, httplib.HTTPException) as err:
248 248 raise urlerr.urlerror(err)
249 249
250 250 # if not a persistent connection, don't try to reuse it
251 251 if r.will_close:
252 252 self._cm.remove(h)
253 253
254 254 if DEBUG:
255 255 DEBUG.info("STATUS: %s, %s", r.status, r.reason)
256 256 r._handler = self
257 257 r._host = host
258 258 r._url = req.get_full_url()
259 259 r._connection = h
260 260 r.code = r.status
261 261 r.headers = r.msg
262 262 r.msg = r.reason
263 263
264 264 return r
265 265
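do_open leans on Python's while/else: the else suite runs only when the loop condition goes false (every pooled connection was tried and discarded, or none existed), never when the loop exits via break with a working response. A toy model of that control flow, independent of this module:

    def pick(candidates):
        c = candidates.pop() if candidates else None
        while c:
            if c > 0:
                break                  # usable: skip the else suite
            c = candidates.pop() if candidates else None
        else:
            c = 99                     # nothing usable: 'create a new one'
        return c

    assert pick([3]) == 3      # break path
    assert pick([]) == 99      # else path, empty pool
    assert pick([-1]) == 99    # else path after discarding a bad candidate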
266 266 def _reuse_connection(self, h, req, host):
267 267 """start the transaction with a re-used connection
268 268 return a response object (r) upon success or None on failure.
269 269 This does NOT close or remove bad connections in cases where
270 270 it returns. However, if an unexpected exception occurs, it
271 271 will close and remove the connection before re-raising.
272 272 """
273 273 try:
274 274 self._start_transaction(h, req)
275 275 r = h.getresponse()
276 276 # note: just because we got something back doesn't mean it
277 277 # worked. We'll check the version below, too.
278 278 except (socket.error, httplib.HTTPException):
279 279 r = None
280 280 except: # re-raises
281 281 # adding this block just in case we've missed
282 282 # something; we will still raise the exception, but
283 283 # let's try to close the connection and remove it
284 284 # first. We previously got into a nasty loop
285 285 # where an exception was uncaught, and so the
286 286 # connection stayed open. On the next try, the
287 287 # same exception was raised, etc. The trade-off is
288 288 # that it's now possible this call will raise
289 289 # a DIFFERENT exception
290 290 if DEBUG:
291 291 DEBUG.error("unexpected exception - closing "
292 292 "connection to %s (%d)", host, id(h))
293 293 self._cm.remove(h)
294 294 h.close()
295 295 raise
296 296
297 297 if r is None or r.version == 9:
298 298 # httplib falls back to assuming HTTP 0.9 if it gets a
299 299 # bad header back. This is most likely to happen if
300 300 # the socket has been closed by the server since we
301 301 # last used the connection.
302 302 if DEBUG:
303 303 DEBUG.info("failed to re-use connection to %s (%d)",
304 304 host, id(h))
305 305 r = None
306 306 else:
307 307 if DEBUG:
308 308 DEBUG.info("re-using connection to %s (%d)", host, id(h))
309 309
310 310 return r
311 311
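The version == 9 test above works because httplib falls back to HTTP/0.9 whenever the status line is unparseable, which is the usual symptom of reading from a socket the server already closed between requests. The same probe, sketched standalone against a bare httplib connection (assumption: any conn with a getresponse() method):

    import socket
    import httplib  # http.client on Python 3

    def usable_response(conn):
        """Return a response if the reused socket still works, else None."""
        try:
            r = conn.getresponse()
        except (socket.error, httplib.HTTPException):
            return None
        return None if r.version == 9 else r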
312 312 def _start_transaction(self, h, req):
313 313 # What follows mostly reimplements HTTPConnection.request()
314 314 # except it adds self.parent.addheaders in the mix and sends headers
315 315 # in a deterministic order (to make testing easier).
316 316 headers = util.sortdict(self.parent.addheaders)
317 317 headers.update(sorted(req.headers.items()))
318 318 headers.update(sorted(req.unredirected_hdrs.items()))
319 319 headers = util.sortdict((n.lower(), v) for n, v in headers.items())
320 320 skipheaders = {}
321 321 for n in ('host', 'accept-encoding'):
322 322 if n in headers:
323 323 skipheaders['skip_' + n.replace('-', '_')] = 1
324 324 try:
325 325 if urllibcompat.hasdata(req):
326 326 data = urllibcompat.getdata(req)
327 327 h.putrequest(
328 328 req.get_method(), urllibcompat.getselector(req),
329 329 **pycompat.strkwargs(skipheaders))
330 330 if r'content-type' not in headers:
331 331 h.putheader(r'Content-type',
332 332 r'application/x-www-form-urlencoded')
333 333 if r'content-length' not in headers:
334 334 h.putheader(r'Content-length', r'%d' % len(data))
335 335 else:
336 336 h.putrequest(
337 337 req.get_method(), urllibcompat.getselector(req),
338 338 **pycompat.strkwargs(skipheaders))
339 339 except socket.error as err:
340 340 raise urlerr.urlerror(err)
341 341 for k, v in headers.items():
342 342 h.putheader(k, v)
343 343 h.endheaders()
344 344 if urllibcompat.hasdata(req):
345 345 h.send(data)
346 346
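The skip_* keywords matter: without them httplib's putrequest() emits its own Host and Accept-Encoding headers, duplicating the ones already carried on the request. A minimal sketch of the same pattern against plain httplib (hypothetical host):

    import httplib

    conn = httplib.HTTPConnection('example.com')
    conn.putrequest('GET', '/', skip_host=1, skip_accept_encoding=1)
    conn.putheader('Host', 'example.com')          # sent exactly once, ours
    conn.putheader('Accept-Encoding', 'identity')
    conn.endheaders()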
347 347 class HTTPHandler(KeepAliveHandler, urlreq.httphandler):
348 348 pass
349 349
350 350 class HTTPResponse(httplib.HTTPResponse):
351 351 # we need to subclass HTTPResponse in order to
352 # 1) add readline() and readlines() methods
352 # 1) add readline(), readlines(), and readinto() methods
353 353 # 2) add close_connection() method
354 354 # 3) add info() and geturl() methods
355 355
356 356 # in order to add readline(), read must be modified to deal with a
357 357 # buffer. example: readline must read a buffer and then spit back
358 358 # one line at a time. The only real alternative is to read one
359 359 # BYTE at a time (ick). Once something has been read, it can't be
360 360 # put back (ok, maybe it can, but that's even uglier than this),
361 361 # so if you THEN do a normal read, you must first take stuff from
362 362 # the buffer.
363 363
364 364 # the read method wraps the original to accommodate buffering,
365 365 # although read() never adds to the buffer.
366 366 # Both readline and readlines have been stolen with almost no
367 367 # modification from socket.py
368 368
369 369
370 370 def __init__(self, sock, debuglevel=0, strict=0, method=None):
371 371 extrakw = {}
372 372 if not pycompat.ispy3:
373 373 extrakw[r'strict'] = True
374 374 extrakw[r'buffering'] = True
375 375 httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
376 376 method=method, **extrakw)
377 377 self.fileno = sock.fileno
378 378 self.code = None
379 379 self._rbuf = ''
380 380 self._rbufsize = 8096
381 381 self._handler = None # inserted by the handler later
382 382 self._host = None # (same)
383 383 self._url = None # (same)
384 384 self._connection = None # (same)
385 385
386 386 _raw_read = httplib.HTTPResponse.read
387 387
388 388 def close(self):
389 389 if self.fp:
390 390 self.fp.close()
391 391 self.fp = None
392 392 if self._handler:
393 393 self._handler._request_closed(self, self._host,
394 394 self._connection)
395 395
396 396 def close_connection(self):
397 397 self._handler._remove_connection(self._host, self._connection, close=1)
398 398 self.close()
399 399
400 400 def info(self):
401 401 return self.headers
402 402
403 403 def geturl(self):
404 404 return self._url
405 405
406 406 def read(self, amt=None):
407 407 # the _rbuf test is only in this first if for speed. It's not
408 408 # logically necessary
409 409 if self._rbuf and amt is not None:
410 410 L = len(self._rbuf)
411 411 if amt > L:
412 412 amt -= L
413 413 else:
414 414 s = self._rbuf[:amt]
415 415 self._rbuf = self._rbuf[amt:]
416 416 return s
417 417
418 418 s = self._rbuf + self._raw_read(amt)
419 419 self._rbuf = ''
420 420 return s
421 421
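Because read() always drains _rbuf before touching the socket, interleaved readline()/read() calls see the body strictly in order. An illustration of the invariant, assuming a hypothetical response r whose remaining body is 'ab\ncd':

    assert r.readline() == 'ab\n'  # raw read overshoots; 'cd' lands in _rbuf
    assert r.read(1) == 'c'        # served from _rbuf, no socket read
    assert r.read() == 'd'         # buffer first, then the raw socket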
422 422 # stolen from Python SVN #68532 to fix issue1088
423 423 def _read_chunked(self, amt):
424 424 chunk_left = self.chunk_left
425 425 parts = []
426 426
427 427 while True:
428 428 if chunk_left is None:
429 429 line = self.fp.readline()
430 430 i = line.find(';')
431 431 if i >= 0:
432 432 line = line[:i] # strip chunk-extensions
433 433 try:
434 434 chunk_left = int(line, 16)
435 435 except ValueError:
436 436 # close the connection as protocol synchronization is
437 437 # probably lost
438 438 self.close()
439 439 raise httplib.IncompleteRead(''.join(parts))
440 440 if chunk_left == 0:
441 441 break
442 442 if amt is None:
443 443 parts.append(self._safe_read(chunk_left))
444 444 elif amt < chunk_left:
445 445 parts.append(self._safe_read(amt))
446 446 self.chunk_left = chunk_left - amt
447 447 return ''.join(parts)
448 448 elif amt == chunk_left:
449 449 parts.append(self._safe_read(amt))
450 450 self._safe_read(2) # toss the CRLF at the end of the chunk
451 451 self.chunk_left = None
452 452 return ''.join(parts)
453 453 else:
454 454 parts.append(self._safe_read(chunk_left))
455 455 amt -= chunk_left
456 456
457 457 # we read the whole chunk, get another
458 458 self._safe_read(2) # toss the CRLF at the end of the chunk
459 459 chunk_left = None
460 460
461 461 # read and discard trailer up to the CRLF terminator
462 462 ### note: we shouldn't have any trailers!
463 463 while True:
464 464 line = self.fp.readline()
465 465 if not line:
466 466 # a vanishingly small number of sites EOF without
467 467 # sending the trailer
468 468 break
469 469 if line == '\r\n':
470 470 break
471 471
472 472 # we read everything; close the "file"
473 473 self.close()
474 474
475 475 return ''.join(parts)
476 476
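The parser above walks exactly the chunked-transfer grammar: a hex size line, that many bytes, a trailing CRLF, repeated until a zero-size chunk. A sketch of the inverse (an encoder producing that framing) makes the wire format concrete; it is illustrative only:

    def chunked(*parts):
        """Encode byte strings in HTTP/1.1 chunked framing (sketch)."""
        out = []
        for p in parts:
            out.append('%x\r\n%s\r\n' % (len(p), p))
        out.append('0\r\n\r\n')    # zero-size chunk terminates the body
        return ''.join(out)

    assert chunked('Wiki', 'pedia') == '4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n'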
477 477 def readline(self):
478 478 # Fast path when a line is already available in the read buffer.
479 479 i = self._rbuf.find('\n')
480 480 if i >= 0:
481 481 i += 1
482 482 line = self._rbuf[:i]
483 483 self._rbuf = self._rbuf[i:]
484 484 return line
485 485
486 486 # No newline in local buffer. Read until we find one.
487 487 chunks = [self._rbuf]
488 488 i = -1
489 489 readsize = self._rbufsize
490 490 while True:
491 491 new = self._raw_read(readsize)
492 492 if not new:
493 493 break
494 494
495 495 chunks.append(new)
496 496 i = new.find('\n')
497 497 if i >= 0:
498 498 break
499 499
500 500 # We either have exhausted the stream or have a newline in chunks[-1].
501 501
502 502 # EOF
503 503 if i == -1:
504 504 self._rbuf = ''
505 505 return ''.join(chunks)
506 506
507 507 i += 1
508 508 self._rbuf = chunks[-1][i:]
509 509 chunks[-1] = chunks[-1][:i]
510 510 return ''.join(chunks)
511 511
512 512 def readlines(self, sizehint=0):
513 513 total = 0
514 514 list = []
515 515 while True:
516 516 line = self.readline()
517 517 if not line:
518 518 break
519 519 list.append(line)
520 520 total += len(line)
521 521 if sizehint and total >= sizehint:
522 522 break
523 523 return list
524 524
525 def readinto(self, dest):
526 res = self.read(len(dest))
527 if not res:
528 return 0
529
530 dest[0:len(res)] = res
531 return len(res)
532
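readinto() is the method this revision adds: it fills a caller-supplied buffer and returns the byte count, the contract io.BufferedReader and similar consumers rely on. A sketch of a typical caller, where resp stands for one of these responses and process() is a hypothetical consumer:

    buf = bytearray(16384)
    while True:
        n = resp.readinto(buf)
        if not n:
            break              # 0 signals EOF, per the io protocol
        process(buf[:n])       # only the first n bytes are valid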
525 533 def safesend(self, str):
526 534 """Send `str' to the server.
527 535
528 536 Shamelessly ripped off from httplib to patch a bad behavior.
529 537 """
530 538 # _broken_pipe_resp is an attribute we set in this function
531 539 # if the socket is closed while we're sending data but
532 540 # the server sent us a response before hanging up.
533 541 # In that case, we want to pretend to send the rest of the
534 542 # outgoing data, and then let the user use getresponse()
535 543 # (which we wrap) to get this last response before
536 544 # opening a new socket.
537 545 if getattr(self, '_broken_pipe_resp', None) is not None:
538 546 return
539 547
540 548 if self.sock is None:
541 549 if self.auto_open:
542 550 self.connect()
543 551 else:
544 552 raise httplib.NotConnected
545 553
546 554 # send the data to the server. if we get a broken pipe, then close
547 555 # the socket. we want to reconnect when somebody tries to send again.
548 556 #
549 557 # NOTE: we DO propagate the error, though, because we cannot simply
550 558 # ignore the error... the caller will know if they can retry.
551 559 if self.debuglevel > 0:
552 560 print("send:", repr(str))
553 561 try:
554 562 blocksize = 8192
555 563 read = getattr(str, 'read', None)
556 564 if read is not None:
557 565 if self.debuglevel > 0:
558 566 print("sending a read()able")
559 567 data = read(blocksize)
560 568 while data:
561 569 self.sock.sendall(data)
562 570 data = read(blocksize)
563 571 else:
564 572 self.sock.sendall(str)
565 573 except socket.error as v:
566 574 reraise = True
567 575 if v[0] == errno.EPIPE: # Broken pipe
568 576 if self._HTTPConnection__state == httplib._CS_REQ_SENT:
569 577 self._broken_pipe_resp = None
570 578 self._broken_pipe_resp = self.getresponse()
571 579 reraise = False
572 580 self.close()
573 581 if reraise:
574 582 raise
575 583
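Together with wrapgetresponse() below, the EPIPE branch means a server that rejects a large POST mid-upload (say, with a 401 challenge) still gets its response delivered instead of surfacing as a bare broken pipe. A hedged sketch of the caller-visible behavior; conn, bigbody, and the status value are illustrative:

    conn = HTTPConnection('example.com')
    conn.putrequest('POST', '/push')
    conn.putheader('Content-length', '%d' % len(bigbody))
    conn.endheaders()
    conn.send(bigbody)         # EPIPE swallowed if a reply was queued
    resp = conn.getresponse()  # safegetresponse returns the saved reply
    print(resp.status)         # e.g. 401, read before the hang-up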
576 584 def wrapgetresponse(cls):
577 585 """Wraps getresponse in cls with a broken-pipe sane version.
578 586 """
579 587 def safegetresponse(self):
580 588 # In safesend() we might set the _broken_pipe_resp
581 589 # attribute, in which case the socket has already
582 590 # been closed and we just need to give them the response
583 591 # back. Otherwise, we use the normal response path.
584 592 r = getattr(self, '_broken_pipe_resp', None)
585 593 if r is not None:
586 594 return r
587 595 return cls.getresponse(self)
588 596 safegetresponse.__doc__ = cls.getresponse.__doc__
589 597 return safegetresponse
590 598
591 599 class HTTPConnection(httplib.HTTPConnection):
592 600 # use the modified response class
593 601 response_class = HTTPResponse
594 602 send = safesend
595 603 getresponse = wrapgetresponse(httplib.HTTPConnection)
596 604
597 605
598 606 #########################################################################
599 607 ##### TEST FUNCTIONS
600 608 #########################################################################
601 609
602 610
603 611 def continuity(url):
604 612 md5 = hashlib.md5
605 613 format = '%25s: %s'
606 614
607 615 # first fetch the file with the normal http handler
608 616 opener = urlreq.buildopener()
609 617 urlreq.installopener(opener)
610 618 fo = urlreq.urlopen(url)
611 619 foo = fo.read()
612 620 fo.close()
613 621 m = md5(foo)
614 622 print(format % ('normal urllib', node.hex(m.digest())))
615 623
616 624 # now install the keepalive handler and try again
617 625 opener = urlreq.buildopener(HTTPHandler())
618 626 urlreq.installopener(opener)
619 627
620 628 fo = urlreq.urlopen(url)
621 629 foo = fo.read()
622 630 fo.close()
623 631 m = md5(foo)
624 632 print(format % ('keepalive read', node.hex(m.digest())))
625 633
626 634 fo = urlreq.urlopen(url)
627 635 foo = ''
628 636 while True:
629 637 f = fo.readline()
630 638 if f:
631 639 foo = foo + f
632 640 else:
633 641 break
634 642 fo.close()
635 643 m = md5(foo)
636 644 print(format % ('keepalive readline', node.hex(m.digest())))
637 645
638 646 def comp(N, url):
639 647 print(' making %i connections to:\n %s' % (N, url))
640 648
641 649 procutil.stdout.write(' first using the normal urllib handlers')
642 650 # first use normal opener
643 651 opener = urlreq.buildopener()
644 652 urlreq.installopener(opener)
645 653 t1 = fetch(N, url)
646 654 print(' TIME: %.3f s' % t1)
647 655
648 656 procutil.stdout.write(' now using the keepalive handler ')
649 657 # now install the keepalive handler and try again
650 658 opener = urlreq.buildopener(HTTPHandler())
651 659 urlreq.installopener(opener)
652 660 t2 = fetch(N, url)
653 661 print(' TIME: %.3f s' % t2)
654 662 print(' improvement factor: %.2f' % (t1 / t2))
655 663
656 664 def fetch(N, url, delay=0):
657 665 import time
658 666 lens = []
659 667 starttime = time.time()
660 668 for i in range(N):
661 669 if delay and i > 0:
662 670 time.sleep(delay)
663 671 fo = urlreq.urlopen(url)
664 672 foo = fo.read()
665 673 fo.close()
666 674 lens.append(len(foo))
667 675 diff = time.time() - starttime
668 676
669 677 j = 0
670 678 for i in lens[1:]:
671 679 j = j + 1
672 680 if not i == lens[0]:
673 681 print("WARNING: inconsistent length on read %i: %i" % (j, i))
674 682
675 683 return diff
676 684
677 685 def test_timeout(url):
678 686 global DEBUG
679 687 dbbackup = DEBUG
680 688 class FakeLogger(object):
681 689 def debug(self, msg, *args):
682 690 print(msg % args)
683 691 info = warning = error = debug
684 692 DEBUG = FakeLogger()
685 693 print(" fetching the file to establish a connection")
686 694 fo = urlreq.urlopen(url)
687 695 data1 = fo.read()
688 696 fo.close()
689 697
690 698 i = 20
691 699 print(" waiting %i seconds for the server to close the connection" % i)
692 700 while i > 0:
693 701 procutil.stdout.write('\r %2i' % i)
694 702 procutil.stdout.flush()
695 703 time.sleep(1)
696 704 i -= 1
697 705 procutil.stderr.write('\r')
698 706
699 707 print(" fetching the file a second time")
700 708 fo = urlreq.urlopen(url)
701 709 data2 = fo.read()
702 710 fo.close()
703 711
704 712 if data1 == data2:
705 713 print(' data are identical')
706 714 else:
707 715 print(' ERROR: DATA DIFFER')
708 716
709 717 DEBUG = dbbackup
710 718
711 719
712 720 def test(url, N=10):
713 721 print("performing continuity test (making sure stuff isn't corrupted)")
714 722 continuity(url)
715 723 print('')
716 724 print("performing speed comparison")
717 725 comp(N, url)
718 726 print('')
719 727 print("performing dropped-connection check")
720 728 test_timeout(url)
721 729
722 730 if __name__ == '__main__':
723 731 import time
724 732 try:
725 733 N = int(sys.argv[1])
726 734 url = sys.argv[2]
727 735 except (IndexError, ValueError):
728 736 print("%s <integer> <url>" % sys.argv[0])
729 737 else:
730 738 test(url, N)