##// END OF EJS Templates
httpclient: update to 938f2107d6e2 of httpplus...
Augie Fackler -
r27601:1ad9da96 default
parent child Browse files
Show More
@@ -1,731 +1,768 b''
1 1 # Copyright 2010, Google Inc.
2 2 # All rights reserved.
3 3 #
4 4 # Redistribution and use in source and binary forms, with or without
5 5 # modification, are permitted provided that the following conditions are
6 6 # met:
7 7 #
8 8 # * Redistributions of source code must retain the above copyright
9 9 # notice, this list of conditions and the following disclaimer.
10 10 # * Redistributions in binary form must reproduce the above
11 11 # copyright notice, this list of conditions and the following disclaimer
12 12 # in the documentation and/or other materials provided with the
13 13 # distribution.
14 14 # * Neither the name of Google Inc. nor the names of its
15 15 # contributors may be used to endorse or promote products derived from
16 16 # this software without specific prior written permission.
17 17
18 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 29 """Improved HTTP/1.1 client library
30 30
31 31 This library contains an HTTPConnection which is similar to the one in
32 32 httplib, but has several additional features:
33 33
34 34 * supports keepalives natively
35 35 * uses select() to block for incoming data
36 36 * notices when the server responds early to a request
37 37 * implements ssl inline instead of in a different class
38 38 """
39 from __future__ import absolute_import
39 40
40 41 # Many functions in this file have too many arguments.
41 42 # pylint: disable=R0913
42 43
43 44 import cStringIO
44 45 import errno
45 46 import httplib
46 47 import logging
47 48 import rfc822
48 49 import select
49 50 import socket
50 51
51 import _readers
52 import socketutil
52 from . import (
53 _readers,
54 socketutil,
55 )
53 56
54 57 logger = logging.getLogger(__name__)
55 58
56 59 __all__ = ['HTTPConnection', 'HTTPResponse']
57 60
HTTP_VER_1_0 = 'HTTP/1.0'
HTTP_VER_1_1 = 'HTTP/1.1'

# Write requests in 32KiB chunks; read responses in up to 1MiB chunks.
OUTGOING_BUFFER_SIZE = 1 << 15
INCOMING_BUFFER_SIZE = 1 << 20

# Lower-cased header names used as keys throughout this module.
HDR_ACCEPT_ENCODING = 'accept-encoding'
HDR_CONNECTION_CTRL = 'connection'
HDR_CONTENT_LENGTH = 'content-length'
HDR_XFER_ENCODING = 'transfer-encoding'

XFER_ENCODING_CHUNKED = 'chunked'

CONNECTION_CLOSE = 'close'

EOL = '\r\n'
_END_HEADERS = EOL * 2

# Based on some searching around, 1 second seems like a reasonable
# default here.
TIMEOUT_ASSUME_CONTINUE = 1
TIMEOUT_DEFAULT = None
80 83
81 84
class HTTPResponse(object):
    """Response from an HTTP server.

    The response will continue to load as available. If you need the
    complete response before continuing, check the .complete() method.
    """
    def __init__(self, sock, timeout, method):
        # sock: the (non-blocking) socket the response is read from.
        self.sock = sock
        # method: the request method, which changes body handling
        # (HEAD and CONNECT responses carry no body).
        self.method = method
        # Raw bytes accumulated until the header block has been parsed.
        self.raw_response = ''
        self._headers_len = 0
        # Parsed headers; stays None until the header block is complete.
        self.headers = None
        # True once we know the server will close the socket to end
        # this response.
        self.will_close = False
        self.status_line = ''
        self.status = None
        # True if a "100 Continue" interim response was seen.
        self.continued = False
        self.http_version = None
        self.reason = None
        # Body reader; created in _load_response once headers are done.
        self._reader = None

        self._read_location = 0
        # Line ending in use; downgraded from CRLF if the server sends
        # bogus line endings (see _load_response).
        self._eol = EOL

        self._timeout = timeout

    @property
    def _end_headers(self):
        # The header/body separator for whatever EOL we detected.
        return self._eol * 2

    def complete(self):
        """Returns true if this response is completely loaded.

        Note that if this is a connection where complete means the
        socket is closed, this will nearly always return False, even
        in cases where all the data has actually been loaded.
        """
        if self._reader:
            return self._reader.done()

    def _close(self):
        # Tell the body reader the underlying socket is gone so it can
        # raise if that leaves the response truncated.
        if self._reader is not None:
            # We're a friend of the reader class here.
            # pylint: disable=W0212
            self._reader._close()

    def getheader(self, header, default=None):
        """Return a single response header (httplib-compatible)."""
        return self.headers.getheader(header, default=default)

    def getheaders(self):
        """Return all response headers as a list of (name, value)."""
        return self.headers.items()

    def readline(self):
        """Read a single line from the response body.

        This may block until either a line ending is found or the
        response is complete.
        """
        blocks = []
        while True:
            self._reader.readto('\n', blocks)

            if blocks and blocks[-1][-1] == '\n' or self.complete():
                break

            self._select()

        return ''.join(blocks)

    def read(self, length=None):
        """Read data from the response body."""
        # if length is None, unbounded read
        while (not self.complete()  # never select on a finished read
               and (not length  # unbounded, so we wait for complete()
                    or length > self._reader.available_data)):
            self._select()
        if not length:
            length = self._reader.available_data
        r = self._reader.read(length)
        if self.complete() and self.will_close:
            self.sock.close()
        return r

    def _select(self):
        """Wait for and consume one chunk of socket data.

        Returns True if data was loaded (or an SSL retry is needed),
        False if the socket was closed by the remote end. Raises
        HTTPTimeoutException if nothing was readable before timeout
        while the response is incomplete.
        """
        r, unused_write, unused_err = select.select(
            [self.sock], [], [], self._timeout)
        if not r:
            # socket was not readable. If the response is not
            # complete, raise a timeout.
            if not self.complete():
                logger.info('timed out with timeout of %s', self._timeout)
                raise HTTPTimeoutException('timeout reading data')
        try:
            data = self.sock.recv(INCOMING_BUFFER_SIZE)
        except socket.sslerror as e:
            if e.args[0] != socket.SSL_ERROR_WANT_READ:
                raise
            logger.debug('SSL_ERROR_WANT_READ in _select, should retry later')
            return True
        logger.debug('response read %d data during _select', len(data))
        # If the socket was readable and no data was read, that means
        # the socket was closed. Inform the reader (if any) so it can
        # raise an exception if this is an invalid situation.
        if not data:
            if self._reader:
                # We're a friend of the reader class here.
                # pylint: disable=W0212
                self._reader._close()
            return False
        else:
            self._load_response(data)
            return True

    # This method gets replaced by _load later, which confuses pylint.
    def _load_response(self, data):  # pylint: disable=E0202
        # Being here implies we're not at the end of the headers yet,
        # since at the end of this method if headers were completely
        # loaded we replace this method with the load() method of the
        # reader we created.
        self.raw_response += data
        # This is a bogus server with bad line endings
        if self._eol not in self.raw_response:
            for bad_eol in ('\n', '\r'):
                if (bad_eol in self.raw_response
                    # verify that bad_eol is not the end of the incoming data
                    # as this could be a response line that just got
                    # split between \r and \n.
                    and (self.raw_response.index(bad_eol) <
                         (len(self.raw_response) - 1))):
                    logger.info('bogus line endings detected, '
                                'using %r for EOL', bad_eol)
                    self._eol = bad_eol
                    break
        # exit early if not at end of headers
        if self._end_headers not in self.raw_response or self.headers:
            return

        # handle 100-continue response
        hdrs, body = self.raw_response.split(self._end_headers, 1)
        unused_http_ver, status = hdrs.split(' ', 1)
        if status.startswith('100'):
            self.raw_response = body
            self.continued = True
            logger.debug('continue seen, setting body to %r', body)
            return

        # arriving here means we should parse response headers
        # as all headers have arrived completely
        hdrs, body = self.raw_response.split(self._end_headers, 1)
        del self.raw_response
        if self._eol in hdrs:
            self.status_line, hdrs = hdrs.split(self._eol, 1)
        else:
            self.status_line = hdrs
            hdrs = ''
        # TODO HTTP < 1.0 support
        (self.http_version, self.status,
         self.reason) = self.status_line.split(' ', 2)
        self.status = int(self.status)
        if self._eol != EOL:
            hdrs = hdrs.replace(self._eol, '\r\n')
        headers = rfc822.Message(cStringIO.StringIO(hdrs))
        content_len = None
        if HDR_CONTENT_LENGTH in headers:
            content_len = int(headers[HDR_CONTENT_LENGTH])
        if self.http_version == HTTP_VER_1_0:
            self.will_close = True
        elif HDR_CONNECTION_CTRL in headers:
            self.will_close = (
                headers[HDR_CONNECTION_CTRL].lower() == CONNECTION_CLOSE)
        if (HDR_XFER_ENCODING in headers
            and headers[HDR_XFER_ENCODING].lower() == XFER_ENCODING_CHUNKED):
            self._reader = _readers.ChunkedReader(self._eol)
            logger.debug('using a chunked reader')
        else:
            # HEAD responses are forbidden from returning a body, and
            # it's implausible for a CONNECT response to use
            # close-is-end logic for an OK response.
            if (self.method == 'HEAD' or
                (self.method == 'CONNECT' and content_len is None)):
                content_len = 0
            if content_len is not None:
                logger.debug('using a content-length reader with length %d',
                             content_len)
                self._reader = _readers.ContentLengthReader(content_len)
            else:
                # Response body had no length specified and is not
                # chunked, so the end of the body will only be
                # identifiable by the termination of the socket by the
                # server. My interpretation of the spec means that we
                # are correct in hitting this case if
                # transfer-encoding, content-length, and
                # connection-control were left unspecified.
                self._reader = _readers.CloseIsEndReader()
                logger.debug('using a close-is-end reader')
                self.will_close = True

        if body:
            # We're a friend of the reader class here.
            # pylint: disable=W0212
            self._reader._load(body)
        logger.debug('headers complete')
        self.headers = headers
        # We're a friend of the reader class here.
        # pylint: disable=W0212
        self._load_response = self._reader._load
281 290
291 def _foldheaders(headers):
292 """Given some headers, rework them so we can safely overwrite values.
293
294 >>> _foldheaders({'Accept-Encoding': 'wat'})
295 {'accept-encoding': ('Accept-Encoding', 'wat')}
296 """
297 return dict((k.lower(), (k, v)) for k, v in headers.iteritems())
298
282 299
class HTTPConnection(object):
    """Connection to a single http server.

    Supports 100-continue and keepalives natively. Uses select() for
    non-blocking socket operations.
    """
    http_version = HTTP_VER_1_1
    response_class = HTTPResponse

    def __init__(self, host, port=None, use_ssl=None, ssl_validator=None,
                 timeout=TIMEOUT_DEFAULT,
                 continue_timeout=TIMEOUT_ASSUME_CONTINUE,
                 proxy_hostport=None, proxy_headers=None,
                 ssl_wrap_socket=None, **ssl_opts):
        """Create a new HTTPConnection.

        Args:
          host: The host to which we'll connect. May include a port
                (``host:port`` or ``[v6addr]:port``) when port is None.
          port: Optional. The port over which we'll connect. Default 80 for
                non-ssl, 443 for ssl.
          use_ssl: Optional. Whether to use ssl. Defaults to False if port is
                   not 443, true if port is 443.
          ssl_validator: a function(socket) to validate the ssl cert
          timeout: Optional. Connection timeout, default is TIMEOUT_DEFAULT.
          continue_timeout: Optional. Timeout for waiting on an expected
                   "100 Continue" response. Default is TIMEOUT_ASSUME_CONTINUE.
          proxy_hostport: Optional. Tuple of (host, port) to use as an http
                       proxy for the connection. Default is to not use a proxy.
          proxy_headers: Optional dict of header keys and values to send to
                         a proxy when using CONNECT. For compatibility with
                         httplib, the Proxy-Authorization header may be
                         specified in headers for request(), which will clobber
                         any such header specified here if specified. Providing
                         this option and not proxy_hostport will raise a
                         ValueError.
          ssl_wrap_socket: Optional function to use for wrapping
            sockets. If unspecified, the one from the ssl module will
            be used if available, or something that's compatible with
            it if on a Python older than 2.6.

        Any extra keyword arguments to this function will be provided
        to the ssl_wrap_socket method.
        """
        # Accept "host:port" / "[v6]:port" notation when no explicit
        # port was passed.
        if port is None and host.count(':') == 1 or ']:' in host:
            host, port = host.rsplit(':', 1)
            port = int(port)
            if '[' in host:
                host = host[1:-1]
        if ssl_wrap_socket is not None:
            self._ssl_wrap_socket = ssl_wrap_socket
        else:
            self._ssl_wrap_socket = socketutil.wrap_socket
        if use_ssl is None and port is None:
            use_ssl = False
            port = 80
        elif use_ssl is None:
            use_ssl = (port == 443)
        elif port is None:
            port = (use_ssl and 443 or 80)
        self.port = port
        if use_ssl and not socketutil.have_ssl:
            raise Exception('ssl requested but unavailable on this Python')
        self.ssl = use_ssl
        self.ssl_opts = ssl_opts
        self._ssl_validator = ssl_validator
        self.host = host
        self.sock = None
        self._current_response = None
        self._current_response_taken = False
        if proxy_hostport is None:
            self._proxy_host = self._proxy_port = None
            if proxy_headers:
                raise ValueError(
                    'proxy_headers may not be specified unless '
                    'proxy_hostport is also specified.')
            else:
                self._proxy_headers = {}
        else:
            self._proxy_host, self._proxy_port = proxy_hostport
            # Fold now so request() can overwrite entries
            # case-insensitively later.
            self._proxy_headers = _foldheaders(proxy_headers or {})

        self.timeout = timeout
        self.continue_timeout = continue_timeout

    def _connect(self, proxy_headers):
        """Connect to the host and port specified in __init__.

        proxy_headers: folded headers (as produced by _foldheaders) to
        send with a proxy CONNECT request; only used when tunneling
        SSL through a proxy. No-op if already connected.
        """
        if self.sock:
            return
        if self._proxy_host is not None:
            logger.info('Connecting to http proxy %s:%s',
                        self._proxy_host, self._proxy_port)
            sock = socketutil.create_connection((self._proxy_host,
                                                 self._proxy_port))
            if self.ssl:
                data = self._buildheaders('CONNECT', '%s:%d' % (self.host,
                                                                self.port),
                                          proxy_headers, HTTP_VER_1_0)
                sock.send(data)
                sock.setblocking(0)
                r = self.response_class(sock, self.timeout, 'CONNECT')
                timeout_exc = HTTPTimeoutException(
                    'Timed out waiting for CONNECT response from proxy')
                while not r.complete():
                    try:
                        # We're a friend of the response class, so let
                        # us use the private attribute.
                        # pylint: disable=W0212
                        if not r._select():
                            if not r.complete():
                                raise timeout_exc
                    except HTTPTimeoutException:
                        # This raise/except pattern looks goofy, but
                        # _select can raise the timeout as well as the
                        # loop body. I wish it wasn't this convoluted,
                        # but I don't have a better solution
                        # immediately handy.
                        raise timeout_exc
                if r.status != 200:
                    raise HTTPProxyConnectFailedException(
                        'Proxy connection failed: %d %s' % (r.status,
                                                            r.read()))
                logger.info('CONNECT (for SSL) to %s:%s via proxy succeeded.',
                            self.host, self.port)
        else:
            sock = socketutil.create_connection((self.host, self.port))
        if self.ssl:
            # This is the default, but in the case of proxied SSL
            # requests the proxy logic above will have cleared
            # blocking mode, so re-enable it just to be safe.
            sock.setblocking(1)
            logger.debug('wrapping socket for ssl with options %r',
                         self.ssl_opts)
            sock = self._ssl_wrap_socket(sock, **self.ssl_opts)
            if self._ssl_validator:
                self._ssl_validator(sock)
        sock.setblocking(0)
        self.sock = sock

    def _buildheaders(self, method, path, headers, http_ver):
        """Serialize a request line plus folded headers into one string.

        headers is a folded dict (lowercased-name -> (name, value));
        the Host and Accept-Encoding entries are (over)written here.
        """
        if self.ssl and self.port == 443 or self.port == 80:
            # default port for protocol, so leave it out
            hdrhost = self.host
        else:
            # include nonstandard port in header
            if ':' in self.host:  # must be IPv6
                hdrhost = '[%s]:%d' % (self.host, self.port)
            else:
                hdrhost = '%s:%d' % (self.host, self.port)
        if self._proxy_host and not self.ssl:
            # When talking to a regular http proxy we must send the
            # full URI, but in all other cases we must not (although
            # technically RFC 2616 says servers must accept our
            # request if we screw up, experimentally few do that
            # correctly.)
            assert path[0] == '/', 'path must start with a /'
            path = 'http://%s%s' % (hdrhost, path)
        outgoing = ['%s %s %s%s' % (method, path, http_ver, EOL)]
        headers['host'] = ('Host', hdrhost)
        headers[HDR_ACCEPT_ENCODING] = (HDR_ACCEPT_ENCODING, 'identity')
        for hdr, val in headers.itervalues():
            outgoing.append('%s: %s%s' % (hdr, val, EOL))
        outgoing.append(EOL)
        return ''.join(outgoing)

    def close(self):
        """Close the connection to the server.

        This is a no-op if the connection is already closed. The
        connection may automatically close if requested by the server
        or required by the nature of a response.
        """
        if self.sock is None:
            return
        self.sock.close()
        self.sock = None
        logger.info('closed connection to %s on %s', self.host, self.port)

    def busy(self):
        """Returns True if this connection object is currently in use.

        If a response is still pending, this will return True, even if
        the request has finished sending. In the future,
        HTTPConnection may transparently juggle multiple connections
        to the server, in which case this will be useful to detect if
        any of those connections is ready for use.
        """
        cr = self._current_response
        if cr is not None:
            if self._current_response_taken:
                if cr.will_close:
                    self.sock = None
                    self._current_response = None
                    return False
                elif cr.complete():
                    self._current_response = None
                    return False
            return True
        return False

    def _reconnect(self, where, pheaders):
        """Close and re-open the connection for a retry.

        where: short phase name used only for logging.
        pheaders: folded proxy headers forwarded to _connect.
        """
        logger.info('reconnecting during %s', where)
        self.close()
        self._connect(pheaders)

    def request(self, method, path, body=None, headers={},
                expect_continue=False):
        """Send a request to the server.

        For increased flexibility, this does not return the response
        object. Future versions of HTTPConnection that juggle multiple
        sockets will be able to send (for example) 5 requests all at
        once, and then let the requests arrive as data is
        available. Use the `getresponse()` method to retrieve the
        response.
        """
        if self.busy():
            raise httplib.CannotSendRequest(
                'Can not send another request before '
                'current response is read!')
        self._current_response_taken = False

        logger.info('sending %s request for %s to %s on port %s',
                    method, path, self.host, self.port)
        hdrs = _foldheaders(headers)
        if hdrs.get('expect', ('', ''))[1].lower() == '100-continue':
            expect_continue = True
        elif expect_continue:
            hdrs['expect'] = ('Expect', '100-Continue')
        # httplib compatibility: if the user specified a
        # proxy-authorization header, that's actually intended for a
        # proxy CONNECT action, not the real request, but only if
        # we're going to use a proxy.
        pheaders = dict(self._proxy_headers)
        if self._proxy_host and self.ssl:
            pa = hdrs.pop('proxy-authorization', None)
            if pa is not None:
                pheaders['proxy-authorization'] = pa

        chunked = False
        if body and HDR_CONTENT_LENGTH not in hdrs:
            if getattr(body, '__len__', False):
                hdrs[HDR_CONTENT_LENGTH] = (HDR_CONTENT_LENGTH, len(body))
            elif getattr(body, 'read', False):
                hdrs[HDR_XFER_ENCODING] = (HDR_XFER_ENCODING,
                                           XFER_ENCODING_CHUNKED)
                chunked = True
            else:
                raise BadRequestData('body has no __len__() nor read()')

        # If we're reusing the underlying socket, there are some
        # conditions where we'll want to retry, so make a note of the
        # state of self.sock
        fresh_socket = self.sock is None
        self._connect(pheaders)
        outgoing_headers = self._buildheaders(
            method, path, hdrs, self.http_version)
        response = None
        first = True

        while ((outgoing_headers or body)
               and not (response and response.complete())):
            select_timeout = self.timeout
            out = outgoing_headers or body
            blocking_on_continue = False
            if expect_continue and not outgoing_headers and not (
                response and (response.headers or response.continued)):
                logger.info(
                    'waiting up to %s seconds for'
                    ' continue response from server',
                    self.continue_timeout)
                select_timeout = self.continue_timeout
                blocking_on_continue = True
                out = False
            if out:
                w = [self.sock]
            else:
                w = []
            r, w, x = select.select([self.sock], w, [], select_timeout)
            # if we were expecting a 100 continue and it's been long
            # enough, just go ahead and assume it's ok. This is the
            # recommended behavior from the RFC.
            if r == w == x == []:
                if blocking_on_continue:
                    expect_continue = False
                    logger.info('no response to continue expectation from '
                                'server, optimistically sending request body')
                else:
                    raise HTTPTimeoutException('timeout sending data')
            was_first = first

            # incoming data
            if r:
                try:
                    try:
                        data = r[0].recv(INCOMING_BUFFER_SIZE)
                    except socket.sslerror as e:
                        if e.args[0] != socket.SSL_ERROR_WANT_READ:
                            raise
                        logger.debug('SSL_ERROR_WANT_READ while sending '
                                     'data, retrying...')
                        continue
                    if not data:
                        logger.info('socket appears closed in read')
                        self.sock = None
                        self._current_response = None
                        if response is not None:
                            # We're a friend of the response class, so let
                            # us use the private attribute.
                            # pylint: disable=W0212
                            response._close()
                        # This if/elif ladder is a bit subtle,
                        # comments in each branch should help.
                        if response is not None and response.complete():
                            # Server responded completely and then
                            # closed the socket. We should just shut
                            # things down and let the caller get their
                            # response.
                            logger.info('Got an early response, '
                                        'aborting remaining request.')
                            break
                        elif was_first and response is None:
                            # Most likely a keepalive that got killed
                            # on the server's end. Commonly happens
                            # after getting a really large response
                            # from the server.
                            logger.info(
                                'Connection appeared closed in read on first'
                                ' request loop iteration, will retry.')
                            self._reconnect('read', pheaders)
                            continue
                        else:
                            # We didn't just send the first data hunk,
                            # and either have a partial response or no
                            # response at all. There's really nothing
                            # meaningful we can do here.
                            raise HTTPStateError(
                                'Connection appears closed after '
                                'some request data was written, but the '
                                'response was missing or incomplete!')
                    logger.debug('read %d bytes in request()', len(data))
                    if response is None:
                        response = self.response_class(
                            r[0], self.timeout, method)
                    # We're a friend of the response class, so let us
                    # use the private attribute.
                    # pylint: disable=W0212
                    response._load_response(data)
                    # Jump to the next select() call so we load more
                    # data if the server is still sending us content.
                    continue
                except socket.error as e:
                    if e[0] != errno.EPIPE and not was_first:
                        raise

            # outgoing data
            if w and out:
                try:
                    if getattr(out, 'read', False):
                        # pylint guesses the type of out incorrectly here
                        # pylint: disable=E1103
                        data = out.read(OUTGOING_BUFFER_SIZE)
                        if not data:
                            continue
                        if len(data) < OUTGOING_BUFFER_SIZE:
                            if chunked:
                                body = '0' + EOL + EOL
                            else:
                                body = None
                        if chunked:
                            out = hex(len(data))[2:] + EOL + data + EOL
                        else:
                            out = data
                    amt = w[0].send(out)
                except socket.error as e:
                    if e[0] == socket.SSL_ERROR_WANT_WRITE and self.ssl:
                        # This means that SSL hasn't flushed its buffer into
                        # the socket yet.
                        # TODO: find a way to block on ssl flushing its buffer
                        # similar to selecting on a raw socket.
                        continue
                    if e[0] == errno.EWOULDBLOCK or e[0] == errno.EAGAIN:
                        continue
                    elif (e[0] not in (errno.ECONNRESET, errno.EPIPE)
                          and not first):
                        raise
                    self._reconnect('write', pheaders)
                    amt = self.sock.send(out)
                logger.debug('sent %d', amt)
                first = False
                if out is body:
                    body = out[amt:]
                else:
                    outgoing_headers = out[amt:]

        # close if the server response said to or responded before eating
        # the whole request
        if response is None:
            response = self.response_class(self.sock, self.timeout, method)
            if not fresh_socket:
                # We're a friend of the response class here.
                # pylint: disable=W0212
                if not response._select():
                    # This means the response failed to get any response
                    # data at all, and in all probability the socket was
                    # closed before the server even saw our request. Try
                    # the request again on a fresh socket.
                    logger.debug('response._select() failed during request().'
                                 ' Assuming request needs to be retried.')
                    self.sock = None
                    # Call this method explicitly to re-try the
                    # request. We don't use self.request() because
                    # some tools (notably Mercurial) expect to be able
                    # to subclass and redefine request(), and they
                    # don't have the same argspec as we do.
                    #
                    # TODO restructure sending of requests to avoid
                    # this recursion
                    return HTTPConnection.request(
                        self, method, path, body=body, headers=headers,
                        expect_continue=expect_continue)
        data_left = bool(outgoing_headers or body)
        if data_left:
            logger.info('stopped sending request early, '
                        'will close the socket to be safe.')
            response.will_close = True
        if response.will_close:
            # The socket will be closed by the response, so we disown
            # the socket
            self.sock = None
        self._current_response = response

    def getresponse(self):
        """Returns the response to the most recent request."""
        if self._current_response is None:
            raise httplib.ResponseNotReady()
        r = self._current_response
        while r.headers is None:
            # We're a friend of the response class, so let us use the
            # private attribute.
            # pylint: disable=W0212
            if not r._select() and not r.complete():
                raise _readers.HTTPRemoteClosedError()
        if r.will_close:
            self.sock = None
            self._current_response = None
        elif r.complete():
            self._current_response = None
        else:
            self._current_response_taken = True
        return r
711 748
712 749
class HTTPTimeoutException(httplib.HTTPException):
    """A timeout occurred while waiting on the server.

    Raised both when reading response data and when sending request
    data takes longer than the configured timeout.
    """
715 752
716 753
class BadRequestData(httplib.HTTPException):
    """Request body object has neither __len__ nor read.

    A body must either expose its length (so we can send a
    Content-Length header) or be readable (so we can send it chunked).
    """
719 756
720 757
class HTTPProxyConnectFailedException(httplib.HTTPException):
    """Connecting to the HTTP proxy failed.

    Raised when the proxy answers our CONNECT request with a non-200
    status.
    """
723 760
724 761
class HTTPStateError(httplib.HTTPException):
    """Invalid internal state encountered.

    For example: the connection closed mid-request after some data
    had already been written but before a complete response arrived.
    """
727 764
# Forward this exception type from _readers since it needs to be part
# of the public API of this module.
HTTPRemoteClosedError = _readers.HTTPRemoteClosedError
# no-check-code
@@ -1,232 +1,234 b''
1 1 # Copyright 2011, Google Inc.
2 2 # All rights reserved.
3 3 #
4 4 # Redistribution and use in source and binary forms, with or without
5 5 # modification, are permitted provided that the following conditions are
6 6 # met:
7 7 #
8 8 # * Redistributions of source code must retain the above copyright
9 9 # notice, this list of conditions and the following disclaimer.
10 10 # * Redistributions in binary form must reproduce the above
11 11 # copyright notice, this list of conditions and the following disclaimer
12 12 # in the documentation and/or other materials provided with the
13 13 # distribution.
14 14 # * Neither the name of Google Inc. nor the names of its
15 15 # contributors may be used to endorse or promote products derived from
16 16 # this software without specific prior written permission.
17 17
18 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 29 """Reader objects to abstract out different body response types.
30 30
31 31 This module is package-private. It is not expected that these will
32 32 have any clients outside of httpplus.
33 33 """
34 from __future__ import absolute_import
34 35
35 36 import httplib
36 37 import logging
37 38
38 39 logger = logging.getLogger(__name__)
39 40
40 41
class ReadNotReady(Exception):
    """Raised when read() is attempted but not enough data is loaded."""


class HTTPRemoteClosedError(httplib.HTTPException):
    """The server closed the remote socket in the middle of a response."""
    # Subclasses httplib.HTTPException so callers' existing httplib
    # error handling also covers premature-close failures.
47 48
48 49
class AbstractReader(object):
    """Abstract base class for response readers.

    Subclasses must implement _load, and should implement _close if
    it's not an error for the server to close their socket without
    some termination condition being detected during _load.
    """
    def __init__(self):
        # True once the entire response body has been seen.
        self._finished = False
        # FIFO of data chunks ready to be handed out by read().
        self._done_chunks = []
        # Total number of buffered bytes across _done_chunks.
        self.available_data = 0

    def _addchunk(self, data):
        """Append a chunk to the end of the ready queue."""
        self._done_chunks.append(data)
        self.available_data += len(data)

    def _pushchunk(self, data):
        """Put a chunk back at the front of the ready queue."""
        self._done_chunks.insert(0, data)
        self.available_data += len(data)

    def _popchunk(self):
        """Remove and return the chunk at the front of the queue."""
        b = self._done_chunks.pop(0)
        self.available_data -= len(b)

        return b

    def done(self):
        """Returns true if the response body is entirely read."""
        return self._finished

    def read(self, amt):
        """Read amt bytes from the response body.

        Raises ReadNotReady unless amt bytes are buffered or the
        response is finished (in which case a short read is legal).
        """
        if self.available_data < amt and not self._finished:
            raise ReadNotReady()
        blocks = []
        need = amt
        while self._done_chunks:
            b = self._popchunk()
            if len(b) > need:
                # Split the chunk; return the unneeded tail to the queue.
                nb = b[:need]
                self._pushchunk(b[need:])
                b = nb
            blocks.append(b)
            need -= len(b)
            if need == 0:
                break
        result = ''.join(blocks)
        # Only a finished response may return fewer bytes than asked.
        assert len(result) == amt or (self._finished and len(result) < amt)

        return result

    def readto(self, delimstr, blocks=None):
        """return available data chunks up to the first one in which
        delimstr occurs. No data will be returned after delimstr --
        the chunk in which it occurs will be split and the remainder
        pushed back onto the available data queue. If blocks is
        supplied chunks will be added to blocks, otherwise a new list
        will be allocated.
        """
        if blocks is None:
            blocks = []

        while self._done_chunks:
            b = self._popchunk()
            i = b.find(delimstr)
            if i == -1:
                # delimstr not in this chunk: keep scanning. (Fix: the
                # previous truthiness test on find()+len(delimstr)
                # mistook "not found" for a hit whenever
                # len(delimstr) > 1, splitting the chunk incorrectly.)
                blocks.append(b)
            else:
                i += len(delimstr)
                if i < len(b):
                    # Push the data after the delimiter back for later.
                    self._pushchunk(b[i:])
                blocks.append(b[:i])
                break

        return blocks

    def _load(self, data): # pragma: no cover
        """Subclasses must implement this.

        As data is available to be read out of this object, it should
        be placed into the _done_chunks list. Subclasses should not
        rely on data remaining in _done_chunks forever, as it may be
        reaped if the client is parsing data as it comes in.
        """
        raise NotImplementedError

    def _close(self):
        """Default implementation of close.

        The default implementation assumes that the reader will mark
        the response as finished on the _finished attribute once the
        entire response body has been read. In the event that this is
        not true, the subclass should override the implementation of
        close (for example, close-is-end responses have to set
        self._finished in the close handler.)
        """
        if not self._finished:
            raise HTTPRemoteClosedError(
                'server appears to have closed the socket mid-response')
146 148
147 149
class AbstractSimpleReader(AbstractReader):
    """Abstract base class for simple readers that require no response decoding.

    Examples of such responses are Connection: Close (close-is-end)
    and responses that specify a content length.
    """
    def _load(self, data):
        # An empty read is a no-op; returning early also keeps the
        # closed-reader assertion below from firing on harmless input.
        if not data:
            return
        assert not self._finished, (
            'tried to add data (%r) to a closed reader!' % data)
        logger.debug('%s read an additional %d data',
                     self.name, len(data)) # pylint: disable=E1101
        self._addchunk(data)
161 163
162 164
class CloseIsEndReader(AbstractSimpleReader):
    """Reader for responses that specify Connection: Close for length."""
    name = 'close-is-end'

    def _close(self):
        # For this reader the server closing the socket *is* the
        # end-of-body marker, so close finishes the response rather
        # than being an error.
        logger.info('Marking close-is-end reader as closed.')
        self._finished = True
170 172
171 173
class ContentLengthReader(AbstractSimpleReader):
    """Reader for responses that specify an exact content length."""
    name = 'content-length'

    def __init__(self, amount):
        AbstractSimpleReader.__init__(self)
        # Total number of body bytes announced by the server.
        self._amount = amount
        # Bytes observed so far via _load().
        self._amount_seen = 0
        # A zero-length body is complete before any data arrives.
        self._finished = (amount == 0)

    def _load(self, data):
        AbstractSimpleReader._load(self, data)
        self._amount_seen += len(data)
        if self._amount_seen >= self._amount:
            self._finished = True
            logger.debug('content-length read complete')
189 191
190 192
class ChunkedReader(AbstractReader):
    """Reader for chunked transfer encoding responses."""
    def __init__(self, eol):
        AbstractReader.__init__(self)
        # End-of-line sequence used by the server (typically '\r\n').
        self._eol = eol
        # Resume state for when a chunk spans two _load() calls:
        # _leftover_data holds the unconsumed tail of the last read,
        # _leftover_skip_amt is the offset at which to resume parsing.
        self._leftover_skip_amt = 0
        self._leftover_data = ''

    def _load(self, data):
        """Parse incoming chunked-encoding data into body chunks.

        Each wire chunk has the form "<hex length><eol><data><eol>";
        a length of zero terminates the body.
        """
        assert not self._finished, 'tried to add data to a closed reader!'
        logger.debug('chunked read an additional %d data', len(data))
        position = 0
        if self._leftover_data:
            logger.debug(
                'chunked reader trying to finish block from leftover data')
            # TODO: avoid this string concatenation if possible
            data = self._leftover_data + data
            position = self._leftover_skip_amt
            self._leftover_data = ''
            self._leftover_skip_amt = 0
        datalen = len(data)
        while position < datalen:
            split = data.find(self._eol, position)
            if split == -1:
                # The chunk-size line itself is incomplete; stash
                # everything and wait for more data.
                self._leftover_data = data
                self._leftover_skip_amt = position
                return
            amt = int(data[position:split], base=16)
            block_start = split + len(self._eol)
            # If the whole data chunk plus the eol trailer hasn't
            # loaded, we'll wait for the next load.
            if block_start + amt + len(self._eol) > len(data):
                self._leftover_data = data
                self._leftover_skip_amt = position
                return
            if amt == 0:
                # Zero-length chunk marks the end of the body.
                self._finished = True
                logger.debug('closing chunked reader due to chunk of length 0')
                return
            self._addchunk(data[block_start:block_start + amt])
            position = block_start + amt + len(self._eol)
232 234 # no-check-code
@@ -1,138 +1,140 b''
1 1 # Copyright 2010, Google Inc.
2 2 # All rights reserved.
3 3 #
4 4 # Redistribution and use in source and binary forms, with or without
5 5 # modification, are permitted provided that the following conditions are
6 6 # met:
7 7 #
8 8 # * Redistributions of source code must retain the above copyright
9 9 # notice, this list of conditions and the following disclaimer.
10 10 # * Redistributions in binary form must reproduce the above
11 11 # copyright notice, this list of conditions and the following disclaimer
12 12 # in the documentation and/or other materials provided with the
13 13 # distribution.
14 14 # * Neither the name of Google Inc. nor the names of its
15 15 # contributors may be used to endorse or promote products derived from
16 16 # this software without specific prior written permission.
17 17
18 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 29 """Abstraction to simplify socket use for Python < 2.6
30 30
31 31 This will attempt to use the ssl module and the new
32 32 socket.create_connection method, but fall back to the old
33 33 methods if those are unavailable.
34 34 """
35 from __future__ import absolute_import
36
35 37 import logging
36 38 import socket
37 39
38 40 logger = logging.getLogger(__name__)
39 41
# Probe for the ssl module (Python 2.6+); fall back to urllib2's HTTPS
# support on older interpreters.
try:
    import ssl
    # make demandimporters load the module
    ssl.wrap_socket # pylint: disable=W0104
    have_ssl = True
except ImportError:
    import httplib
    import urllib2
    # HTTPS is only usable if urllib2 was built with SSL support.
    have_ssl = getattr(urllib2, 'HTTPSHandler', False)
    ssl = False
50 52
51 53
try:
    # Prefer the stdlib implementation when available (Python >= 2.6).
    create_connection = socket.create_connection
except AttributeError:
    def create_connection(address):
        """Backport of socket.create_connection from Python 2.6.

        address is a (host, port) pair. Returns a connected socket,
        or raises socket.error if every candidate address fails.
        """
        host, port = address
        msg = "getaddrinfo returns an empty list"
        sock = None
        # Try each address getaddrinfo offers until one connects.
        for res in socket.getaddrinfo(host, port, 0,
                                      socket.SOCK_STREAM):
            af, socktype, proto, unused_canonname, sa = res
            try:
                sock = socket.socket(af, socktype, proto)
                logger.info("connect: (%s, %s)", host, port)
                sock.connect(sa)
            except socket.error as msg:
                # msg keeps the most recent failure so it can be
                # re-raised if no address succeeds.
                logger.info('connect fail: %s %s', host, port)
                if sock:
                    sock.close()
                sock = None
                continue
            break
        if not sock:
            raise socket.error(msg)
        return sock
77 79
if ssl:
    # Real ssl module present: re-export its API directly.
    wrap_socket = ssl.wrap_socket
    CERT_NONE = ssl.CERT_NONE
    CERT_OPTIONAL = ssl.CERT_OPTIONAL
    CERT_REQUIRED = ssl.CERT_REQUIRED
else:
    class FakeSocket(httplib.FakeSocket):
        """Socket wrapper that supports SSL."""

        # Silence lint about this goofy backport class
        # pylint: disable=W0232,E1101,R0903,R0913,C0111

        # backport the behavior from Python 2.6, which is to busy wait
        # on the socket instead of anything nice. Sigh.
        # See http://bugs.python.org/issue3890 for more info.
        def recv(self, buflen=1024, flags=0):
            """ssl-aware wrapper around socket.recv
            """
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to recv() on %s" %
                    self.__class__)
            while True:
                try:
                    return self._ssl.read(buflen)
                except socket.sslerror as x:
                    # Busy-wait until the SSL layer has data available.
                    if x.args[0] == socket.SSL_ERROR_WANT_READ:
                        continue
                    else:
                        raise x

    # Stand-in for ssl.PROTOCOL_SSLv23 when the ssl module is absent.
    _PROTOCOL_SSLv23 = 2

    CERT_NONE = 0
    CERT_OPTIONAL = 1
    CERT_REQUIRED = 2

    # Disable unused-argument because we're making a dumb wrapper
    # that's like an upstream method.
    #
    # pylint: disable=W0613,R0913
    def wrap_socket(sock, keyfile=None, certfile=None,
                    server_side=False, cert_reqs=CERT_NONE,
                    ssl_version=_PROTOCOL_SSLv23, ca_certs=None,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True):
        """Backport of ssl.wrap_socket from Python 2.6."""
        if cert_reqs != CERT_NONE and ca_certs:
            # Fail loudly instead of silently skipping verification.
            raise CertificateValidationUnsupported(
                'SSL certificate validation requires the ssl module'
                '(included in Python 2.6 and later.)')
        sslob = socket.ssl(sock)
        # borrow httplib's workaround for no ssl.wrap_socket
        sock = FakeSocket(sock, sslob)
        return sock
    # pylint: enable=W0613,R0913
134 136
135 137
# Raised by the wrap_socket backport when certificate validation is
# requested (cert_reqs + ca_certs) without the ssl module available.
class CertificateValidationUnsupported(Exception):
    """Exception raised when cert validation is requested but unavailable."""
# no-check-code
@@ -1,189 +1,186 b''
1 1 #require test-repo
2 2
3 3 $ cd "$TESTDIR"/..
4 4
5 5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
6 6 contrib/casesmash.py not using absolute_import
7 7 contrib/check-code.py not using absolute_import
8 8 contrib/check-code.py requires print_function
9 9 contrib/check-config.py not using absolute_import
10 10 contrib/check-config.py requires print_function
11 11 contrib/debugcmdserver.py not using absolute_import
12 12 contrib/debugcmdserver.py requires print_function
13 13 contrib/debugshell.py not using absolute_import
14 14 contrib/fixpax.py not using absolute_import
15 15 contrib/fixpax.py requires print_function
16 16 contrib/hgclient.py not using absolute_import
17 17 contrib/hgclient.py requires print_function
18 18 contrib/hgfixes/fix_bytes.py not using absolute_import
19 19 contrib/hgfixes/fix_bytesmod.py not using absolute_import
20 20 contrib/hgfixes/fix_leftover_imports.py not using absolute_import
21 21 contrib/import-checker.py not using absolute_import
22 22 contrib/import-checker.py requires print_function
23 23 contrib/memory.py not using absolute_import
24 24 contrib/perf.py not using absolute_import
25 25 contrib/python-hook-examples.py not using absolute_import
26 26 contrib/revsetbenchmarks.py not using absolute_import
27 27 contrib/revsetbenchmarks.py requires print_function
28 28 contrib/showstack.py not using absolute_import
29 29 contrib/synthrepo.py not using absolute_import
30 30 contrib/win32/hgwebdir_wsgi.py not using absolute_import
31 31 doc/check-seclevel.py not using absolute_import
32 32 doc/gendoc.py not using absolute_import
33 33 doc/hgmanpage.py not using absolute_import
34 34 hgext/__init__.py not using absolute_import
35 35 hgext/acl.py not using absolute_import
36 36 hgext/blackbox.py not using absolute_import
37 37 hgext/bugzilla.py not using absolute_import
38 38 hgext/censor.py not using absolute_import
39 39 hgext/children.py not using absolute_import
40 40 hgext/churn.py not using absolute_import
41 41 hgext/clonebundles.py not using absolute_import
42 42 hgext/color.py not using absolute_import
43 43 hgext/convert/__init__.py not using absolute_import
44 44 hgext/convert/bzr.py not using absolute_import
45 45 hgext/convert/common.py not using absolute_import
46 46 hgext/convert/convcmd.py not using absolute_import
47 47 hgext/convert/cvs.py not using absolute_import
48 48 hgext/convert/cvsps.py not using absolute_import
49 49 hgext/convert/darcs.py not using absolute_import
50 50 hgext/convert/filemap.py not using absolute_import
51 51 hgext/convert/git.py not using absolute_import
52 52 hgext/convert/gnuarch.py not using absolute_import
53 53 hgext/convert/hg.py not using absolute_import
54 54 hgext/convert/monotone.py not using absolute_import
55 55 hgext/convert/p4.py not using absolute_import
56 56 hgext/convert/subversion.py not using absolute_import
57 57 hgext/convert/transport.py not using absolute_import
58 58 hgext/eol.py not using absolute_import
59 59 hgext/extdiff.py not using absolute_import
60 60 hgext/factotum.py not using absolute_import
61 61 hgext/fetch.py not using absolute_import
62 62 hgext/gpg.py not using absolute_import
63 63 hgext/graphlog.py not using absolute_import
64 64 hgext/hgcia.py not using absolute_import
65 65 hgext/hgk.py not using absolute_import
66 66 hgext/highlight/__init__.py not using absolute_import
67 67 hgext/highlight/highlight.py not using absolute_import
68 68 hgext/histedit.py not using absolute_import
69 69 hgext/keyword.py not using absolute_import
70 70 hgext/largefiles/__init__.py not using absolute_import
71 71 hgext/largefiles/basestore.py not using absolute_import
72 72 hgext/largefiles/lfcommands.py not using absolute_import
73 73 hgext/largefiles/lfutil.py not using absolute_import
74 74 hgext/largefiles/localstore.py not using absolute_import
75 75 hgext/largefiles/overrides.py not using absolute_import
76 76 hgext/largefiles/proto.py not using absolute_import
77 77 hgext/largefiles/remotestore.py not using absolute_import
78 78 hgext/largefiles/reposetup.py not using absolute_import
79 79 hgext/largefiles/uisetup.py not using absolute_import
80 80 hgext/largefiles/wirestore.py not using absolute_import
81 81 hgext/mq.py not using absolute_import
82 82 hgext/notify.py not using absolute_import
83 83 hgext/pager.py not using absolute_import
84 84 hgext/patchbomb.py not using absolute_import
85 85 hgext/purge.py not using absolute_import
86 86 hgext/rebase.py not using absolute_import
87 87 hgext/record.py not using absolute_import
88 88 hgext/relink.py not using absolute_import
89 89 hgext/schemes.py not using absolute_import
90 90 hgext/share.py not using absolute_import
91 91 hgext/shelve.py not using absolute_import
92 92 hgext/strip.py not using absolute_import
93 93 hgext/transplant.py not using absolute_import
94 94 hgext/win32mbcs.py not using absolute_import
95 95 hgext/win32text.py not using absolute_import
96 96 hgext/zeroconf/Zeroconf.py not using absolute_import
97 97 hgext/zeroconf/Zeroconf.py requires print_function
98 98 hgext/zeroconf/__init__.py not using absolute_import
99 99 i18n/check-translation.py not using absolute_import
100 100 i18n/polib.py not using absolute_import
101 101 mercurial/cmdutil.py not using absolute_import
102 102 mercurial/commands.py not using absolute_import
103 103 mercurial/dispatch.py requires print_function
104 mercurial/httpclient/__init__.py not using absolute_import
105 mercurial/httpclient/_readers.py not using absolute_import
106 mercurial/httpclient/socketutil.py not using absolute_import
107 104 mercurial/keepalive.py requires print_function
108 105 mercurial/lsprof.py requires print_function
109 106 mercurial/lsprofcalltree.py requires print_function
110 107 mercurial/mail.py requires print_function
111 108 setup.py not using absolute_import
112 109 tests/filterpyflakes.py requires print_function
113 110 tests/generate-working-copy-states.py requires print_function
114 111 tests/get-with-headers.py requires print_function
115 112 tests/heredoctest.py requires print_function
116 113 tests/hypothesishelpers.py not using absolute_import
117 114 tests/hypothesishelpers.py requires print_function
118 115 tests/killdaemons.py not using absolute_import
119 116 tests/md5sum.py not using absolute_import
120 117 tests/mockblackbox.py not using absolute_import
121 118 tests/printenv.py not using absolute_import
122 119 tests/readlink.py not using absolute_import
123 120 tests/readlink.py requires print_function
124 121 tests/revlog-formatv0.py not using absolute_import
125 122 tests/run-tests.py not using absolute_import
126 123 tests/seq.py not using absolute_import
127 124 tests/seq.py requires print_function
128 125 tests/silenttestrunner.py not using absolute_import
129 126 tests/silenttestrunner.py requires print_function
130 127 tests/sitecustomize.py not using absolute_import
131 128 tests/svn-safe-append.py not using absolute_import
132 129 tests/svnxml.py not using absolute_import
133 130 tests/test-ancestor.py requires print_function
134 131 tests/test-atomictempfile.py not using absolute_import
135 132 tests/test-batching.py not using absolute_import
136 133 tests/test-batching.py requires print_function
137 134 tests/test-bdiff.py not using absolute_import
138 135 tests/test-bdiff.py requires print_function
139 136 tests/test-context.py not using absolute_import
140 137 tests/test-context.py requires print_function
141 138 tests/test-demandimport.py not using absolute_import
142 139 tests/test-demandimport.py requires print_function
143 140 tests/test-dispatch.py not using absolute_import
144 141 tests/test-dispatch.py requires print_function
145 142 tests/test-doctest.py not using absolute_import
146 143 tests/test-duplicateoptions.py not using absolute_import
147 144 tests/test-duplicateoptions.py requires print_function
148 145 tests/test-filecache.py not using absolute_import
149 146 tests/test-filecache.py requires print_function
150 147 tests/test-filelog.py not using absolute_import
151 148 tests/test-filelog.py requires print_function
152 149 tests/test-hg-parseurl.py not using absolute_import
153 150 tests/test-hg-parseurl.py requires print_function
154 151 tests/test-hgweb-auth.py not using absolute_import
155 152 tests/test-hgweb-auth.py requires print_function
156 153 tests/test-hgwebdir-paths.py not using absolute_import
157 154 tests/test-hybridencode.py not using absolute_import
158 155 tests/test-hybridencode.py requires print_function
159 156 tests/test-lrucachedict.py not using absolute_import
160 157 tests/test-lrucachedict.py requires print_function
161 158 tests/test-manifest.py not using absolute_import
162 159 tests/test-minirst.py not using absolute_import
163 160 tests/test-minirst.py requires print_function
164 161 tests/test-parseindex2.py not using absolute_import
165 162 tests/test-parseindex2.py requires print_function
166 163 tests/test-pathencode.py not using absolute_import
167 164 tests/test-pathencode.py requires print_function
168 165 tests/test-propertycache.py not using absolute_import
169 166 tests/test-propertycache.py requires print_function
170 167 tests/test-revlog-ancestry.py not using absolute_import
171 168 tests/test-revlog-ancestry.py requires print_function
172 169 tests/test-run-tests.py not using absolute_import
173 170 tests/test-simplemerge.py not using absolute_import
174 171 tests/test-status-inprocess.py not using absolute_import
175 172 tests/test-status-inprocess.py requires print_function
176 173 tests/test-symlink-os-yes-fs-no.py not using absolute_import
177 174 tests/test-trusted.py not using absolute_import
178 175 tests/test-trusted.py requires print_function
179 176 tests/test-ui-color.py not using absolute_import
180 177 tests/test-ui-color.py requires print_function
181 178 tests/test-ui-config.py not using absolute_import
182 179 tests/test-ui-config.py requires print_function
183 180 tests/test-ui-verbosity.py not using absolute_import
184 181 tests/test-ui-verbosity.py requires print_function
185 182 tests/test-url.py not using absolute_import
186 183 tests/test-url.py requires print_function
187 184 tests/test-walkrepo.py requires print_function
188 185 tests/test-wireproto.py requires print_function
189 186 tests/tinyproxy.py requires print_function
General Comments 0
You need to be logged in to leave comments. Login now