httpclient: update to 54868ef054d2 of httpplus...
Augie Fackler
r29442:456609cb default
mercurial/httpclient/__init__.py
@@ -1,842 +1,910 b''
1 1 # Copyright 2010, Google Inc.
2 2 # All rights reserved.
3 3 #
4 4 # Redistribution and use in source and binary forms, with or without
5 5 # modification, are permitted provided that the following conditions are
6 6 # met:
7 7 #
8 8 # * Redistributions of source code must retain the above copyright
9 9 # notice, this list of conditions and the following disclaimer.
10 10 # * Redistributions in binary form must reproduce the above
11 11 # copyright notice, this list of conditions and the following disclaimer
12 12 # in the documentation and/or other materials provided with the
13 13 # distribution.
14 14 # * Neither the name of Google Inc. nor the names of its
15 15 # contributors may be used to endorse or promote products derived from
16 16 # this software without specific prior written permission.
17 17
18 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 29 """Improved HTTP/1.1 client library
30 30
31 31 This library contains an HTTPConnection which is similar to the one in
32 32 httplib, but has several additional features:
33 33
34 34 * supports keepalives natively
35 35 * uses select() to block for incoming data
36 36 * notices when the server responds early to a request
37 37 * implements ssl inline instead of in a different class
38 38 """
39 39 from __future__ import absolute_import
40 40
41 41 # Many functions in this file have too many arguments.
42 42 # pylint: disable=R0913
43
43 import email
44 import email.message
44 45 import errno
45 46 import inspect
46 47 import logging
47 import rfc822
48 48 import select
49 49 import socket
50 import ssl
51 import sys
50 52
51 53 try:
52 54 import cStringIO as io
53 55 io.StringIO
54 56 except ImportError:
55 57 import io
56 58
57 59 try:
58 60 import httplib
59 61 httplib.HTTPException
60 62 except ImportError:
61 63 import http.client as httplib
62 64
63 65 from . import (
64 66 _readers,
65 socketutil,
66 67 )
67 68
68 69 logger = logging.getLogger(__name__)
69 70
70 71 __all__ = ['HTTPConnection', 'HTTPResponse']
71 72
72 HTTP_VER_1_0 = 'HTTP/1.0'
73 HTTP_VER_1_1 = 'HTTP/1.1'
73 HTTP_VER_1_0 = b'HTTP/1.0'
74 HTTP_VER_1_1 = b'HTTP/1.1'
74 75
75 76 OUTGOING_BUFFER_SIZE = 1 << 15
76 77 INCOMING_BUFFER_SIZE = 1 << 20
77 78
78 79 HDR_ACCEPT_ENCODING = 'accept-encoding'
79 80 HDR_CONNECTION_CTRL = 'connection'
80 81 HDR_CONTENT_LENGTH = 'content-length'
81 82 HDR_XFER_ENCODING = 'transfer-encoding'
82 83
83 84 XFER_ENCODING_CHUNKED = 'chunked'
84 85
85 86 CONNECTION_CLOSE = 'close'
86 87
87 EOL = '\r\n'
88 EOL = b'\r\n'
88 89 _END_HEADERS = EOL * 2
89 90
90 91 # Based on some searching around, 1 second seems like a reasonable
91 92 # default here.
92 93 TIMEOUT_ASSUME_CONTINUE = 1
93 94 TIMEOUT_DEFAULT = None
94 95
96 if sys.version_info > (3, 0):
97 _unicode = str
98 else:
99 _unicode = unicode
100
101 def _ensurebytes(data):
102 if not isinstance(data, (_unicode, bytes)):
103 data = str(data)
104 if not isinstance(data, bytes):
105 try:
106 return data.encode('latin-1')
107 except UnicodeEncodeError as err:
108 raise UnicodeEncodeError(
109 err.encoding,
110 err.object,
111 err.start,
112 err.end,
113 '%r is not valid Latin-1. Use .encode("utf-8") '
114 'if sending as utf-8 is desired.' % (
115 data[err.start:err.end],))
116 return data
117
118 class _CompatMessage(email.message.Message):
119 """Workaround for rfc822.Message and email.message.Message API diffs."""
120
121 @classmethod
122 def from_string(cls, s):
123 if sys.version_info > (3, 0):
124 # Python 3 can't decode headers from bytes, so we have to
125 # trust RFC 2616 and decode the headers as iso-8859-1
126 # bytes.
127 s = s.decode('iso-8859-1')
128 headers = email.message_from_string(s, _class=_CompatMessage)
129 # Fix multi-line headers to match httplib's behavior from
130 # Python 2.x, since email.message.Message handles them in
131 # slightly different ways.
132 if sys.version_info < (3, 0):
133 new = []
134 for h, v in headers._headers:
135 if '\r\n' in v:
136 v = '\n'.join([' ' + x.lstrip() for x in v.split('\r\n')])[1:]
137 new.append((h, v))
138 headers._headers = new
139 return headers
140
141 def getheaders(self, key):
142 return self.get_all(key)
143
144 def getheader(self, key, default=None):
145 return self.get(key, failobj=default)
146
95 147
96 148 class HTTPResponse(object):
97 149 """Response from an HTTP server.
98 150
99 151 The response will continue to load as available. If you need the
100 152 complete response before continuing, check the .complete() method.
101 153 """
102 154 def __init__(self, sock, timeout, method):
103 155 self.sock = sock
104 156 self.method = method
105 self.raw_response = ''
157 self.raw_response = b''
106 158 self._headers_len = 0
107 159 self.headers = None
108 160 self.will_close = False
109 self.status_line = ''
161 self.status_line = b''
110 162 self.status = None
111 163 self.continued = False
112 164 self.http_version = None
113 165 self.reason = None
114 166 self._reader = None
115 167
116 168 self._read_location = 0
117 169 self._eol = EOL
118 170
119 171 self._timeout = timeout
120 172
121 173 @property
122 174 def _end_headers(self):
123 175 return self._eol * 2
124 176
125 177 def complete(self):
126 178 """Returns true if this response is completely loaded.
127 179
128 180 Note that if this is a connection where complete means the
129 181 socket is closed, this will nearly always return False, even
130 182 in cases where all the data has actually been loaded.
131 183 """
132 184 if self._reader:
133 185 return self._reader.done()
134 186
135 187 def _close(self):
136 188 if self._reader is not None:
137 189 # We're a friend of the reader class here.
138 190 # pylint: disable=W0212
139 191 self._reader._close()
140 192
141 193 def getheader(self, header, default=None):
142 194 return self.headers.getheader(header, default=default)
143 195
144 196 def getheaders(self):
197 if sys.version_info < (3, 0):
198 return [(k.lower(), v) for k, v in self.headers.items()]
199 # Starting in Python 3, headers aren't lowercased before being
200 # returned here.
145 201 return self.headers.items()
146 202
147 203 def readline(self):
148 204 """Read a single line from the response body.
149 205
150 206 This may block until either a line ending is found or the
151 207 response is complete.
152 208 """
153 209 blocks = []
154 210 while True:
155 self._reader.readto('\n', blocks)
211 self._reader.readto(b'\n', blocks)
156 212
157 if blocks and blocks[-1][-1] == '\n' or self.complete():
213 if blocks and blocks[-1][-1:] == b'\n' or self.complete():
158 214 break
159 215
160 216 self._select()
161 217
162 return ''.join(blocks)
218 return b''.join(blocks)
163 219
164 220 def read(self, length=None):
165 221 """Read data from the response body."""
166 222 # if length is None, unbounded read
167 223 while (not self.complete() # never select on a finished read
168 224 and (not length # unbounded, so we wait for complete()
169 225 or length > self._reader.available_data)):
170 226 self._select()
171 227 if not length:
172 228 length = self._reader.available_data
173 229 r = self._reader.read(length)
174 230 if self.complete() and self.will_close:
175 231 self.sock.close()
176 232 return r
177 233
178 234 def _select(self):
179 235 r, unused_write, unused_err = select.select(
180 236 [self.sock], [], [], self._timeout)
181 237 if not r:
182 238 # socket was not readable. If the response is not
183 239 # complete, raise a timeout.
184 240 if not self.complete():
185 241 logger.info('timed out with timeout of %s', self._timeout)
186 242 raise HTTPTimeoutException('timeout reading data')
187 243 try:
188 244 data = self.sock.recv(INCOMING_BUFFER_SIZE)
189 except socket.sslerror as e:
190 if e.args[0] != socket.SSL_ERROR_WANT_READ:
245 except ssl.SSLError as e:
246 if e.args[0] != ssl.SSL_ERROR_WANT_READ:
191 247 raise
192 248 logger.debug('SSL_ERROR_WANT_READ in _select, should retry later')
193 249 return True
194 250 logger.debug('response read %d data during _select', len(data))
195 251 # If the socket was readable and no data was read, that means
196 252 # the socket was closed. Inform the reader (if any) so it can
197 253 # raise an exception if this is an invalid situation.
198 254 if not data:
199 255 if self._reader:
200 256 # We're a friend of the reader class here.
201 257 # pylint: disable=W0212
202 258 self._reader._close()
203 259 return False
204 260 else:
205 261 self._load_response(data)
206 262 return True
207 263
208 264 # This method gets replaced by _load later, which confuses pylint.
209 265 def _load_response(self, data): # pylint: disable=E0202
210 266 # Being here implies we're not at the end of the headers yet,
211 267 # since at the end of this method if headers were completely
212 268 # loaded we replace this method with the load() method of the
213 269 # reader we created.
214 270 self.raw_response += data
215 271 # This is a bogus server with bad line endings
216 272 if self._eol not in self.raw_response:
217 for bad_eol in ('\n', '\r'):
273 for bad_eol in (b'\n', b'\r'):
218 274 if (bad_eol in self.raw_response
219 275 # verify that bad_eol is not the end of the incoming data
220 276 # as this could be a response line that just got
221 277 # split between \r and \n.
222 278 and (self.raw_response.index(bad_eol) <
223 279 (len(self.raw_response) - 1))):
224 280 logger.info('bogus line endings detected, '
225 281 'using %r for EOL', bad_eol)
226 282 self._eol = bad_eol
227 283 break
228 284 # exit early if not at end of headers
229 285 if self._end_headers not in self.raw_response or self.headers:
230 286 return
231 287
232 288 # handle 100-continue response
233 289 hdrs, body = self.raw_response.split(self._end_headers, 1)
234 unused_http_ver, status = hdrs.split(' ', 1)
235 if status.startswith('100'):
290 unused_http_ver, status = hdrs.split(b' ', 1)
291 if status.startswith(b'100'):
236 292 self.raw_response = body
237 293 self.continued = True
238 294 logger.debug('continue seen, setting body to %r', body)
239 295 return
240 296
241 297 # arriving here means we should parse response headers
242 298 # as all headers have arrived completely
243 299 hdrs, body = self.raw_response.split(self._end_headers, 1)
244 300 del self.raw_response
245 301 if self._eol in hdrs:
246 302 self.status_line, hdrs = hdrs.split(self._eol, 1)
247 303 else:
248 304 self.status_line = hdrs
249 hdrs = ''
305 hdrs = b''
250 306 # TODO HTTP < 1.0 support
251 307 (self.http_version, self.status,
252 self.reason) = self.status_line.split(' ', 2)
308 self.reason) = self.status_line.split(b' ', 2)
253 309 self.status = int(self.status)
254 310 if self._eol != EOL:
255 hdrs = hdrs.replace(self._eol, '\r\n')
256 headers = rfc822.Message(io.StringIO(hdrs))
311 hdrs = hdrs.replace(self._eol, b'\r\n')
312 headers = _CompatMessage.from_string(hdrs)
257 313 content_len = None
258 314 if HDR_CONTENT_LENGTH in headers:
259 315 content_len = int(headers[HDR_CONTENT_LENGTH])
260 316 if self.http_version == HTTP_VER_1_0:
261 317 self.will_close = True
262 318 elif HDR_CONNECTION_CTRL in headers:
263 319 self.will_close = (
264 320 headers[HDR_CONNECTION_CTRL].lower() == CONNECTION_CLOSE)
265 321 if (HDR_XFER_ENCODING in headers
266 322 and headers[HDR_XFER_ENCODING].lower() == XFER_ENCODING_CHUNKED):
267 323 self._reader = _readers.ChunkedReader(self._eol)
268 324 logger.debug('using a chunked reader')
269 325 else:
270 326 # HEAD responses are forbidden from returning a body, and
271 327 # it's implausible for a CONNECT response to use
272 328 # close-is-end logic for an OK response.
273 if (self.method == 'HEAD' or
274 (self.method == 'CONNECT' and content_len is None)):
329 if (self.method == b'HEAD' or
330 (self.method == b'CONNECT' and content_len is None)):
275 331 content_len = 0
276 332 if content_len is not None:
277 333 logger.debug('using a content-length reader with length %d',
278 334 content_len)
279 335 self._reader = _readers.ContentLengthReader(content_len)
280 336 else:
281 337 # Response body had no length specified and is not
282 338 # chunked, so the end of the body will only be
283 339 # identifiable by the termination of the socket by the
284 340 # server. My interpretation of the spec means that we
285 341 # are correct in hitting this case if
286 342 # transfer-encoding, content-length, and
287 343 # connection-control were left unspecified.
288 344 self._reader = _readers.CloseIsEndReader()
289 345 logger.debug('using a close-is-end reader')
290 346 self.will_close = True
291 347
292 348 if body:
293 349 # We're a friend of the reader class here.
294 350 # pylint: disable=W0212
295 351 self._reader._load(body)
296 352 logger.debug('headers complete')
297 353 self.headers = headers
298 354 # We're a friend of the reader class here.
299 355 # pylint: disable=W0212
300 356 self._load_response = self._reader._load
301 357
302 358 def _foldheaders(headers):
303 359 """Given some headers, rework them so we can safely overwrite values.
304 360
305 361 >>> _foldheaders({'Accept-Encoding': 'wat'})
306 362 {'accept-encoding': ('Accept-Encoding', 'wat')}
307 363 """
308 return dict((k.lower(), (k, v)) for k, v in headers.iteritems())
364 return dict((k.lower(), (k, v)) for k, v in headers.items())
309 365
310 366 try:
311 367 inspect.signature
312 368 def _handlesarg(func, arg):
313 369 """ Try to determine if func accepts arg
314 370
315 371 If it takes arg, return True
316 372 If it happens to take **args, then it could do anything:
317 373 * It could throw a different TypeError, just for fun
318 374 * It could throw an ArgumentError or anything else
319 375 * It could choose not to throw an Exception at all
320 376 ... return 'unknown'
321 377
322 378 Otherwise, return False
323 379 """
324 380 params = inspect.signature(func).parameters
325 381 if arg in params:
326 382 return True
327 383 for p in params:
328 384 if params[p].kind == inspect._ParameterKind.VAR_KEYWORD:
329 385 return 'unknown'
330 386 return False
331 387 except AttributeError:
332 388 def _handlesarg(func, arg):
333 389 """ Try to determine if func accepts arg
334 390
335 391 If it takes arg, return True
336 392 If it happens to take **args, then it could do anything:
337 393 * It could throw a different TypeError, just for fun
338 394 * It could throw an ArgumentError or anything else
339 395 * It could choose not to throw an Exception at all
340 396 ... return 'unknown'
341 397
342 398 Otherwise, return False
343 399 """
344 400 spec = inspect.getargspec(func)
345 401 if arg in spec.args:
346 402 return True
347 403 if spec.keywords:
348 404 return 'unknown'
349 405 return False
350 406
351 407 class HTTPConnection(object):
352 408 """Connection to a single http server.
353 409
354 410 Supports 100-continue and keepalives natively. Uses select() for
355 411 non-blocking socket operations.
356 412 """
357 413 http_version = HTTP_VER_1_1
358 414 response_class = HTTPResponse
359 415
360 416 def __init__(self, host, port=None, use_ssl=None, ssl_validator=None,
361 417 timeout=TIMEOUT_DEFAULT,
362 418 continue_timeout=TIMEOUT_ASSUME_CONTINUE,
363 419 proxy_hostport=None, proxy_headers=None,
364 420 ssl_wrap_socket=None, **ssl_opts):
365 421 """Create a new HTTPConnection.
366 422
367 423 Args:
368 424 host: The host to which we'll connect.
369 425 port: Optional. The port over which we'll connect. Default 80 for
370 426 non-ssl, 443 for ssl.
371 427 use_ssl: Optional. Whether to use ssl. Defaults to False if port is
372 428 not 443, true if port is 443.
373 429 ssl_validator: a function(socket) to validate the ssl cert
374 430 timeout: Optional. Connection timeout, default is TIMEOUT_DEFAULT.
375 431 continue_timeout: Optional. Timeout for waiting on an expected
376 432 "100 Continue" response. Default is TIMEOUT_ASSUME_CONTINUE.
377 433 proxy_hostport: Optional. Tuple of (host, port) to use as an http
378 434 proxy for the connection. Default is to not use a proxy.
379 435 proxy_headers: Optional dict of header keys and values to send to
380 436 a proxy when using CONNECT. For compatibility with
381 437 httplib, the Proxy-Authorization header may be
382 438 specified in headers for request(), which will clobber
383 439 any such header specified here if specified. Providing
384 440 this option and not proxy_hostport will raise an
385 441 ValueError.
386 442 ssl_wrap_socket: Optional function to use for wrapping
387 443 sockets. If unspecified, the one from the ssl module will
388 444 be used if available, or something that's compatible with
389 445 it if on a Python older than 2.6.
390 446
391 447 Any extra keyword arguments to this function will be provided
392 448 to the ssl_wrap_socket method. If no ssl
393 449 """
394 if port is None and host.count(':') == 1 or ']:' in host:
395 host, port = host.rsplit(':', 1)
450 host = _ensurebytes(host)
451 if port is None and host.count(b':') == 1 or b']:' in host:
452 host, port = host.rsplit(b':', 1)
396 453 port = int(port)
397 if '[' in host:
454 if b'[' in host:
398 455 host = host[1:-1]
399 456 if ssl_wrap_socket is not None:
400 457 _wrap_socket = ssl_wrap_socket
401 458 else:
402 _wrap_socket = socketutil.wrap_socket
459 _wrap_socket = ssl.wrap_socket
403 460 call_wrap_socket = None
404 461 handlesubar = _handlesarg(_wrap_socket, 'server_hostname')
405 462 if handlesubar is True:
406 463 # supports server_hostname
407 464 call_wrap_socket = _wrap_socket
408 465 handlesnobar = _handlesarg(_wrap_socket, 'serverhostname')
409 466 if handlesnobar is True and handlesubar is not True:
410 467 # supports serverhostname
411 468 def call_wrap_socket(sock, server_hostname=None, **ssl_opts):
412 469 return _wrap_socket(sock, serverhostname=server_hostname,
413 470 **ssl_opts)
414 471 if handlesubar is False and handlesnobar is False:
415 472 # does not support either
416 473 def call_wrap_socket(sock, server_hostname=None, **ssl_opts):
417 474 return _wrap_socket(sock, **ssl_opts)
418 475 if call_wrap_socket is None:
419 476 # we assume it takes **args
420 477 def call_wrap_socket(sock, **ssl_opts):
421 478 if 'server_hostname' in ssl_opts:
422 479 ssl_opts['serverhostname'] = ssl_opts['server_hostname']
423 480 return _wrap_socket(sock, **ssl_opts)
424 481 self._ssl_wrap_socket = call_wrap_socket
425 482 if use_ssl is None and port is None:
426 483 use_ssl = False
427 484 port = 80
428 485 elif use_ssl is None:
429 486 use_ssl = (port == 443)
430 487 elif port is None:
431 488 port = (use_ssl and 443 or 80)
432 489 self.port = port
433 if use_ssl and not socketutil.have_ssl:
434 raise Exception('ssl requested but unavailable on this Python')
435 490 self.ssl = use_ssl
436 491 self.ssl_opts = ssl_opts
437 492 self._ssl_validator = ssl_validator
438 493 self.host = host
439 494 self.sock = None
440 495 self._current_response = None
441 496 self._current_response_taken = False
442 497 if proxy_hostport is None:
443 498 self._proxy_host = self._proxy_port = None
444 499 if proxy_headers:
445 500 raise ValueError(
446 501 'proxy_headers may not be specified unless '
447 502 'proxy_hostport is also specified.')
448 503 else:
449 504 self._proxy_headers = {}
450 505 else:
451 506 self._proxy_host, self._proxy_port = proxy_hostport
452 507 self._proxy_headers = _foldheaders(proxy_headers or {})
453 508
454 509 self.timeout = timeout
455 510 self.continue_timeout = continue_timeout
456 511
457 512 def _connect(self, proxy_headers):
458 513 """Connect to the host and port specified in __init__."""
459 514 if self.sock:
460 515 return
461 516 if self._proxy_host is not None:
462 517 logger.info('Connecting to http proxy %s:%s',
463 518 self._proxy_host, self._proxy_port)
464 sock = socketutil.create_connection((self._proxy_host,
465 self._proxy_port))
519 sock = socket.create_connection((self._proxy_host,
520 self._proxy_port))
466 521 if self.ssl:
467 data = self._buildheaders('CONNECT', '%s:%d' % (self.host,
468 self.port),
522 data = self._buildheaders(b'CONNECT', b'%s:%d' % (self.host,
523 self.port),
469 524 proxy_headers, HTTP_VER_1_0)
470 525 sock.send(data)
471 526 sock.setblocking(0)
472 r = self.response_class(sock, self.timeout, 'CONNECT')
527 r = self.response_class(sock, self.timeout, b'CONNECT')
473 528 timeout_exc = HTTPTimeoutException(
474 529 'Timed out waiting for CONNECT response from proxy')
475 530 while not r.complete():
476 531 try:
477 532 # We're a friend of the response class, so let
478 533 # us use the private attribute.
479 534 # pylint: disable=W0212
480 535 if not r._select():
481 536 if not r.complete():
482 537 raise timeout_exc
483 538 except HTTPTimeoutException:
484 539 # This raise/except pattern looks goofy, but
485 540 # _select can raise the timeout as well as the
486 541 # loop body. I wish it wasn't this convoluted,
487 542 # but I don't have a better solution
488 543 # immediately handy.
489 544 raise timeout_exc
490 545 if r.status != 200:
491 546 raise HTTPProxyConnectFailedException(
492 547 'Proxy connection failed: %d %s' % (r.status,
493 548 r.read()))
494 549 logger.info('CONNECT (for SSL) to %s:%s via proxy succeeded.',
495 550 self.host, self.port)
496 551 else:
497 sock = socketutil.create_connection((self.host, self.port))
552 sock = socket.create_connection((self.host, self.port))
498 553 if self.ssl:
499 554 # This is the default, but in the case of proxied SSL
500 555 # requests the proxy logic above will have cleared
501 556 # blocking mode, so re-enable it just to be safe.
502 557 sock.setblocking(1)
503 558 logger.debug('wrapping socket for ssl with options %r',
504 559 self.ssl_opts)
505 560 sock = self._ssl_wrap_socket(sock, server_hostname=self.host,
506 561 **self.ssl_opts)
507 562 if self._ssl_validator:
508 563 self._ssl_validator(sock)
509 564 sock.setblocking(0)
510 565 self.sock = sock
511 566
512 567 def _buildheaders(self, method, path, headers, http_ver):
513 568 if self.ssl and self.port == 443 or self.port == 80:
514 569 # default port for protocol, so leave it out
515 570 hdrhost = self.host
516 571 else:
517 572 # include nonstandard port in header
518 if ':' in self.host: # must be IPv6
519 hdrhost = '[%s]:%d' % (self.host, self.port)
573 if b':' in self.host: # must be IPv6
574 hdrhost = b'[%s]:%d' % (self.host, self.port)
520 575 else:
521 hdrhost = '%s:%d' % (self.host, self.port)
576 hdrhost = b'%s:%d' % (self.host, self.port)
522 577 if self._proxy_host and not self.ssl:
523 578 # When talking to a regular http proxy we must send the
524 579 # full URI, but in all other cases we must not (although
525 580 # technically RFC 2616 says servers must accept our
526 581 # request if we screw up, experimentally few do that
527 582 # correctly.)
528 assert path[0] == '/', 'path must start with a /'
529 path = 'http://%s%s' % (hdrhost, path)
530 outgoing = ['%s %s %s%s' % (method, path, http_ver, EOL)]
531 headers['host'] = ('Host', hdrhost)
583 assert path[0:1] == b'/', 'path must start with a /'
584 path = b'http://%s%s' % (hdrhost, path)
585 outgoing = [b'%s %s %s%s' % (method, path, http_ver, EOL)]
586 headers[b'host'] = (b'Host', hdrhost)
532 587 headers[HDR_ACCEPT_ENCODING] = (HDR_ACCEPT_ENCODING, 'identity')
533 for hdr, val in headers.itervalues():
534 outgoing.append('%s: %s%s' % (hdr, val, EOL))
588 for hdr, val in sorted((_ensurebytes(h), _ensurebytes(v))
589 for h, v in headers.values()):
590 outgoing.append(b'%s: %s%s' % (hdr, val, EOL))
535 591 outgoing.append(EOL)
536 return ''.join(outgoing)
592 return b''.join(outgoing)
537 593
538 594 def close(self):
539 595 """Close the connection to the server.
540 596
541 597 This is a no-op if the connection is already closed. The
542 598 connection may automatically close if requested by the server
543 599 or required by the nature of a response.
544 600 """
545 601 if self.sock is None:
546 602 return
547 603 self.sock.close()
548 604 self.sock = None
549 605 logger.info('closed connection to %s on %s', self.host, self.port)
550 606
551 607 def busy(self):
552 608 """Returns True if this connection object is currently in use.
553 609
554 610 If a response is still pending, this will return True, even if
555 611 the request has finished sending. In the future,
556 612 HTTPConnection may transparently juggle multiple connections
557 613 to the server, in which case this will be useful to detect if
558 614 any of those connections is ready for use.
559 615 """
560 616 cr = self._current_response
561 617 if cr is not None:
562 618 if self._current_response_taken:
563 619 if cr.will_close:
564 620 self.sock = None
565 621 self._current_response = None
566 622 return False
567 623 elif cr.complete():
568 624 self._current_response = None
569 625 return False
570 626 return True
571 627 return False
572 628
573 629 def _reconnect(self, where, pheaders):
574 630 logger.info('reconnecting during %s', where)
575 631 self.close()
576 632 self._connect(pheaders)
577 633
578 634 def request(self, method, path, body=None, headers={},
579 635 expect_continue=False):
580 636 """Send a request to the server.
581 637
582 638 For increased flexibility, this does not return the response
583 639 object. Future versions of HTTPConnection that juggle multiple
584 640 sockets will be able to send (for example) 5 requests all at
585 641 once, and then let the requests arrive as data is
586 642 available. Use the `getresponse()` method to retrieve the
587 643 response.
588 644 """
645 method = _ensurebytes(method)
646 path = _ensurebytes(path)
589 647 if self.busy():
590 648 raise httplib.CannotSendRequest(
591 649 'Can not send another request before '
592 650 'current response is read!')
593 651 self._current_response_taken = False
594 652
595 653 logger.info('sending %s request for %s to %s on port %s',
596 654 method, path, self.host, self.port)
655
597 656 hdrs = _foldheaders(headers)
598 if hdrs.get('expect', ('', ''))[1].lower() == '100-continue':
657 # Figure out headers that have to be computed from the request
658 # body.
659 chunked = False
660 if body and HDR_CONTENT_LENGTH not in hdrs:
661 if getattr(body, '__len__', False):
662 hdrs[HDR_CONTENT_LENGTH] = (HDR_CONTENT_LENGTH,
663 b'%d' % len(body))
664 elif getattr(body, 'read', False):
665 hdrs[HDR_XFER_ENCODING] = (HDR_XFER_ENCODING,
666 XFER_ENCODING_CHUNKED)
667 chunked = True
668 else:
669 raise BadRequestData('body has no __len__() nor read()')
670 # Figure out expect-continue header
671 if hdrs.get('expect', ('', ''))[1].lower() == b'100-continue':
599 672 expect_continue = True
600 673 elif expect_continue:
601 hdrs['expect'] = ('Expect', '100-Continue')
674 hdrs['expect'] = (b'Expect', b'100-Continue')
602 675 # httplib compatibility: if the user specified a
603 676 # proxy-authorization header, that's actually intended for a
604 677 # proxy CONNECT action, not the real request, but only if
605 678 # we're going to use a proxy.
606 679 pheaders = dict(self._proxy_headers)
607 680 if self._proxy_host and self.ssl:
608 681 pa = hdrs.pop('proxy-authorization', None)
609 682 if pa is not None:
610 683 pheaders['proxy-authorization'] = pa
611
612 chunked = False
613 if body and HDR_CONTENT_LENGTH not in hdrs:
614 if getattr(body, '__len__', False):
615 hdrs[HDR_CONTENT_LENGTH] = (HDR_CONTENT_LENGTH, len(body))
616 elif getattr(body, 'read', False):
617 hdrs[HDR_XFER_ENCODING] = (HDR_XFER_ENCODING,
618 XFER_ENCODING_CHUNKED)
619 chunked = True
620 else:
621 raise BadRequestData('body has no __len__() nor read()')
684 # Build header data
685 outgoing_headers = self._buildheaders(
686 method, path, hdrs, self.http_version)
622 687
623 688 # If we're reusing the underlying socket, there are some
624 689 # conditions where we'll want to retry, so make a note of the
625 690 # state of self.sock
626 691 fresh_socket = self.sock is None
627 692 self._connect(pheaders)
628 outgoing_headers = self._buildheaders(
629 method, path, hdrs, self.http_version)
630 693 response = None
631 694 first = True
632 695
633 696 while ((outgoing_headers or body)
634 697 and not (response and response.complete())):
635 698 select_timeout = self.timeout
636 699 out = outgoing_headers or body
637 700 blocking_on_continue = False
638 701 if expect_continue and not outgoing_headers and not (
639 702 response and (response.headers or response.continued)):
640 703 logger.info(
641 704 'waiting up to %s seconds for'
642 705 ' continue response from server',
643 706 self.continue_timeout)
644 707 select_timeout = self.continue_timeout
645 708 blocking_on_continue = True
646 709 out = False
647 710 if out:
648 711 w = [self.sock]
649 712 else:
650 713 w = []
651 714 r, w, x = select.select([self.sock], w, [], select_timeout)
652 715 # if we were expecting a 100 continue and it's been long
653 716 # enough, just go ahead and assume it's ok. This is the
654 717 # recommended behavior from the RFC.
655 718 if r == w == x == []:
656 719 if blocking_on_continue:
657 720 expect_continue = False
658 721 logger.info('no response to continue expectation from '
659 722 'server, optimistically sending request body')
660 723 else:
661 724 raise HTTPTimeoutException('timeout sending data')
662 725 was_first = first
663 726
664 727 # incoming data
665 728 if r:
666 729 try:
667 730 try:
668 731 data = r[0].recv(INCOMING_BUFFER_SIZE)
669 except socket.sslerror as e:
670 if e.args[0] != socket.SSL_ERROR_WANT_READ:
732 except ssl.SSLError as e:
733 if e.args[0] != ssl.SSL_ERROR_WANT_READ:
671 734 raise
672 735 logger.debug('SSL_ERROR_WANT_READ while sending '
673 736 'data, retrying...')
674 737 continue
675 738 if not data:
676 739 logger.info('socket appears closed in read')
677 740 self.sock = None
678 741 self._current_response = None
679 742 if response is not None:
680 743 # We're a friend of the response class, so let
681 744 # us use the private attribute.
682 745 # pylint: disable=W0212
683 746 response._close()
684 747 # This if/elif ladder is a bit subtle,
685 748 # comments in each branch should help.
686 749 if response is not None and response.complete():
687 750 # Server responded completely and then
688 751 # closed the socket. We should just shut
689 752 # things down and let the caller get their
690 753 # response.
691 754 logger.info('Got an early response, '
692 755 'aborting remaining request.')
693 756 break
694 757 elif was_first and response is None:
695 758 # Most likely a keepalive that got killed
696 759 # on the server's end. Commonly happens
697 760 # after getting a really large response
698 761 # from the server.
699 762 logger.info(
700 763 'Connection appeared closed in read on first'
701 764 ' request loop iteration, will retry.')
702 765 self._reconnect('read', pheaders)
703 766 continue
704 767 else:
705 768 # We didn't just send the first data hunk,
706 769 # and either have a partial response or no
707 770 # response at all. There's really nothing
708 771 # meaningful we can do here.
709 772 raise HTTPStateError(
710 773 'Connection appears closed after '
711 774 'some request data was written, but the '
712 775 'response was missing or incomplete!')
713 776 logger.debug('read %d bytes in request()', len(data))
714 777 if response is None:
715 778 response = self.response_class(
716 779 r[0], self.timeout, method)
717 780 # We're a friend of the response class, so let us
718 781 # use the private attribute.
719 782 # pylint: disable=W0212
720 783 response._load_response(data)
721 784 # Jump to the next select() call so we load more
722 785 # data if the server is still sending us content.
723 786 continue
724 787 except socket.error as e:
725 788 if e[0] != errno.EPIPE and not was_first:
726 789 raise
727 790
728 791 # outgoing data
729 792 if w and out:
730 793 try:
731 794 if getattr(out, 'read', False):
732 795 # pylint guesses the type of out incorrectly here
733 796 # pylint: disable=E1103
734 797 data = out.read(OUTGOING_BUFFER_SIZE)
735 798 if not data:
736 799 continue
737 800 if len(data) < OUTGOING_BUFFER_SIZE:
738 801 if chunked:
739 body = '0' + EOL + EOL
802 body = b'0' + EOL + EOL
740 803 else:
741 804 body = None
742 805 if chunked:
743 out = hex(len(data))[2:] + EOL + data + EOL
806 # This encode is okay because we know
807 # hex() is building us only 0-9 and a-f
808 # digits.
809 asciilen = hex(len(data))[2:].encode('ascii')
810 out = asciilen + EOL + data + EOL
744 811 else:
745 812 out = data
746 813 amt = w[0].send(out)
747 814 except socket.error as e:
748 if e[0] == socket.SSL_ERROR_WANT_WRITE and self.ssl:
815 if e[0] == ssl.SSL_ERROR_WANT_WRITE and self.ssl:
749 816 # This means that SSL hasn't flushed its buffer into
750 817 # the socket yet.
751 818 # TODO: find a way to block on ssl flushing its buffer
752 819 # similar to selecting on a raw socket.
753 820 continue
754 821 if e[0] == errno.EWOULDBLOCK or e[0] == errno.EAGAIN:
755 822 continue
756 823 elif (e[0] not in (errno.ECONNRESET, errno.EPIPE)
757 824 and not first):
758 825 raise
759 826 self._reconnect('write', pheaders)
760 827 amt = self.sock.send(out)
761 828 logger.debug('sent %d', amt)
762 829 first = False
763 830 if out is body:
764 831 body = out[amt:]
765 832 else:
766 833 outgoing_headers = out[amt:]
834 # End of request-sending loop.
767 835
768 836 # close if the server response said to or responded before eating
769 837 # the whole request
770 838 if response is None:
771 839 response = self.response_class(self.sock, self.timeout, method)
772 840 if not fresh_socket:
773 841 if not response._select():
774 842 # This means the response failed to get any response
775 843 # data at all, and in all probability the socket was
776 844 # closed before the server even saw our request. Try
777 845 # the request again on a fresh socket.
778 846 logger.debug('response._select() failed during request().'
779 847 ' Assuming request needs to be retried.')
780 848 self.sock = None
781 849 # Call this method explicitly to re-try the
782 850 # request. We don't use self.request() because
783 851 # some tools (notably Mercurial) expect to be able
784 852 # to subclass and redefine request(), and they
785 853 # don't have the same argspec as we do.
786 854 #
787 855 # TODO restructure sending of requests to avoid
788 856 # this recursion
789 857 return HTTPConnection.request(
790 858 self, method, path, body=body, headers=headers,
791 859 expect_continue=expect_continue)
792 860 data_left = bool(outgoing_headers or body)
793 861 if data_left:
794 862 logger.info('stopped sending request early, '
795 863 'will close the socket to be safe.')
796 864 response.will_close = True
797 865 if response.will_close:
798 866 # The socket will be closed by the response, so we disown
799 867 # the socket
800 868 self.sock = None
801 869 self._current_response = response
802 870
803 871 def getresponse(self):
804 872 """Returns the response to the most recent request."""
805 873 if self._current_response is None:
806 874 raise httplib.ResponseNotReady()
807 875 r = self._current_response
808 876 while r.headers is None:
809 877 # We're a friend of the response class, so let us use the
810 878 # private attribute.
811 879 # pylint: disable=W0212
812 880 if not r._select() and not r.complete():
813 881 raise _readers.HTTPRemoteClosedError()
814 882 if r.will_close:
815 883 self.sock = None
816 884 self._current_response = None
817 885 elif r.complete():
818 886 self._current_response = None
819 887 else:
820 888 self._current_response_taken = True
821 889 return r
822 890
823 891
824 892 class HTTPTimeoutException(httplib.HTTPException):
825 893 """A timeout occurred while waiting on the server."""
826 894
827 895
828 896 class BadRequestData(httplib.HTTPException):
829 897 """Request body object has neither __len__ nor read."""
830 898
831 899
832 900 class HTTPProxyConnectFailedException(httplib.HTTPException):
833 901 """Connecting to the HTTP proxy failed."""
834 902
835 903
836 904 class HTTPStateError(httplib.HTTPException):
837 905 """Invalid internal state encountered."""
838 906
839 907 # Forward this exception type from _readers since it needs to be part
840 908 # of the public API.
841 909 HTTPRemoteClosedError = _readers.HTTPRemoteClosedError
842 910 # no-check-code
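
A minimal usage sketch (not part of the commit) of the HTTPConnection and HTTPResponse API shown above, as it behaves after this Python 3 port: str arguments are coerced to latin-1 bytes by _ensurebytes, so both str and bytes are accepted. The host, port, and header values are illustrative, and the import assumes the vendored mercurial.httpclient package is importable.

    from mercurial.httpclient import HTTPConnection

    # Plain HTTP on the default port; use_ssl=True (or port 443) enables TLS.
    conn = HTTPConnection('example.org', 80, use_ssl=False)
    # Method and path may be str or bytes; both pass through _ensurebytes().
    conn.request(b'GET', b'/', headers={'Accept-Encoding': 'identity'})
    resp = conn.getresponse()
    print(resp.status, resp.reason)        # e.g. 200 b'OK'
    print(resp.getheader('content-type'))  # case-insensitive header lookup
    body = resp.read()                     # blocks on select() until complete
    conn.close()
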
mercurial/httpclient/_readers.py
@@ -1,239 +1,239 b''
1 1 # Copyright 2011, Google Inc.
2 2 # All rights reserved.
3 3 #
4 4 # Redistribution and use in source and binary forms, with or without
5 5 # modification, are permitted provided that the following conditions are
6 6 # met:
7 7 #
8 8 # * Redistributions of source code must retain the above copyright
9 9 # notice, this list of conditions and the following disclaimer.
10 10 # * Redistributions in binary form must reproduce the above
11 11 # copyright notice, this list of conditions and the following disclaimer
12 12 # in the documentation and/or other materials provided with the
13 13 # distribution.
14 14 # * Neither the name of Google Inc. nor the names of its
15 15 # contributors may be used to endorse or promote products derived from
16 16 # this software without specific prior written permission.
17 17
18 18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 19 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 20 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21 21 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22 22 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 23 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24 24 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 25 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 26 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 28 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 29 """Reader objects to abstract out different body response types.
30 30
31 31 This module is package-private. It is not expected that these will
32 32 have any clients outside of httpplus.
33 33 """
34 34 from __future__ import absolute_import
35 35
36 36 try:
37 37 import httplib
38 38 httplib.HTTPException
39 39 except ImportError:
40 40 import http.client as httplib
41 41
42 42 import logging
43 43
44 44 logger = logging.getLogger(__name__)
45 45
46 46
47 47 class ReadNotReady(Exception):
48 48 """Raised when read() is attempted but not enough data is loaded."""
49 49
50 50
51 51 class HTTPRemoteClosedError(httplib.HTTPException):
52 52 """The server closed the remote socket in the middle of a response."""
53 53
54 54
55 55 class AbstractReader(object):
56 56 """Abstract base class for response readers.
57 57
58 58 Subclasses must implement _load, and should implement _close if
59 59 it's not an error for the server to close their socket without
60 60 some termination condition being detected during _load.
61 61 """
62 62 def __init__(self):
63 63 self._finished = False
64 64 self._done_chunks = []
65 65 self.available_data = 0
66 66
67 67 def _addchunk(self, data):
68 68 self._done_chunks.append(data)
69 69 self.available_data += len(data)
70 70
71 71 def _pushchunk(self, data):
72 72 self._done_chunks.insert(0, data)
73 73 self.available_data += len(data)
74 74
75 75 def _popchunk(self):
76 76 b = self._done_chunks.pop(0)
77 77 self.available_data -= len(b)
78 78
79 79 return b
80 80
81 81 def done(self):
82 82 """Returns true if the response body is entirely read."""
83 83 return self._finished
84 84
85 85 def read(self, amt):
86 86 """Read amt bytes from the response body."""
87 87 if self.available_data < amt and not self._finished:
88 88 raise ReadNotReady()
89 89 blocks = []
90 90 need = amt
91 91 while self._done_chunks:
92 92 b = self._popchunk()
93 93 if len(b) > need:
94 94 nb = b[:need]
95 95 self._pushchunk(b[need:])
96 96 b = nb
97 97 blocks.append(b)
98 98 need -= len(b)
99 99 if need == 0:
100 100 break
101 result = ''.join(blocks)
101 result = b''.join(blocks)
102 102 assert len(result) == amt or (self._finished and len(result) < amt)
103 103
104 104 return result
105 105
106 106 def readto(self, delimstr, blocks = None):
107 107 """return available data chunks up to the first one in which
108 108 delimstr occurs. No data will be returned after delimstr --
109 109 the chunk in which it occurs will be split and the remainder
110 110 pushed back onto the available data queue. If blocks is
111 111 supplied chunks will be added to blocks, otherwise a new list
112 112 will be allocated.
113 113 """
114 114 if blocks is None:
115 115 blocks = []
116 116
117 117 while self._done_chunks:
118 118 b = self._popchunk()
119 119 i = b.find(delimstr) + len(delimstr)
120 120 if i:
121 121 if i < len(b):
122 122 self._pushchunk(b[i:])
123 123 blocks.append(b[:i])
124 124 break
125 125 else:
126 126 blocks.append(b)
127 127
128 128 return blocks
129 129
130 130 def _load(self, data): # pragma: no cover
131 131 """Subclasses must implement this.
132 132
133 133 As data is available to be read out of this object, it should
134 134 be placed into the _done_chunks list. Subclasses should not
135 135 rely on data remaining in _done_chunks forever, as it may be
136 136 reaped if the client is parsing data as it comes in.
137 137 """
138 138 raise NotImplementedError
139 139
140 140 def _close(self):
141 141 """Default implementation of close.
142 142
143 143 The default implementation assumes that the reader will mark
144 144 the response as finished on the _finished attribute once the
145 145 entire response body has been read. In the event that this is
146 146 not true, the subclass should override the implementation of
147 147 close (for example, close-is-end responses have to set
148 148 self._finished in the close handler.)
149 149 """
150 150 if not self._finished:
151 151 raise HTTPRemoteClosedError(
152 152 'server appears to have closed the socket mid-response')
153 153
154 154
155 155 class AbstractSimpleReader(AbstractReader):
156 156 """Abstract base class for simple readers that require no response decoding.
157 157
158 158 Examples of such responses are Connection: Close (close-is-end)
159 159 and responses that specify a content length.
160 160 """
161 161 def _load(self, data):
162 162 if data:
163 163 assert not self._finished, (
164 164 'tried to add data (%r) to a closed reader!' % data)
165 165 logger.debug('%s read an additional %d data',
166 166 self.name, len(data)) # pylint: disable=E1101
167 167 self._addchunk(data)
168 168
169 169
170 170 class CloseIsEndReader(AbstractSimpleReader):
171 171 """Reader for responses that specify Connection: Close for length."""
172 172 name = 'close-is-end'
173 173
174 174 def _close(self):
175 175 logger.info('Marking close-is-end reader as closed.')
176 176 self._finished = True
177 177
178 178
179 179 class ContentLengthReader(AbstractSimpleReader):
180 180 """Reader for responses that specify an exact content length."""
181 181 name = 'content-length'
182 182
183 183 def __init__(self, amount):
184 184 AbstractSimpleReader.__init__(self)
185 185 self._amount = amount
186 186 if amount == 0:
187 187 self._finished = True
188 188 self._amount_seen = 0
189 189
190 190 def _load(self, data):
191 191 AbstractSimpleReader._load(self, data)
192 192 self._amount_seen += len(data)
193 193 if self._amount_seen >= self._amount:
194 194 self._finished = True
195 195 logger.debug('content-length read complete')
196 196
197 197
198 198 class ChunkedReader(AbstractReader):
199 199 """Reader for chunked transfer encoding responses."""
200 200 def __init__(self, eol):
201 201 AbstractReader.__init__(self)
202 202 self._eol = eol
203 203 self._leftover_skip_amt = 0
204 204 self._leftover_data = ''
205 205
206 206 def _load(self, data):
207 207 assert not self._finished, 'tried to add data to a closed reader!'
208 208 logger.debug('chunked read an additional %d data', len(data))
209 209 position = 0
210 210 if self._leftover_data:
211 211 logger.debug(
212 212 'chunked reader trying to finish block from leftover data')
213 213 # TODO: avoid this string concatenation if possible
214 214 data = self._leftover_data + data
215 215 position = self._leftover_skip_amt
216 216 self._leftover_data = ''
217 217 self._leftover_skip_amt = 0
218 218 datalen = len(data)
219 219 while position < datalen:
220 220 split = data.find(self._eol, position)
221 221 if split == -1:
222 222 self._leftover_data = data
223 223 self._leftover_skip_amt = position
224 224 return
225 225 amt = int(data[position:split], base=16)
226 226 block_start = split + len(self._eol)
227 227 # If the whole data chunk plus the eol trailer hasn't
228 228 # loaded, we'll wait for the next load.
229 229 if block_start + amt + len(self._eol) > len(data):
230 230 self._leftover_data = data
231 231 self._leftover_skip_amt = position
232 232 return
233 233 if amt == 0:
234 234 self._finished = True
235 235 logger.debug('closing chunked reader due to chunk of length 0')
236 236 return
237 237 self._addchunk(data[block_start:block_start + amt])
238 238 position = block_start + amt + len(self._eol)
239 239 # no-check-code
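
For reference (again not part of the commit), a short sketch of how ChunkedReader decodes a chunked body fed to it piecemeal, the way HTTPResponse._load_response drives it. The sample data is made up, and the package-private _load() call is used purely for illustration.

    from mercurial.httpclient import _readers

    reader = _readers.ChunkedReader(b'\r\n')
    # Two chunks ('hello' and ' world') arriving split mid-chunk, followed by
    # the terminating zero-length chunk.
    reader._load(b'5\r\nhello\r\n6\r\n worl')
    reader._load(b'd\r\n0\r\n\r\n')
    assert reader.done()
    print(reader.read(reader.available_data))   # b'hello world'
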
tests/test-check-code.t
@@ -1,20 +1,19 b''
1 1 #require test-repo
2 2
3 3 $ . "$TESTDIR/helpers-testrepo.sh"
4 4 $ check_code="$TESTDIR"/../contrib/check-code.py
5 5 $ cd "$TESTDIR"/..
6 6
7 7 New errors are not allowed. Warnings are strongly discouraged.
8 8 (The writing "no-che?k-code" is for not skipping this file when checking.)
9 9
10 10 $ hg locate | sed 's-\\-/-g' |
11 11 > xargs "$check_code" --warnings --per-file=0 || false
12 12 Skipping hgext/fsmonitor/pywatchman/__init__.py it has no-che?k-code (glob)
13 13 Skipping hgext/fsmonitor/pywatchman/bser.c it has no-che?k-code (glob)
14 14 Skipping hgext/fsmonitor/pywatchman/capabilities.py it has no-che?k-code (glob)
15 15 Skipping hgext/fsmonitor/pywatchman/msc_stdint.h it has no-che?k-code (glob)
16 16 Skipping hgext/fsmonitor/pywatchman/pybser.py it has no-che?k-code (glob)
17 17 Skipping i18n/polib.py it has no-che?k-code (glob)
18 18 Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob)
19 19 Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob)
20 Skipping mercurial/httpclient/socketutil.py it has no-che?k-code (glob)
mercurial/httpclient/socketutil.py
1 NO CONTENT: file was removed