prevent transient leaks of file handles by using new helper functions

These leaks may occur in environments that don't employ a reference-counting
GC, e.g. PyPy. This implies:

- changing opener(...).read() calls to opener.read(...)
- changing opener(...).write() calls to opener.write(...)
- changing open(...).read(...) to util.readfile(...)
- changing open(...).write(...) to util.writefile(...)
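
To illustrate the pattern this commit message describes, here is a minimal sketch of readfile/writefile-style helpers. The names and bodies below are illustrative stand-ins mirroring the shape of util.readfile() and util.writefile(), not the actual Mercurial implementation; the point is that the helper closes the handle explicitly instead of leaving a temporary file object for the garbage collector.

# Illustrative sketch only; not part of httprepo.py.
def readfile(path):
    fp = open(path, 'rb')
    try:
        # read everything while we hold the handle ...
        return fp.read()
    finally:
        # ... and close it explicitly instead of relying on a
        # (possibly non-refcounting) GC to close it eventually
        fp.close()

def writefile(path, text):
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()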

File last commit:

r14149:091c86a7 default
r14168:135e2447 default
httprepo.py

# httprepo.py - HTTP repository proxy classes for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import nullid
from i18n import _
import changegroup, statichttprepo, error, url, util, wireproto
import os, urllib, urllib2, zlib, httplib
import errno, socket

def zgenerator(f):
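    # incrementally decompress the zlib-compressed response read from f,
    # yielding plain data chunks as they become available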
    zd = zlib.decompressobj()
    try:
        for chunk in util.filechunkiter(f):
            while chunk:
                yield zd.decompress(chunk, 2**18)
                chunk = zd.unconsumed_tail
    except httplib.HTTPException:
        raise IOError(None, _('connection ended unexpectedly'))
    yield zd.flush()

class httprepository(wireproto.wirerepository):
    def __init__(self, ui, path):
        self.path = path
        self.caps = None
        self.handler = None
        u = util.url(path)
        if u.query or u.fragment:
            raise util.Abort(_('unsupported URL component: "%s"') %
                             (u.query or u.fragment))

        # urllib cannot handle URLs with embedded user or passwd
        self._url, authinfo = u.authinfo()

        self.ui = ui
        self.ui.debug('using %s\n' % self._url)

        self.urlopener = url.opener(ui, authinfo)

    def __del__(self):
        for h in self.urlopener.handlers:
            h.close()
            if hasattr(h, "close_all"):
                h.close_all()

    def url(self):
        return self.path

    # look up capabilities only when needed

    def _fetchcaps(self):
        self.caps = set(self._call('capabilities').split())

    def get_caps(self):
        if self.caps is None:
            try:
                self._fetchcaps()
            except error.RepoError:
                self.caps = set()
            self.ui.debug('capabilities: %s\n' %
                          (' '.join(self.caps or ['none'])))
        return self.caps

    capabilities = property(get_caps)

    def lock(self):
        raise util.Abort(_('operation not supported over http'))

    def _callstream(self, cmd, **args):
        if cmd == 'pushkey':
            args['data'] = ''
        data = args.pop('data', None)
        headers = args.pop('headers', {})
        self.ui.debug("sending %s command\n" % cmd)
        q = [('cmd', cmd)]
        headersize = 0
        if len(args) > 0:
            httpheader = self.capable('httpheader')
            if httpheader:
                headersize = int(httpheader.split(',')[0])
        if headersize > 0:
            # The headers can typically carry more data than the URL.
            encargs = urllib.urlencode(sorted(args.items()))
            headerfmt = 'X-HgArg-%s'
            contentlen = headersize - len(headerfmt % '000' + ': \r\n')
            headernum = 0
            for i in xrange(0, len(encargs), contentlen):
                headernum += 1
                header = headerfmt % str(headernum)
                headers[header] = encargs[i:i + contentlen]
            varyheaders = [headerfmt % str(h) for h in range(1, headernum + 1)]
            headers['Vary'] = ','.join(varyheaders)
        else:
            q += sorted(args.items())
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (self._url, qs)
        req = urllib2.Request(cu, data, headers)
        if data is not None:
            # len(data) is broken if data doesn't fit into Py_ssize_t
            # add the header ourself to avoid OverflowError
            size = data.__len__()
            self.ui.debug("sending %s bytes\n" % size)
            req.add_unredirected_header('Content-Length', '%d' % size)
        try:
            resp = self.urlopener.open(req)
        except urllib2.HTTPError, inst:
            if inst.code == 401:
                raise util.Abort(_('authorization failed'))
            raise
        except httplib.HTTPException, inst:
            self.ui.debug('http error while sending %s command\n' % cmd)
            self.ui.traceback()
            raise IOError(None, inst)
        except IndexError:
            # this only happens with Python 2.3, later versions raise URLError
            raise util.Abort(_('http error, possibly caused by proxy setting'))

        # record the url we got redirected to
        resp_url = resp.geturl()
        if resp_url.endswith(qs):
            resp_url = resp_url[:-len(qs)]
        if self._url.rstrip('/') != resp_url.rstrip('/'):
            self.ui.status(_('real URL is %s\n') % resp_url)
        self._url = resp_url
        try:
            proto = resp.getheader('content-type')
        except AttributeError:
            proto = resp.headers.get('content-type', '')

        safeurl = util.hidepassword(self._url)
        # accept old "text/plain" and "application/hg-changegroup" for now
        if not (proto.startswith('application/mercurial-') or
                proto.startswith('text/plain') or
                proto.startswith('application/hg-changegroup')):
            self.ui.debug("requested URL: '%s'\n" % util.hidepassword(cu))
            raise error.RepoError(
                _("'%s' does not appear to be an hg repository:\n"
                  "---%%<--- (%s)\n%s\n---%%<---\n")
                % (safeurl, proto or 'no content-type', resp.read()))

        if proto.startswith('application/mercurial-'):
            try:
                version = proto.split('-', 1)[1]
                version_info = tuple([int(n) for n in version.split('.')])
            except ValueError:
                raise error.RepoError(_("'%s' sent a broken Content-Type "
                                        "header (%s)") % (safeurl, proto))
            if version_info > (0, 1):
                raise error.RepoError(_("'%s' uses newer protocol %s") %
                                      (safeurl, version))

        return resp

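    # Note: a simple call such as _callstream('heads') becomes a GET of
    # <repo-url>?cmd=heads; when the server advertises the 'httpheader'
    # capability, large argument sets are moved into X-HgArg-1, X-HgArg-2, ...
    # request headers instead of the query string (see above).
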
    def _call(self, cmd, **args):
        fp = self._callstream(cmd, **args)
        try:
            return fp.read()
        finally:
            # if using keepalive, allow connection to be reused
            fp.close()

    def _callpush(self, cmd, cg, **args):
        # have to stream bundle to a temp file because we do not have
        # http 1.1 chunked transfer.

        types = self.capable('unbundle')
        try:
            types = types.split(',')
        except AttributeError:
            # servers older than d1b16a746db6 will send 'unbundle' as a
            # boolean capability. They only support headerless/uncompressed
            # bundles.
            types = [""]
        for x in types:
            if x in changegroup.bundletypes:
                type = x
                break

        tempname = changegroup.writebundle(cg, None, type)
        fp = url.httpsendfile(self.ui, tempname, "rb")
        headers = {'Content-Type': 'application/mercurial-0.1'}

        try:
            try:
                r = self._call(cmd, data=fp, headers=headers, **args)
                return r.split('\n', 1)
            except socket.error, err:
                if err.args[0] in (errno.ECONNRESET, errno.EPIPE):
                    raise util.Abort(_('push failed: %s') % err.args[1])
                raise util.Abort(err.args[1])
        finally:
            fp.close()
            os.unlink(tempname)

    def _abort(self, exception):
        raise exception

    def _decompress(self, stream):
        return util.chunkbuffer(zgenerator(stream))

class httpsrepository(httprepository):
    def __init__(self, ui, path):
        if not url.has_https:
            raise util.Abort(_('Python support for SSL and HTTPS '
                               'is not installed'))
        httprepository.__init__(self, ui, path)

def instance(ui, path, create):
    if create:
        raise util.Abort(_('cannot create new http repository'))
    try:
        if path.startswith('https:'):
            inst = httpsrepository(ui, path)
        else:
            inst = httprepository(ui, path)
        try:
            # Try to do useful work when checking compatibility.
            # Usually saves a roundtrip since we want the caps anyway.
            inst._fetchcaps()
        except error.RepoError:
            # No luck, try older compatibility check.
            inst.between([(nullid, nullid)])
        return inst
    except error.RepoError, httpexception:
        try:
            r = statichttprepo.instance(ui, "static-" + path, create)
            ui.note('(falling back to static-http)\n')
            return r
        except error.RepoError:
            raise httpexception # use the original http RepoError instead
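
For context, a hedged usage sketch (the URL is hypothetical and the import spelling is an assumption, but hg.repository() is the usual entry point of this era and dispatches to instance() above for http:// and https:// paths):

# Usage sketch; not part of this file.
from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), 'http://example.com/repo')  # hypothetical URL
print repo.capabilities  # fetched lazily via _fetchcaps() / the capabilities property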