##// END OF EJS Templates
py3: conditionalize SocketServer import...
Pulkit Goyal -
r29433:33770d2b default
parent child Browse files
Show More
@@ -1,707 +1,708 b''
1 # chgserver.py - command server extension for cHg
1 # chgserver.py - command server extension for cHg
2 #
2 #
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """command server extension for cHg (EXPERIMENTAL)
8 """command server extension for cHg (EXPERIMENTAL)
9
9
10 'S' channel (read/write)
10 'S' channel (read/write)
11 propagate ui.system() request to client
11 propagate ui.system() request to client
12
12
13 'attachio' command
13 'attachio' command
14 attach client's stdio passed by sendmsg()
14 attach client's stdio passed by sendmsg()
15
15
16 'chdir' command
16 'chdir' command
17 change current directory
17 change current directory
18
18
19 'getpager' command
19 'getpager' command
20 checks if pager is enabled and which pager should be executed
20 checks if pager is enabled and which pager should be executed
21
21
22 'setenv' command
22 'setenv' command
23 replace os.environ completely
23 replace os.environ completely
24
24
25 'setumask' command
25 'setumask' command
26 set umask
26 set umask
27
27
28 'validate' command
28 'validate' command
29 reload the config and check if the server is up to date
29 reload the config and check if the server is up to date
30
30
31 Config
31 Config
32 ------
32 ------
33
33
34 ::
34 ::
35
35
36 [chgserver]
36 [chgserver]
37 idletimeout = 3600 # seconds, after which an idle server will exit
37 idletimeout = 3600 # seconds, after which an idle server will exit
38 skiphash = False # whether to skip config or env change checks
38 skiphash = False # whether to skip config or env change checks
39 """
39 """
40
40
41 from __future__ import absolute_import
41 from __future__ import absolute_import
42
42
43 import SocketServer
44 import errno
43 import errno
45 import gc
44 import gc
46 import hashlib
45 import hashlib
47 import inspect
46 import inspect
48 import os
47 import os
49 import random
48 import random
50 import re
49 import re
51 import signal
50 import signal
52 import struct
51 import struct
53 import sys
52 import sys
54 import threading
53 import threading
55 import time
54 import time
56 import traceback
55 import traceback
57
56
58 from mercurial.i18n import _
57 from mercurial.i18n import _
59
58
60 from mercurial import (
59 from mercurial import (
61 cmdutil,
60 cmdutil,
62 commands,
61 commands,
63 commandserver,
62 commandserver,
64 dispatch,
63 dispatch,
65 error,
64 error,
66 extensions,
65 extensions,
67 osutil,
66 osutil,
68 util,
67 util,
69 )
68 )
70
69
# util.socketserver resolves to the stdlib module under either of its
# py2/py3 names (SocketServer vs socketserver) — presumably a compat
# shim in mercurial.util; TODO confirm against mercurial/util.py
socketserver = util.socketserver

# Note for extension authors: ONLY specify testedwith = 'internal' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'internal'

# shorthand for the command server's logging helper
_log = commandserver.log
78
79
79 def _hashlist(items):
80 def _hashlist(items):
80 """return sha1 hexdigest for a list"""
81 """return sha1 hexdigest for a list"""
81 return hashlib.sha1(str(items)).hexdigest()
82 return hashlib.sha1(str(items)).hexdigest()
82
83
83 # sensitive config sections affecting confighash
84 # sensitive config sections affecting confighash
84 _configsections = [
85 _configsections = [
85 'alias', # affects global state commands.table
86 'alias', # affects global state commands.table
86 'extdiff', # uisetup will register new commands
87 'extdiff', # uisetup will register new commands
87 'extensions',
88 'extensions',
88 ]
89 ]
89
90
90 # sensitive environment variables affecting confighash
91 # sensitive environment variables affecting confighash
91 _envre = re.compile(r'''\A(?:
92 _envre = re.compile(r'''\A(?:
92 CHGHG
93 CHGHG
93 |HG.*
94 |HG.*
94 |LANG(?:UAGE)?
95 |LANG(?:UAGE)?
95 |LC_.*
96 |LC_.*
96 |LD_.*
97 |LD_.*
97 |PATH
98 |PATH
98 |PYTHON.*
99 |PYTHON.*
99 |TERM(?:INFO)?
100 |TERM(?:INFO)?
100 |TZ
101 |TZ
101 )\Z''', re.X)
102 )\Z''', re.X)
102
103
def _confighash(ui):
    """return a quick hash for detecting config/env changes

    confighash is the hash of sensitive config items and environment variables.

    for chgserver, it is designed that once confighash changes, the server is
    not qualified to serve its client and should redirect the client to a new
    server. different from mtimehash, confighash change will not mark the
    server outdated and exit since the user can have different configs at the
    same time.
    """
    sectionitems = []
    for section in _configsections:
        sectionitems.append(ui.configitems(section))
    sectionhash = _hashlist(sectionitems)
    # use items() rather than py2-only iteritems() so this also runs on py3;
    # on py2 the eager list is tiny (filtered environment) and behaves the same
    envitems = [(k, v) for k, v in os.environ.items() if _envre.match(k)]
    envhash = _hashlist(sorted(envitems))
    return sectionhash[:6] + envhash[:6]
121
122
def _getmtimepaths(ui):
    """get a list of paths that should be checked to detect change

    The list will include:
    - extensions (will not cover all files for complex extensions)
    - mercurial/__version__.py
    - python binary
    """
    mods = [mod for _name, mod in extensions.extensions(ui)]
    try:
        from mercurial import __version__
        mods.append(__version__)
    except ImportError:
        pass
    paths = [sys.executable]
    for mod in mods:
        try:
            paths.append(inspect.getabsfile(mod))
        except TypeError:
            # built-in modules have no source file; just skip them
            pass
    return sorted(set(paths))
143
144
def _mtimehash(paths):
    """return a quick hash for detecting file changes

    mtimehash calls stat on given paths and calculate a hash based on size and
    mtime of each file. mtimehash does not read file content because reading is
    expensive. therefore it's not 100% reliable for detecting content changes.
    it's possible to return different hashes for same file contents.
    it's also possible to return a same hash for different file contents for
    some carefully crafted situation.

    for chgserver, it is designed that once mtimehash changes, the server is
    considered outdated immediately and should no longer provide service.
    """
    def trystat(path):
        try:
            st = os.stat(path)
            return (st.st_mtime, st.st_size)
        except OSError:
            # could be ENOENT, EPERM etc. not fatal in any case
            pass
    # use a list comprehension instead of map(): on py3, map() returns a
    # lazy object whose str() contains its memory address, which would make
    # the resulting hash nondeterministic. py2 behavior is unchanged
    # (map() there returns exactly this list).
    return _hashlist([trystat(path) for path in paths])[:12]
165
166
class hashstate(object):
    """a structure storing confighash, mtimehash, paths used for mtimehash"""

    def __init__(self, confighash, mtimehash, mtimepaths):
        self.confighash = confighash
        self.mtimehash = mtimehash
        self.mtimepaths = mtimepaths

    @staticmethod
    def fromui(ui, mtimepaths=None):
        """Snapshot the current config/file state of ui as a hashstate.

        mtimepaths may be passed in to reuse a previously computed list;
        otherwise it is derived from ui via _getmtimepaths().
        """
        if mtimepaths is None:
            mtimepaths = _getmtimepaths(ui)
        confighash = _confighash(ui)
        mtimehash = _mtimehash(mtimepaths)
        _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash))
        return hashstate(confighash, mtimehash, mtimepaths)
181
182
182 # copied from hgext/pager.py:uisetup()
183 # copied from hgext/pager.py:uisetup()
# copied from hgext/pager.py:uisetup()
def _setuppagercmd(ui, options, cmd):
    """Return the pager command to use for cmd, or None if disabled.

    As a side effect, when a pager will be used, pins ui's formatted and
    interactive settings so output keeps its formatting inside the pager.
    """
    if not ui.formatted():
        return

    pagercmd = ui.config("pager", "pager", os.environ.get("PAGER"))
    usepager = False
    always = util.parsebool(options['pager'])
    auto = options['pager'] == 'auto'

    if not pagercmd:
        pass
    elif always:
        usepager = True
    elif not auto:
        usepager = False
    else:
        # auto mode: consult the pager.attend/ignore configuration
        attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
        attend = ui.configlist('pager', 'attend', attended)
        ignore = ui.configlist('pager', 'ignore')
        cmds, _unused = cmdutil.findcmd(cmd, commands.table)

        for cmd in cmds:
            # a per-command attend-<cmd> setting wins over the lists
            var = 'attend-%s' % cmd
            if ui.config('pager', var):
                usepager = ui.configbool('pager', var)
                break
            if (cmd in attend or
                (cmd not in ignore and not attend)):
                usepager = True
                break

    if usepager:
        ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
        ui.setconfig('ui', 'interactive', False, 'pager')
        return pagercmd
218
219
def _newchgui(srcui, csystem):
    """Return a copy of srcui whose system() is routed through csystem.

    csystem is a callable (cmd, environ, cwd) -> exitcode used to run shell
    commands on the chg client side (see channeledsystem).
    """
    class chgui(srcui.__class__):
        def __init__(self, src=None):
            super(chgui, self).__init__(src)
            if src:
                self._csystem = getattr(src, '_csystem', csystem)
            else:
                self._csystem = csystem

        def system(self, cmd, environ=None, cwd=None, onerr=None,
                   errprefix=None):
            # fallback to the original system method if the output needs to be
            # captured (to self._buffers), or the output stream is not stdout
            # (e.g. stderr, cStringIO), because the chg client is not aware of
            # these situations and will behave differently (write to stdout).
            if (any(s[1] for s in self._bufferstates)
                or not util.safehasattr(self.fout, 'fileno')
                or self.fout.fileno() != sys.stdout.fileno()):
                return super(chgui, self).system(cmd, environ, cwd, onerr,
                                                 errprefix)
            # copied from mercurial/util.py:system()
            self.flush()
            def py2shell(val):
                # render python values the way a shell environment expects
                if val is None or val is False:
                    return '0'
                if val is True:
                    return '1'
                return str(val)
            env = os.environ.copy()
            if environ:
                # items() (not py2-only iteritems()) so this also runs on py3
                env.update((k, py2shell(v)) for k, v in environ.items())
            env['HG'] = util.hgexecutable()
            rc = self._csystem(cmd, env, cwd)
            if rc and onerr:
                errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
                                    util.explainexit(rc)[0])
                if errprefix:
                    errmsg = '%s: %s' % (errprefix, errmsg)
                raise onerr(errmsg)
            return rc

    return chgui(srcui)
261
262
def _loadnewui(srcui, args):
    """Build a fresh (ui, lui) pair reflecting args and on-disk config.

    Copies srcui's stdio, environ and client-system hook, replays its
    dynamic (command-line) configuration, then loads working-directory and
    repository config the same way dispatch.py would.
    """
    newui = srcui.__class__()
    for attr in ['fin', 'fout', 'ferr', 'environ']:
        setattr(newui, attr, getattr(srcui, attr))
    if util.safehasattr(srcui, '_csystem'):
        newui._csystem = srcui._csystem

    # internal config: extensions.chgserver
    newui.setconfig('extensions', 'chgserver',
                    srcui.config('extensions', 'chgserver'), '--config')

    # command line args
    args = args[:]
    dispatch._parseconfig(newui, dispatch._earlygetopt(['--config'], args))

    # stolen from tortoisehg.util.copydynamicconfig()
    for section, name, value in srcui.walkconfig():
        source = srcui.configsource(section, name)
        if ':' in source or source == '--config':
            # path:line or command line
            continue
        if source == 'none':
            # ui.configsource returns 'none' by default
            source = ''
        newui.setconfig(section, name, value, source)

    # load wd and repo config, copied from dispatch.py
    cwds = dispatch._earlygetopt(['--cwd'], args)
    cwd = cwds and os.path.realpath(cwds[-1]) or None
    rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args)
    path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)

    return (newui, newlui)
295
296
class channeledsystem(object):
    """Propagate ui.system() request in the following format:

    payload length (unsigned int),
    cmd, '\0',
    cwd, '\0',
    envkey, '=', val, '\0',
    ...
    envkey, '=', val

    and waits:

    exitcode length (unsigned int),
    exitcode (int)
    """
    def __init__(self, in_, out, channel):
        # in_/out are the pipes shared with the client; channel is the
        # one-byte channel identifier (e.g. 'S') prefixed to each request
        self.in_ = in_
        self.out = out
        self.channel = channel

    def __call__(self, cmd, environ, cwd):
        """Send cmd to the client for execution and return its exit code.

        Raises error.Abort if the client's response is malformed.
        """
        args = [util.quotecommand(cmd), os.path.abspath(cwd or '.')]
        # items() (not py2-only iteritems()) so this also runs on py3
        args.extend('%s=%s' % (k, v) for k, v in environ.items())
        data = '\0'.join(args)
        self.out.write(struct.pack('>cI', self.channel, len(data)))
        self.out.write(data)
        self.out.flush()

        length = self.in_.read(4)
        length, = struct.unpack('>I', length)
        if length != 4:
            # exit code payload must be exactly one 4-byte int
            raise error.Abort(_('invalid response'))
        rc, = struct.unpack('>i', self.in_.read(4))
        return rc
330
331
331 _iochannels = [
332 _iochannels = [
332 # server.ch, ui.fp, mode
333 # server.ch, ui.fp, mode
333 ('cin', 'fin', 'rb'),
334 ('cin', 'fin', 'rb'),
334 ('cout', 'fout', 'wb'),
335 ('cout', 'fout', 'wb'),
335 ('cerr', 'ferr', 'wb'),
336 ('cerr', 'ferr', 'wb'),
336 ]
337 ]
337
338
class chgcmdserver(commandserver.server):
    """Command server serving one chg client connected via a unix socket."""

    def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress):
        super(chgcmdserver, self).__init__(
            _newchgui(ui, channeledsystem(fin, fout, 'S')), repo, fin, fout)
        self.clientsock = sock
        self._oldios = []  # original (self.ch, ui.fp, fd) before "attachio"
        self.hashstate = hashstate
        self.baseaddress = baseaddress
        if hashstate is not None:
            # only advertise "validate" when config/mtime hashes are tracked
            self.capabilities = self.capabilities.copy()
            self.capabilities['validate'] = chgcmdserver.validate

    def cleanup(self):
        # dispatch._runcatch() does not flush outputs if exception is not
        # handled by dispatch._dispatch()
        self.ui.flush()
        self._restoreio()

    def attachio(self):
        """Attach to client's stdio passed via unix domain socket; all
        channels except cresult will no longer be used
        """
        # tell client to sendmsg() with 1-byte payload, which makes it
        # distinctive from "attachio\n" command consumed by client.read()
        self.clientsock.sendall(struct.pack('>cI', 'I', 1))
        clientfds = osutil.recvfds(self.clientsock.fileno())
        _log('received fds: %r\n' % clientfds)

        ui = self.ui
        ui.flush()
        first = self._saveio()
        for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
            assert fd > 0
            # point our existing stdio fds at the client's descriptors
            fp = getattr(ui, fn)
            os.dup2(fd, fp.fileno())
            os.close(fd)
            if not first:
                continue
            # reset buffering mode when client is first attached. as we want
            # to see output immediately on pager, the mode stays unchanged
            # when client re-attached. ferr is unchanged because it should
            # be unbuffered no matter if it is a tty or not.
            if fn == 'ferr':
                newfp = fp
            else:
                # make it line buffered explicitly because the default is
                # decided on first write(), where fout could be a pager.
                if fp.isatty():
                    bufsize = 1  # line buffered
                else:
                    bufsize = -1  # system default
                newfp = os.fdopen(fp.fileno(), mode, bufsize)
            setattr(ui, fn, newfp)
            setattr(self, cn, newfp)

        # report how many fds were attached back on the result channel
        self.cresult.write(struct.pack('>i', len(clientfds)))

    def _saveio(self):
        # remember the pre-attachio channels/files/fds; returns True only on
        # the first save so attachio knows whether to reset buffering
        if self._oldios:
            return False
        ui = self.ui
        for cn, fn, _mode in _iochannels:
            ch = getattr(self, cn)
            fp = getattr(ui, fn)
            fd = os.dup(fp.fileno())
            self._oldios.append((ch, fp, fd))
        return True

    def _restoreio(self):
        # undo attachio(): reinstate the saved channels, files and fds
        ui = self.ui
        for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
            newfp = getattr(ui, fn)
            # close newfp while it's associated with client; otherwise it
            # would be closed when newfp is deleted
            if newfp is not fp:
                newfp.close()
            # restore original fd: fp is open again
            os.dup2(fd, fp.fileno())
            os.close(fd)
            setattr(self, cn, ch)
            setattr(ui, fn, fp)
        del self._oldios[:]

    def validate(self):
        """Reload the config and check if the server is up to date

        Read a list of '\0' separated arguments.
        Write a non-empty list of '\0' separated instruction strings or '\0'
        if the list is empty.
        An instruction string could be either:
            - "unlink $path", the client should unlink the path to stop the
              outdated server.
            - "redirect $path", the client should attempt to connect to $path
              first. If it does not work, start a new server. It implies
              "reconnect".
            - "exit $n", the client should exit directly with code n.
              This may happen if we cannot parse the config.
            - "reconnect", the client should close the connection and
              reconnect.
        If neither "reconnect" nor "redirect" is included in the instruction
        list, the client can continue with this server after completing all
        the instructions.
        """
        args = self._readlist()
        try:
            self.ui, lui = _loadnewui(self.ui, args)
        except error.ParseError as inst:
            dispatch._formatparse(self.ui.warn, inst)
            self.ui.flush()
            self.cresult.write('exit 255')
            return
        newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
        insts = []
        if newhash.mtimehash != self.hashstate.mtimehash:
            addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
            insts.append('unlink %s' % addr)
            # mtimehash is empty if one or more extensions fail to load.
            # to be compatible with hg, still serve the client this time.
            if self.hashstate.mtimehash:
                insts.append('reconnect')
        if newhash.confighash != self.hashstate.confighash:
            addr = _hashaddress(self.baseaddress, newhash.confighash)
            insts.append('redirect %s' % addr)
        _log('validate: %s\n' % insts)
        self.cresult.write('\0'.join(insts) or '\0')

    def chdir(self):
        """Change current directory

        Note that the behavior of --cwd option is bit different from this.
        It does not affect --config parameter.
        """
        path = self._readstr()
        if not path:
            return
        _log('chdir to %r\n' % path)
        os.chdir(path)

    def setumask(self):
        """Change umask"""
        mask = struct.unpack('>I', self._read(4))[0]
        _log('setumask %r\n' % mask)
        os.umask(mask)

    def getpager(self):
        """Read cmdargs and write pager command to r-channel if enabled

        If pager isn't enabled, this writes '\0' because channeledoutput
        does not allow to write empty data.
        """
        args = self._readlist()
        try:
            cmd, _func, args, options, _cmdoptions = dispatch._parse(self.ui,
                                                                     args)
        except (error.Abort, error.AmbiguousCommand, error.CommandError,
                error.UnknownCommand):
            cmd = None
            options = {}
        if not cmd or 'pager' not in options:
            self.cresult.write('\0')
            return

        pagercmd = _setuppagercmd(self.ui, options, cmd)
        if pagercmd:
            # Python's SIGPIPE is SIG_IGN by default. change to SIG_DFL so
            # we can exit if the pipe to the pager is closed
            if util.safehasattr(signal, 'SIGPIPE') and \
                signal.getsignal(signal.SIGPIPE) == signal.SIG_IGN:
                signal.signal(signal.SIGPIPE, signal.SIG_DFL)
            self.cresult.write(pagercmd)
        else:
            self.cresult.write('\0')

    def setenv(self):
        """Clear and update os.environ

        Note that not all variables can make an effect on the running process.
        """
        l = self._readlist()
        try:
            newenv = dict(s.split('=', 1) for s in l)
        except ValueError:
            raise ValueError('unexpected value in setenv request')
        _log('setenv: %r\n' % sorted(newenv.keys()))
        os.environ.clear()
        os.environ.update(newenv)

    # commands exposed to the chg client, on top of the base server's set
    capabilities = commandserver.server.capabilities.copy()
    capabilities.update({'attachio': attachio,
                         'chdir': chdir,
                         'getpager': getpager,
                         'setenv': setenv,
                         'setumask': setumask})
531
532
532 # copied from mercurial/commandserver.py
533 # copied from mercurial/commandserver.py
533 class _requesthandler(SocketServer.StreamRequestHandler):
534 class _requesthandler(socketserver.StreamRequestHandler):
534 def handle(self):
535 def handle(self):
535 # use a different process group from the master process, making this
536 # use a different process group from the master process, making this
536 # process pass kernel "is_current_pgrp_orphaned" check so signals like
537 # process pass kernel "is_current_pgrp_orphaned" check so signals like
537 # SIGTSTP, SIGTTIN, SIGTTOU are not ignored.
538 # SIGTSTP, SIGTTIN, SIGTTOU are not ignored.
538 os.setpgid(0, 0)
539 os.setpgid(0, 0)
539 # change random state otherwise forked request handlers would have a
540 # change random state otherwise forked request handlers would have a
540 # same state inherited from parent.
541 # same state inherited from parent.
541 random.seed()
542 random.seed()
542 ui = self.server.ui
543 ui = self.server.ui
543 repo = self.server.repo
544 repo = self.server.repo
544 sv = None
545 sv = None
545 try:
546 try:
546 sv = chgcmdserver(ui, repo, self.rfile, self.wfile, self.connection,
547 sv = chgcmdserver(ui, repo, self.rfile, self.wfile, self.connection,
547 self.server.hashstate, self.server.baseaddress)
548 self.server.hashstate, self.server.baseaddress)
548 try:
549 try:
549 sv.serve()
550 sv.serve()
550 # handle exceptions that may be raised by command server. most of
551 # handle exceptions that may be raised by command server. most of
551 # known exceptions are caught by dispatch.
552 # known exceptions are caught by dispatch.
552 except error.Abort as inst:
553 except error.Abort as inst:
553 ui.warn(_('abort: %s\n') % inst)
554 ui.warn(_('abort: %s\n') % inst)
554 except IOError as inst:
555 except IOError as inst:
555 if inst.errno != errno.EPIPE:
556 if inst.errno != errno.EPIPE:
556 raise
557 raise
557 except KeyboardInterrupt:
558 except KeyboardInterrupt:
558 pass
559 pass
559 finally:
560 finally:
560 sv.cleanup()
561 sv.cleanup()
561 except: # re-raises
562 except: # re-raises
562 # also write traceback to error channel. otherwise client cannot
563 # also write traceback to error channel. otherwise client cannot
563 # see it because it is written to server's stderr by default.
564 # see it because it is written to server's stderr by default.
564 if sv:
565 if sv:
565 cerr = sv.cerr
566 cerr = sv.cerr
566 else:
567 else:
567 cerr = commandserver.channeledoutput(self.wfile, 'e')
568 cerr = commandserver.channeledoutput(self.wfile, 'e')
568 traceback.print_exc(file=cerr)
569 traceback.print_exc(file=cerr)
569 raise
570 raise
570 finally:
571 finally:
571 # trigger __del__ since ForkingMixIn uses os._exit
572 # trigger __del__ since ForkingMixIn uses os._exit
572 gc.collect()
573 gc.collect()
573
574
574 def _tempaddress(address):
575 def _tempaddress(address):
575 return '%s.%d.tmp' % (address, os.getpid())
576 return '%s.%d.tmp' % (address, os.getpid())
576
577
577 def _hashaddress(address, hashstr):
578 def _hashaddress(address, hashstr):
578 return '%s-%s' % (address, hashstr)
579 return '%s-%s' % (address, hashstr)
579
580
580 class AutoExitMixIn: # use old-style to comply with SocketServer design
581 class AutoExitMixIn: # use old-style to comply with SocketServer design
581 lastactive = time.time()
582 lastactive = time.time()
582 idletimeout = 3600 # default 1 hour
583 idletimeout = 3600 # default 1 hour
583
584
584 def startautoexitthread(self):
585 def startautoexitthread(self):
585 # note: the auto-exit check here is cheap enough to not use a thread,
586 # note: the auto-exit check here is cheap enough to not use a thread,
586 # be done in serve_forever. however SocketServer is hook-unfriendly,
587 # be done in serve_forever. however SocketServer is hook-unfriendly,
587 # you simply cannot hook serve_forever without copying a lot of code.
588 # you simply cannot hook serve_forever without copying a lot of code.
588 # besides, serve_forever's docstring suggests using thread.
589 # besides, serve_forever's docstring suggests using thread.
589 thread = threading.Thread(target=self._autoexitloop)
590 thread = threading.Thread(target=self._autoexitloop)
590 thread.daemon = True
591 thread.daemon = True
591 thread.start()
592 thread.start()
592
593
593 def _autoexitloop(self, interval=1):
594 def _autoexitloop(self, interval=1):
594 while True:
595 while True:
595 time.sleep(interval)
596 time.sleep(interval)
596 if not self.issocketowner():
597 if not self.issocketowner():
597 _log('%s is not owned, exiting.\n' % self.server_address)
598 _log('%s is not owned, exiting.\n' % self.server_address)
598 break
599 break
599 if time.time() - self.lastactive > self.idletimeout:
600 if time.time() - self.lastactive > self.idletimeout:
600 _log('being idle too long. exiting.\n')
601 _log('being idle too long. exiting.\n')
601 break
602 break
602 self.shutdown()
603 self.shutdown()
603
604
604 def process_request(self, request, address):
605 def process_request(self, request, address):
605 self.lastactive = time.time()
606 self.lastactive = time.time()
606 return SocketServer.ForkingMixIn.process_request(
607 return socketserver.ForkingMixIn.process_request(
607 self, request, address)
608 self, request, address)
608
609
609 def server_bind(self):
610 def server_bind(self):
610 # use a unique temp address so we can stat the file and do ownership
611 # use a unique temp address so we can stat the file and do ownership
611 # check later
612 # check later
612 tempaddress = _tempaddress(self.server_address)
613 tempaddress = _tempaddress(self.server_address)
613 # use relative path instead of full path at bind() if possible, since
614 # use relative path instead of full path at bind() if possible, since
614 # AF_UNIX path has very small length limit (107 chars) on common
615 # AF_UNIX path has very small length limit (107 chars) on common
615 # platforms (see sys/un.h)
616 # platforms (see sys/un.h)
616 dirname, basename = os.path.split(tempaddress)
617 dirname, basename = os.path.split(tempaddress)
617 bakwdfd = None
618 bakwdfd = None
618 if dirname:
619 if dirname:
619 bakwdfd = os.open('.', os.O_DIRECTORY)
620 bakwdfd = os.open('.', os.O_DIRECTORY)
620 os.chdir(dirname)
621 os.chdir(dirname)
621 self.socket.bind(basename)
622 self.socket.bind(basename)
622 self._socketstat = os.stat(basename)
623 self._socketstat = os.stat(basename)
623 # rename will replace the old socket file if exists atomically. the
624 # rename will replace the old socket file if exists atomically. the
624 # old server will detect ownership change and exit.
625 # old server will detect ownership change and exit.
625 util.rename(basename, self.server_address)
626 util.rename(basename, self.server_address)
626 if bakwdfd:
627 if bakwdfd:
627 os.fchdir(bakwdfd)
628 os.fchdir(bakwdfd)
628 os.close(bakwdfd)
629 os.close(bakwdfd)
629
630
630 def issocketowner(self):
631 def issocketowner(self):
631 try:
632 try:
632 stat = os.stat(self.server_address)
633 stat = os.stat(self.server_address)
633 return (stat.st_ino == self._socketstat.st_ino and
634 return (stat.st_ino == self._socketstat.st_ino and
634 stat.st_mtime == self._socketstat.st_mtime)
635 stat.st_mtime == self._socketstat.st_mtime)
635 except OSError:
636 except OSError:
636 return False
637 return False
637
638
638 def unlinksocketfile(self):
639 def unlinksocketfile(self):
639 if not self.issocketowner():
640 if not self.issocketowner():
640 return
641 return
641 # it is possible to have a race condition here that we may
642 # it is possible to have a race condition here that we may
642 # remove another server's socket file. but that's okay
643 # remove another server's socket file. but that's okay
643 # since that server will detect and exit automatically and
644 # since that server will detect and exit automatically and
644 # the client will start a new server on demand.
645 # the client will start a new server on demand.
645 try:
646 try:
646 os.unlink(self.server_address)
647 os.unlink(self.server_address)
647 except OSError as exc:
648 except OSError as exc:
648 if exc.errno != errno.ENOENT:
649 if exc.errno != errno.ENOENT:
649 raise
650 raise
650
651
651 class chgunixservice(commandserver.unixservice):
652 class chgunixservice(commandserver.unixservice):
652 def init(self):
653 def init(self):
653 if self.repo:
654 if self.repo:
654 # one chgserver can serve multiple repos. drop repo infomation
655 # one chgserver can serve multiple repos. drop repo infomation
655 self.ui.setconfig('bundle', 'mainreporoot', '', 'repo')
656 self.ui.setconfig('bundle', 'mainreporoot', '', 'repo')
656 self.repo = None
657 self.repo = None
657 self._inithashstate()
658 self._inithashstate()
658 self._checkextensions()
659 self._checkextensions()
659 class cls(AutoExitMixIn, SocketServer.ForkingMixIn,
660 class cls(AutoExitMixIn, socketserver.ForkingMixIn,
660 SocketServer.UnixStreamServer):
661 socketserver.UnixStreamServer):
661 ui = self.ui
662 ui = self.ui
662 repo = self.repo
663 repo = self.repo
663 hashstate = self.hashstate
664 hashstate = self.hashstate
664 baseaddress = self.baseaddress
665 baseaddress = self.baseaddress
665 self.server = cls(self.address, _requesthandler)
666 self.server = cls(self.address, _requesthandler)
666 self.server.idletimeout = self.ui.configint(
667 self.server.idletimeout = self.ui.configint(
667 'chgserver', 'idletimeout', self.server.idletimeout)
668 'chgserver', 'idletimeout', self.server.idletimeout)
668 self.server.startautoexitthread()
669 self.server.startautoexitthread()
669 self._createsymlink()
670 self._createsymlink()
670
671
671 def _inithashstate(self):
672 def _inithashstate(self):
672 self.baseaddress = self.address
673 self.baseaddress = self.address
673 if self.ui.configbool('chgserver', 'skiphash', False):
674 if self.ui.configbool('chgserver', 'skiphash', False):
674 self.hashstate = None
675 self.hashstate = None
675 return
676 return
676 self.hashstate = hashstate.fromui(self.ui)
677 self.hashstate = hashstate.fromui(self.ui)
677 self.address = _hashaddress(self.address, self.hashstate.confighash)
678 self.address = _hashaddress(self.address, self.hashstate.confighash)
678
679
679 def _checkextensions(self):
680 def _checkextensions(self):
680 if not self.hashstate:
681 if not self.hashstate:
681 return
682 return
682 if extensions.notloaded():
683 if extensions.notloaded():
683 # one or more extensions failed to load. mtimehash becomes
684 # one or more extensions failed to load. mtimehash becomes
684 # meaningless because we do not know the paths of those extensions.
685 # meaningless because we do not know the paths of those extensions.
685 # set mtimehash to an illegal hash value to invalidate the server.
686 # set mtimehash to an illegal hash value to invalidate the server.
686 self.hashstate.mtimehash = ''
687 self.hashstate.mtimehash = ''
687
688
688 def _createsymlink(self):
689 def _createsymlink(self):
689 if self.baseaddress == self.address:
690 if self.baseaddress == self.address:
690 return
691 return
691 tempaddress = _tempaddress(self.baseaddress)
692 tempaddress = _tempaddress(self.baseaddress)
692 os.symlink(os.path.basename(self.address), tempaddress)
693 os.symlink(os.path.basename(self.address), tempaddress)
693 util.rename(tempaddress, self.baseaddress)
694 util.rename(tempaddress, self.baseaddress)
694
695
695 def run(self):
696 def run(self):
696 try:
697 try:
697 self.server.serve_forever()
698 self.server.serve_forever()
698 finally:
699 finally:
699 self.server.unlinksocketfile()
700 self.server.unlinksocketfile()
700
701
701 def uisetup(ui):
702 def uisetup(ui):
702 commandserver._servicemap['chgunix'] = chgunixservice
703 commandserver._servicemap['chgunix'] = chgunixservice
703
704
704 # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
705 # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
705 # start another chg. drop it to avoid possible side effects.
706 # start another chg. drop it to avoid possible side effects.
706 if 'CHGINTERNALMARK' in os.environ:
707 if 'CHGINTERNALMARK' in os.environ:
707 del os.environ['CHGINTERNALMARK']
708 del os.environ['CHGINTERNALMARK']
@@ -1,398 +1,399 b''
1 # commandserver.py - communicate with Mercurial's API over a pipe
1 # commandserver.py - communicate with Mercurial's API over a pipe
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import SocketServer
11 import errno
10 import errno
12 import os
11 import os
13 import struct
12 import struct
14 import sys
13 import sys
15 import traceback
14 import traceback
16
15
17 from .i18n import _
16 from .i18n import _
18 from . import (
17 from . import (
19 encoding,
18 encoding,
20 error,
19 error,
21 util,
20 util,
22 )
21 )
23
22
23 socketserver = util.socketserver
24
24 logfile = None
25 logfile = None
25
26
26 def log(*args):
27 def log(*args):
27 if not logfile:
28 if not logfile:
28 return
29 return
29
30
30 for a in args:
31 for a in args:
31 logfile.write(str(a))
32 logfile.write(str(a))
32
33
33 logfile.flush()
34 logfile.flush()
34
35
35 class channeledoutput(object):
36 class channeledoutput(object):
36 """
37 """
37 Write data to out in the following format:
38 Write data to out in the following format:
38
39
39 data length (unsigned int),
40 data length (unsigned int),
40 data
41 data
41 """
42 """
42 def __init__(self, out, channel):
43 def __init__(self, out, channel):
43 self.out = out
44 self.out = out
44 self.channel = channel
45 self.channel = channel
45
46
46 @property
47 @property
47 def name(self):
48 def name(self):
48 return '<%c-channel>' % self.channel
49 return '<%c-channel>' % self.channel
49
50
50 def write(self, data):
51 def write(self, data):
51 if not data:
52 if not data:
52 return
53 return
53 self.out.write(struct.pack('>cI', self.channel, len(data)))
54 self.out.write(struct.pack('>cI', self.channel, len(data)))
54 self.out.write(data)
55 self.out.write(data)
55 self.out.flush()
56 self.out.flush()
56
57
57 def __getattr__(self, attr):
58 def __getattr__(self, attr):
58 if attr in ('isatty', 'fileno', 'tell', 'seek'):
59 if attr in ('isatty', 'fileno', 'tell', 'seek'):
59 raise AttributeError(attr)
60 raise AttributeError(attr)
60 return getattr(self.out, attr)
61 return getattr(self.out, attr)
61
62
62 class channeledinput(object):
63 class channeledinput(object):
63 """
64 """
64 Read data from in_.
65 Read data from in_.
65
66
66 Requests for input are written to out in the following format:
67 Requests for input are written to out in the following format:
67 channel identifier - 'I' for plain input, 'L' line based (1 byte)
68 channel identifier - 'I' for plain input, 'L' line based (1 byte)
68 how many bytes to send at most (unsigned int),
69 how many bytes to send at most (unsigned int),
69
70
70 The client replies with:
71 The client replies with:
71 data length (unsigned int), 0 meaning EOF
72 data length (unsigned int), 0 meaning EOF
72 data
73 data
73 """
74 """
74
75
75 maxchunksize = 4 * 1024
76 maxchunksize = 4 * 1024
76
77
77 def __init__(self, in_, out, channel):
78 def __init__(self, in_, out, channel):
78 self.in_ = in_
79 self.in_ = in_
79 self.out = out
80 self.out = out
80 self.channel = channel
81 self.channel = channel
81
82
82 @property
83 @property
83 def name(self):
84 def name(self):
84 return '<%c-channel>' % self.channel
85 return '<%c-channel>' % self.channel
85
86
86 def read(self, size=-1):
87 def read(self, size=-1):
87 if size < 0:
88 if size < 0:
88 # if we need to consume all the clients input, ask for 4k chunks
89 # if we need to consume all the clients input, ask for 4k chunks
89 # so the pipe doesn't fill up risking a deadlock
90 # so the pipe doesn't fill up risking a deadlock
90 size = self.maxchunksize
91 size = self.maxchunksize
91 s = self._read(size, self.channel)
92 s = self._read(size, self.channel)
92 buf = s
93 buf = s
93 while s:
94 while s:
94 s = self._read(size, self.channel)
95 s = self._read(size, self.channel)
95 buf += s
96 buf += s
96
97
97 return buf
98 return buf
98 else:
99 else:
99 return self._read(size, self.channel)
100 return self._read(size, self.channel)
100
101
101 def _read(self, size, channel):
102 def _read(self, size, channel):
102 if not size:
103 if not size:
103 return ''
104 return ''
104 assert size > 0
105 assert size > 0
105
106
106 # tell the client we need at most size bytes
107 # tell the client we need at most size bytes
107 self.out.write(struct.pack('>cI', channel, size))
108 self.out.write(struct.pack('>cI', channel, size))
108 self.out.flush()
109 self.out.flush()
109
110
110 length = self.in_.read(4)
111 length = self.in_.read(4)
111 length = struct.unpack('>I', length)[0]
112 length = struct.unpack('>I', length)[0]
112 if not length:
113 if not length:
113 return ''
114 return ''
114 else:
115 else:
115 return self.in_.read(length)
116 return self.in_.read(length)
116
117
117 def readline(self, size=-1):
118 def readline(self, size=-1):
118 if size < 0:
119 if size < 0:
119 size = self.maxchunksize
120 size = self.maxchunksize
120 s = self._read(size, 'L')
121 s = self._read(size, 'L')
121 buf = s
122 buf = s
122 # keep asking for more until there's either no more or
123 # keep asking for more until there's either no more or
123 # we got a full line
124 # we got a full line
124 while s and s[-1] != '\n':
125 while s and s[-1] != '\n':
125 s = self._read(size, 'L')
126 s = self._read(size, 'L')
126 buf += s
127 buf += s
127
128
128 return buf
129 return buf
129 else:
130 else:
130 return self._read(size, 'L')
131 return self._read(size, 'L')
131
132
132 def __iter__(self):
133 def __iter__(self):
133 return self
134 return self
134
135
135 def next(self):
136 def next(self):
136 l = self.readline()
137 l = self.readline()
137 if not l:
138 if not l:
138 raise StopIteration
139 raise StopIteration
139 return l
140 return l
140
141
141 def __getattr__(self, attr):
142 def __getattr__(self, attr):
142 if attr in ('isatty', 'fileno', 'tell', 'seek'):
143 if attr in ('isatty', 'fileno', 'tell', 'seek'):
143 raise AttributeError(attr)
144 raise AttributeError(attr)
144 return getattr(self.in_, attr)
145 return getattr(self.in_, attr)
145
146
146 class server(object):
147 class server(object):
147 """
148 """
148 Listens for commands on fin, runs them and writes the output on a channel
149 Listens for commands on fin, runs them and writes the output on a channel
149 based stream to fout.
150 based stream to fout.
150 """
151 """
151 def __init__(self, ui, repo, fin, fout):
152 def __init__(self, ui, repo, fin, fout):
152 self.cwd = os.getcwd()
153 self.cwd = os.getcwd()
153
154
154 # developer config: cmdserver.log
155 # developer config: cmdserver.log
155 logpath = ui.config("cmdserver", "log", None)
156 logpath = ui.config("cmdserver", "log", None)
156 if logpath:
157 if logpath:
157 global logfile
158 global logfile
158 if logpath == '-':
159 if logpath == '-':
159 # write log on a special 'd' (debug) channel
160 # write log on a special 'd' (debug) channel
160 logfile = channeledoutput(fout, 'd')
161 logfile = channeledoutput(fout, 'd')
161 else:
162 else:
162 logfile = open(logpath, 'a')
163 logfile = open(logpath, 'a')
163
164
164 if repo:
165 if repo:
165 # the ui here is really the repo ui so take its baseui so we don't
166 # the ui here is really the repo ui so take its baseui so we don't
166 # end up with its local configuration
167 # end up with its local configuration
167 self.ui = repo.baseui
168 self.ui = repo.baseui
168 self.repo = repo
169 self.repo = repo
169 self.repoui = repo.ui
170 self.repoui = repo.ui
170 else:
171 else:
171 self.ui = ui
172 self.ui = ui
172 self.repo = self.repoui = None
173 self.repo = self.repoui = None
173
174
174 self.cerr = channeledoutput(fout, 'e')
175 self.cerr = channeledoutput(fout, 'e')
175 self.cout = channeledoutput(fout, 'o')
176 self.cout = channeledoutput(fout, 'o')
176 self.cin = channeledinput(fin, fout, 'I')
177 self.cin = channeledinput(fin, fout, 'I')
177 self.cresult = channeledoutput(fout, 'r')
178 self.cresult = channeledoutput(fout, 'r')
178
179
179 self.client = fin
180 self.client = fin
180
181
181 def _read(self, size):
182 def _read(self, size):
182 if not size:
183 if not size:
183 return ''
184 return ''
184
185
185 data = self.client.read(size)
186 data = self.client.read(size)
186
187
187 # is the other end closed?
188 # is the other end closed?
188 if not data:
189 if not data:
189 raise EOFError
190 raise EOFError
190
191
191 return data
192 return data
192
193
193 def _readstr(self):
194 def _readstr(self):
194 """read a string from the channel
195 """read a string from the channel
195
196
196 format:
197 format:
197 data length (uint32), data
198 data length (uint32), data
198 """
199 """
199 length = struct.unpack('>I', self._read(4))[0]
200 length = struct.unpack('>I', self._read(4))[0]
200 if not length:
201 if not length:
201 return ''
202 return ''
202 return self._read(length)
203 return self._read(length)
203
204
204 def _readlist(self):
205 def _readlist(self):
205 """read a list of NULL separated strings from the channel"""
206 """read a list of NULL separated strings from the channel"""
206 s = self._readstr()
207 s = self._readstr()
207 if s:
208 if s:
208 return s.split('\0')
209 return s.split('\0')
209 else:
210 else:
210 return []
211 return []
211
212
212 def runcommand(self):
213 def runcommand(self):
213 """ reads a list of \0 terminated arguments, executes
214 """ reads a list of \0 terminated arguments, executes
214 and writes the return code to the result channel """
215 and writes the return code to the result channel """
215 from . import dispatch # avoid cycle
216 from . import dispatch # avoid cycle
216
217
217 args = self._readlist()
218 args = self._readlist()
218
219
219 # copy the uis so changes (e.g. --config or --verbose) don't
220 # copy the uis so changes (e.g. --config or --verbose) don't
220 # persist between requests
221 # persist between requests
221 copiedui = self.ui.copy()
222 copiedui = self.ui.copy()
222 uis = [copiedui]
223 uis = [copiedui]
223 if self.repo:
224 if self.repo:
224 self.repo.baseui = copiedui
225 self.repo.baseui = copiedui
225 # clone ui without using ui.copy because this is protected
226 # clone ui without using ui.copy because this is protected
226 repoui = self.repoui.__class__(self.repoui)
227 repoui = self.repoui.__class__(self.repoui)
227 repoui.copy = copiedui.copy # redo copy protection
228 repoui.copy = copiedui.copy # redo copy protection
228 uis.append(repoui)
229 uis.append(repoui)
229 self.repo.ui = self.repo.dirstate._ui = repoui
230 self.repo.ui = self.repo.dirstate._ui = repoui
230 self.repo.invalidateall()
231 self.repo.invalidateall()
231
232
232 for ui in uis:
233 for ui in uis:
233 ui.resetstate()
234 ui.resetstate()
234 # any kind of interaction must use server channels, but chg may
235 # any kind of interaction must use server channels, but chg may
235 # replace channels by fully functional tty files. so nontty is
236 # replace channels by fully functional tty files. so nontty is
236 # enforced only if cin is a channel.
237 # enforced only if cin is a channel.
237 if not util.safehasattr(self.cin, 'fileno'):
238 if not util.safehasattr(self.cin, 'fileno'):
238 ui.setconfig('ui', 'nontty', 'true', 'commandserver')
239 ui.setconfig('ui', 'nontty', 'true', 'commandserver')
239
240
240 req = dispatch.request(args[:], copiedui, self.repo, self.cin,
241 req = dispatch.request(args[:], copiedui, self.repo, self.cin,
241 self.cout, self.cerr)
242 self.cout, self.cerr)
242
243
243 ret = (dispatch.dispatch(req) or 0) & 255 # might return None
244 ret = (dispatch.dispatch(req) or 0) & 255 # might return None
244
245
245 # restore old cwd
246 # restore old cwd
246 if '--cwd' in args:
247 if '--cwd' in args:
247 os.chdir(self.cwd)
248 os.chdir(self.cwd)
248
249
249 self.cresult.write(struct.pack('>i', int(ret)))
250 self.cresult.write(struct.pack('>i', int(ret)))
250
251
251 def getencoding(self):
252 def getencoding(self):
252 """ writes the current encoding to the result channel """
253 """ writes the current encoding to the result channel """
253 self.cresult.write(encoding.encoding)
254 self.cresult.write(encoding.encoding)
254
255
255 def serveone(self):
256 def serveone(self):
256 cmd = self.client.readline()[:-1]
257 cmd = self.client.readline()[:-1]
257 if cmd:
258 if cmd:
258 handler = self.capabilities.get(cmd)
259 handler = self.capabilities.get(cmd)
259 if handler:
260 if handler:
260 handler(self)
261 handler(self)
261 else:
262 else:
262 # clients are expected to check what commands are supported by
263 # clients are expected to check what commands are supported by
263 # looking at the servers capabilities
264 # looking at the servers capabilities
264 raise error.Abort(_('unknown command %s') % cmd)
265 raise error.Abort(_('unknown command %s') % cmd)
265
266
266 return cmd != ''
267 return cmd != ''
267
268
268 capabilities = {'runcommand' : runcommand,
269 capabilities = {'runcommand' : runcommand,
269 'getencoding' : getencoding}
270 'getencoding' : getencoding}
270
271
271 def serve(self):
272 def serve(self):
272 hellomsg = 'capabilities: ' + ' '.join(sorted(self.capabilities))
273 hellomsg = 'capabilities: ' + ' '.join(sorted(self.capabilities))
273 hellomsg += '\n'
274 hellomsg += '\n'
274 hellomsg += 'encoding: ' + encoding.encoding
275 hellomsg += 'encoding: ' + encoding.encoding
275 hellomsg += '\n'
276 hellomsg += '\n'
276 hellomsg += 'pid: %d' % util.getpid()
277 hellomsg += 'pid: %d' % util.getpid()
277
278
278 # write the hello msg in -one- chunk
279 # write the hello msg in -one- chunk
279 self.cout.write(hellomsg)
280 self.cout.write(hellomsg)
280
281
281 try:
282 try:
282 while self.serveone():
283 while self.serveone():
283 pass
284 pass
284 except EOFError:
285 except EOFError:
285 # we'll get here if the client disconnected while we were reading
286 # we'll get here if the client disconnected while we were reading
286 # its request
287 # its request
287 return 1
288 return 1
288
289
289 return 0
290 return 0
290
291
291 def _protectio(ui):
292 def _protectio(ui):
292 """ duplicates streams and redirect original to null if ui uses stdio """
293 """ duplicates streams and redirect original to null if ui uses stdio """
293 ui.flush()
294 ui.flush()
294 newfiles = []
295 newfiles = []
295 nullfd = os.open(os.devnull, os.O_RDWR)
296 nullfd = os.open(os.devnull, os.O_RDWR)
296 for f, sysf, mode in [(ui.fin, sys.stdin, 'rb'),
297 for f, sysf, mode in [(ui.fin, sys.stdin, 'rb'),
297 (ui.fout, sys.stdout, 'wb')]:
298 (ui.fout, sys.stdout, 'wb')]:
298 if f is sysf:
299 if f is sysf:
299 newfd = os.dup(f.fileno())
300 newfd = os.dup(f.fileno())
300 os.dup2(nullfd, f.fileno())
301 os.dup2(nullfd, f.fileno())
301 f = os.fdopen(newfd, mode)
302 f = os.fdopen(newfd, mode)
302 newfiles.append(f)
303 newfiles.append(f)
303 os.close(nullfd)
304 os.close(nullfd)
304 return tuple(newfiles)
305 return tuple(newfiles)
305
306
306 def _restoreio(ui, fin, fout):
307 def _restoreio(ui, fin, fout):
307 """ restores streams from duplicated ones """
308 """ restores streams from duplicated ones """
308 ui.flush()
309 ui.flush()
309 for f, uif in [(fin, ui.fin), (fout, ui.fout)]:
310 for f, uif in [(fin, ui.fin), (fout, ui.fout)]:
310 if f is not uif:
311 if f is not uif:
311 os.dup2(f.fileno(), uif.fileno())
312 os.dup2(f.fileno(), uif.fileno())
312 f.close()
313 f.close()
313
314
314 class pipeservice(object):
315 class pipeservice(object):
315 def __init__(self, ui, repo, opts):
316 def __init__(self, ui, repo, opts):
316 self.ui = ui
317 self.ui = ui
317 self.repo = repo
318 self.repo = repo
318
319
319 def init(self):
320 def init(self):
320 pass
321 pass
321
322
322 def run(self):
323 def run(self):
323 ui = self.ui
324 ui = self.ui
324 # redirect stdio to null device so that broken extensions or in-process
325 # redirect stdio to null device so that broken extensions or in-process
325 # hooks will never cause corruption of channel protocol.
326 # hooks will never cause corruption of channel protocol.
326 fin, fout = _protectio(ui)
327 fin, fout = _protectio(ui)
327 try:
328 try:
328 sv = server(ui, self.repo, fin, fout)
329 sv = server(ui, self.repo, fin, fout)
329 return sv.serve()
330 return sv.serve()
330 finally:
331 finally:
331 _restoreio(ui, fin, fout)
332 _restoreio(ui, fin, fout)
332
333
333 class _requesthandler(SocketServer.StreamRequestHandler):
334 class _requesthandler(socketserver.StreamRequestHandler):
334 def handle(self):
335 def handle(self):
335 ui = self.server.ui
336 ui = self.server.ui
336 repo = self.server.repo
337 repo = self.server.repo
337 sv = None
338 sv = None
338 try:
339 try:
339 sv = server(ui, repo, self.rfile, self.wfile)
340 sv = server(ui, repo, self.rfile, self.wfile)
340 try:
341 try:
341 sv.serve()
342 sv.serve()
342 # handle exceptions that may be raised by command server. most of
343 # handle exceptions that may be raised by command server. most of
343 # known exceptions are caught by dispatch.
344 # known exceptions are caught by dispatch.
344 except error.Abort as inst:
345 except error.Abort as inst:
345 ui.warn(_('abort: %s\n') % inst)
346 ui.warn(_('abort: %s\n') % inst)
346 except IOError as inst:
347 except IOError as inst:
347 if inst.errno != errno.EPIPE:
348 if inst.errno != errno.EPIPE:
348 raise
349 raise
349 except KeyboardInterrupt:
350 except KeyboardInterrupt:
350 pass
351 pass
351 except: # re-raises
352 except: # re-raises
352 # also write traceback to error channel. otherwise client cannot
353 # also write traceback to error channel. otherwise client cannot
353 # see it because it is written to server's stderr by default.
354 # see it because it is written to server's stderr by default.
354 if sv:
355 if sv:
355 cerr = sv.cerr
356 cerr = sv.cerr
356 else:
357 else:
357 cerr = channeledoutput(self.wfile, 'e')
358 cerr = channeledoutput(self.wfile, 'e')
358 traceback.print_exc(file=cerr)
359 traceback.print_exc(file=cerr)
359 raise
360 raise
360
361
361 class unixservice(object):
362 class unixservice(object):
362 """
363 """
363 Listens on unix domain socket and forks server per connection
364 Listens on unix domain socket and forks server per connection
364 """
365 """
365 def __init__(self, ui, repo, opts):
366 def __init__(self, ui, repo, opts):
366 self.ui = ui
367 self.ui = ui
367 self.repo = repo
368 self.repo = repo
368 self.address = opts['address']
369 self.address = opts['address']
369 if not util.safehasattr(SocketServer, 'UnixStreamServer'):
370 if not util.safehasattr(socketserver, 'UnixStreamServer'):
370 raise error.Abort(_('unsupported platform'))
371 raise error.Abort(_('unsupported platform'))
371 if not self.address:
372 if not self.address:
372 raise error.Abort(_('no socket path specified with --address'))
373 raise error.Abort(_('no socket path specified with --address'))
373
374
374 def init(self):
375 def init(self):
375 class cls(SocketServer.ForkingMixIn, SocketServer.UnixStreamServer):
376 class cls(socketserver.ForkingMixIn, socketserver.UnixStreamServer):
376 ui = self.ui
377 ui = self.ui
377 repo = self.repo
378 repo = self.repo
378 self.server = cls(self.address, _requesthandler)
379 self.server = cls(self.address, _requesthandler)
379 self.ui.status(_('listening at %s\n') % self.address)
380 self.ui.status(_('listening at %s\n') % self.address)
380 self.ui.flush() # avoid buffering of status message
381 self.ui.flush() # avoid buffering of status message
381
382
382 def run(self):
383 def run(self):
383 try:
384 try:
384 self.server.serve_forever()
385 self.server.serve_forever()
385 finally:
386 finally:
386 os.unlink(self.address)
387 os.unlink(self.address)
387
388
388 _servicemap = {
389 _servicemap = {
389 'pipe': pipeservice,
390 'pipe': pipeservice,
390 'unix': unixservice,
391 'unix': unixservice,
391 }
392 }
392
393
393 def createservice(ui, repo, opts):
394 def createservice(ui, repo, opts):
394 mode = opts['cmdserver']
395 mode = opts['cmdserver']
395 try:
396 try:
396 return _servicemap[mode](ui, repo, opts)
397 return _servicemap[mode](ui, repo, opts)
397 except KeyError:
398 except KeyError:
398 raise error.Abort(_('unknown mode %s') % mode)
399 raise error.Abort(_('unknown mode %s') % mode)
@@ -1,324 +1,324 b''
1 # hgweb/server.py - The standalone hg web server.
1 # hgweb/server.py - The standalone hg web server.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import BaseHTTPServer
11 import BaseHTTPServer
12 import SocketServer
13 import errno
12 import errno
14 import os
13 import os
15 import socket
14 import socket
16 import sys
15 import sys
17 import traceback
16 import traceback
18
17
19 from ..i18n import _
18 from ..i18n import _
20
19
21 from .. import (
20 from .. import (
22 error,
21 error,
23 util,
22 util,
24 )
23 )
25
24
25 socketserver = util.socketserver
26 urlerr = util.urlerr
26 urlerr = util.urlerr
27 urlreq = util.urlreq
27 urlreq = util.urlreq
28
28
29 from . import (
29 from . import (
30 common,
30 common,
31 )
31 )
32
32
33 def _splitURI(uri):
33 def _splitURI(uri):
34 """Return path and query that has been split from uri
34 """Return path and query that has been split from uri
35
35
36 Just like CGI environment, the path is unquoted, the query is
36 Just like CGI environment, the path is unquoted, the query is
37 not.
37 not.
38 """
38 """
39 if '?' in uri:
39 if '?' in uri:
40 path, query = uri.split('?', 1)
40 path, query = uri.split('?', 1)
41 else:
41 else:
42 path, query = uri, ''
42 path, query = uri, ''
43 return urlreq.unquote(path), query
43 return urlreq.unquote(path), query
44
44
45 class _error_logger(object):
45 class _error_logger(object):
46 def __init__(self, handler):
46 def __init__(self, handler):
47 self.handler = handler
47 self.handler = handler
48 def flush(self):
48 def flush(self):
49 pass
49 pass
50 def write(self, str):
50 def write(self, str):
51 self.writelines(str.split('\n'))
51 self.writelines(str.split('\n'))
52 def writelines(self, seq):
52 def writelines(self, seq):
53 for msg in seq:
53 for msg in seq:
54 self.handler.log_error("HG error: %s", msg)
54 self.handler.log_error("HG error: %s", msg)
55
55
56 class _httprequesthandler(BaseHTTPServer.BaseHTTPRequestHandler):
56 class _httprequesthandler(BaseHTTPServer.BaseHTTPRequestHandler):
57
57
58 url_scheme = 'http'
58 url_scheme = 'http'
59
59
60 @staticmethod
60 @staticmethod
61 def preparehttpserver(httpserver, ssl_cert):
61 def preparehttpserver(httpserver, ssl_cert):
62 """Prepare .socket of new HTTPServer instance"""
62 """Prepare .socket of new HTTPServer instance"""
63 pass
63 pass
64
64
65 def __init__(self, *args, **kargs):
65 def __init__(self, *args, **kargs):
66 self.protocol_version = 'HTTP/1.1'
66 self.protocol_version = 'HTTP/1.1'
67 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
67 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
68
68
69 def _log_any(self, fp, format, *args):
69 def _log_any(self, fp, format, *args):
70 fp.write("%s - - [%s] %s\n" % (self.client_address[0],
70 fp.write("%s - - [%s] %s\n" % (self.client_address[0],
71 self.log_date_time_string(),
71 self.log_date_time_string(),
72 format % args))
72 format % args))
73 fp.flush()
73 fp.flush()
74
74
75 def log_error(self, format, *args):
75 def log_error(self, format, *args):
76 self._log_any(self.server.errorlog, format, *args)
76 self._log_any(self.server.errorlog, format, *args)
77
77
78 def log_message(self, format, *args):
78 def log_message(self, format, *args):
79 self._log_any(self.server.accesslog, format, *args)
79 self._log_any(self.server.accesslog, format, *args)
80
80
81 def log_request(self, code='-', size='-'):
81 def log_request(self, code='-', size='-'):
82 xheaders = []
82 xheaders = []
83 if util.safehasattr(self, 'headers'):
83 if util.safehasattr(self, 'headers'):
84 xheaders = [h for h in self.headers.items()
84 xheaders = [h for h in self.headers.items()
85 if h[0].startswith('x-')]
85 if h[0].startswith('x-')]
86 self.log_message('"%s" %s %s%s',
86 self.log_message('"%s" %s %s%s',
87 self.requestline, str(code), str(size),
87 self.requestline, str(code), str(size),
88 ''.join([' %s:%s' % h for h in sorted(xheaders)]))
88 ''.join([' %s:%s' % h for h in sorted(xheaders)]))
89
89
90 def do_write(self):
90 def do_write(self):
91 try:
91 try:
92 self.do_hgweb()
92 self.do_hgweb()
93 except socket.error as inst:
93 except socket.error as inst:
94 if inst[0] != errno.EPIPE:
94 if inst[0] != errno.EPIPE:
95 raise
95 raise
96
96
97 def do_POST(self):
97 def do_POST(self):
98 try:
98 try:
99 self.do_write()
99 self.do_write()
100 except Exception:
100 except Exception:
101 self._start_response("500 Internal Server Error", [])
101 self._start_response("500 Internal Server Error", [])
102 self._write("Internal Server Error")
102 self._write("Internal Server Error")
103 self._done()
103 self._done()
104 tb = "".join(traceback.format_exception(*sys.exc_info()))
104 tb = "".join(traceback.format_exception(*sys.exc_info()))
105 self.log_error("Exception happened during processing "
105 self.log_error("Exception happened during processing "
106 "request '%s':\n%s", self.path, tb)
106 "request '%s':\n%s", self.path, tb)
107
107
108 def do_GET(self):
108 def do_GET(self):
109 self.do_POST()
109 self.do_POST()
110
110
111 def do_hgweb(self):
111 def do_hgweb(self):
112 path, query = _splitURI(self.path)
112 path, query = _splitURI(self.path)
113
113
114 env = {}
114 env = {}
115 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
115 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
116 env['REQUEST_METHOD'] = self.command
116 env['REQUEST_METHOD'] = self.command
117 env['SERVER_NAME'] = self.server.server_name
117 env['SERVER_NAME'] = self.server.server_name
118 env['SERVER_PORT'] = str(self.server.server_port)
118 env['SERVER_PORT'] = str(self.server.server_port)
119 env['REQUEST_URI'] = self.path
119 env['REQUEST_URI'] = self.path
120 env['SCRIPT_NAME'] = self.server.prefix
120 env['SCRIPT_NAME'] = self.server.prefix
121 env['PATH_INFO'] = path[len(self.server.prefix):]
121 env['PATH_INFO'] = path[len(self.server.prefix):]
122 env['REMOTE_HOST'] = self.client_address[0]
122 env['REMOTE_HOST'] = self.client_address[0]
123 env['REMOTE_ADDR'] = self.client_address[0]
123 env['REMOTE_ADDR'] = self.client_address[0]
124 if query:
124 if query:
125 env['QUERY_STRING'] = query
125 env['QUERY_STRING'] = query
126
126
127 if self.headers.typeheader is None:
127 if self.headers.typeheader is None:
128 env['CONTENT_TYPE'] = self.headers.type
128 env['CONTENT_TYPE'] = self.headers.type
129 else:
129 else:
130 env['CONTENT_TYPE'] = self.headers.typeheader
130 env['CONTENT_TYPE'] = self.headers.typeheader
131 length = self.headers.getheader('content-length')
131 length = self.headers.getheader('content-length')
132 if length:
132 if length:
133 env['CONTENT_LENGTH'] = length
133 env['CONTENT_LENGTH'] = length
134 for header in [h for h in self.headers.keys()
134 for header in [h for h in self.headers.keys()
135 if h not in ('content-type', 'content-length')]:
135 if h not in ('content-type', 'content-length')]:
136 hkey = 'HTTP_' + header.replace('-', '_').upper()
136 hkey = 'HTTP_' + header.replace('-', '_').upper()
137 hval = self.headers.getheader(header)
137 hval = self.headers.getheader(header)
138 hval = hval.replace('\n', '').strip()
138 hval = hval.replace('\n', '').strip()
139 if hval:
139 if hval:
140 env[hkey] = hval
140 env[hkey] = hval
141 env['SERVER_PROTOCOL'] = self.request_version
141 env['SERVER_PROTOCOL'] = self.request_version
142 env['wsgi.version'] = (1, 0)
142 env['wsgi.version'] = (1, 0)
143 env['wsgi.url_scheme'] = self.url_scheme
143 env['wsgi.url_scheme'] = self.url_scheme
144 if env.get('HTTP_EXPECT', '').lower() == '100-continue':
144 if env.get('HTTP_EXPECT', '').lower() == '100-continue':
145 self.rfile = common.continuereader(self.rfile, self.wfile.write)
145 self.rfile = common.continuereader(self.rfile, self.wfile.write)
146
146
147 env['wsgi.input'] = self.rfile
147 env['wsgi.input'] = self.rfile
148 env['wsgi.errors'] = _error_logger(self)
148 env['wsgi.errors'] = _error_logger(self)
149 env['wsgi.multithread'] = isinstance(self.server,
149 env['wsgi.multithread'] = isinstance(self.server,
150 SocketServer.ThreadingMixIn)
150 socketserver.ThreadingMixIn)
151 env['wsgi.multiprocess'] = isinstance(self.server,
151 env['wsgi.multiprocess'] = isinstance(self.server,
152 SocketServer.ForkingMixIn)
152 socketserver.ForkingMixIn)
153 env['wsgi.run_once'] = 0
153 env['wsgi.run_once'] = 0
154
154
155 self.saved_status = None
155 self.saved_status = None
156 self.saved_headers = []
156 self.saved_headers = []
157 self.sent_headers = False
157 self.sent_headers = False
158 self.length = None
158 self.length = None
159 self._chunked = None
159 self._chunked = None
160 for chunk in self.server.application(env, self._start_response):
160 for chunk in self.server.application(env, self._start_response):
161 self._write(chunk)
161 self._write(chunk)
162 if not self.sent_headers:
162 if not self.sent_headers:
163 self.send_headers()
163 self.send_headers()
164 self._done()
164 self._done()
165
165
166 def send_headers(self):
166 def send_headers(self):
167 if not self.saved_status:
167 if not self.saved_status:
168 raise AssertionError("Sending headers before "
168 raise AssertionError("Sending headers before "
169 "start_response() called")
169 "start_response() called")
170 saved_status = self.saved_status.split(None, 1)
170 saved_status = self.saved_status.split(None, 1)
171 saved_status[0] = int(saved_status[0])
171 saved_status[0] = int(saved_status[0])
172 self.send_response(*saved_status)
172 self.send_response(*saved_status)
173 self.length = None
173 self.length = None
174 self._chunked = False
174 self._chunked = False
175 for h in self.saved_headers:
175 for h in self.saved_headers:
176 self.send_header(*h)
176 self.send_header(*h)
177 if h[0].lower() == 'content-length':
177 if h[0].lower() == 'content-length':
178 self.length = int(h[1])
178 self.length = int(h[1])
179 if (self.length is None and
179 if (self.length is None and
180 saved_status[0] != common.HTTP_NOT_MODIFIED):
180 saved_status[0] != common.HTTP_NOT_MODIFIED):
181 self._chunked = (not self.close_connection and
181 self._chunked = (not self.close_connection and
182 self.request_version == "HTTP/1.1")
182 self.request_version == "HTTP/1.1")
183 if self._chunked:
183 if self._chunked:
184 self.send_header('Transfer-Encoding', 'chunked')
184 self.send_header('Transfer-Encoding', 'chunked')
185 else:
185 else:
186 self.send_header('Connection', 'close')
186 self.send_header('Connection', 'close')
187 self.end_headers()
187 self.end_headers()
188 self.sent_headers = True
188 self.sent_headers = True
189
189
190 def _start_response(self, http_status, headers, exc_info=None):
190 def _start_response(self, http_status, headers, exc_info=None):
191 code, msg = http_status.split(None, 1)
191 code, msg = http_status.split(None, 1)
192 code = int(code)
192 code = int(code)
193 self.saved_status = http_status
193 self.saved_status = http_status
194 bad_headers = ('connection', 'transfer-encoding')
194 bad_headers = ('connection', 'transfer-encoding')
195 self.saved_headers = [h for h in headers
195 self.saved_headers = [h for h in headers
196 if h[0].lower() not in bad_headers]
196 if h[0].lower() not in bad_headers]
197 return self._write
197 return self._write
198
198
199 def _write(self, data):
199 def _write(self, data):
200 if not self.saved_status:
200 if not self.saved_status:
201 raise AssertionError("data written before start_response() called")
201 raise AssertionError("data written before start_response() called")
202 elif not self.sent_headers:
202 elif not self.sent_headers:
203 self.send_headers()
203 self.send_headers()
204 if self.length is not None:
204 if self.length is not None:
205 if len(data) > self.length:
205 if len(data) > self.length:
206 raise AssertionError("Content-length header sent, but more "
206 raise AssertionError("Content-length header sent, but more "
207 "bytes than specified are being written.")
207 "bytes than specified are being written.")
208 self.length = self.length - len(data)
208 self.length = self.length - len(data)
209 elif self._chunked and data:
209 elif self._chunked and data:
210 data = '%x\r\n%s\r\n' % (len(data), data)
210 data = '%x\r\n%s\r\n' % (len(data), data)
211 self.wfile.write(data)
211 self.wfile.write(data)
212 self.wfile.flush()
212 self.wfile.flush()
213
213
214 def _done(self):
214 def _done(self):
215 if self._chunked:
215 if self._chunked:
216 self.wfile.write('0\r\n\r\n')
216 self.wfile.write('0\r\n\r\n')
217 self.wfile.flush()
217 self.wfile.flush()
218
218
219 class _httprequesthandlerssl(_httprequesthandler):
219 class _httprequesthandlerssl(_httprequesthandler):
220 """HTTPS handler based on Python's ssl module"""
220 """HTTPS handler based on Python's ssl module"""
221
221
222 url_scheme = 'https'
222 url_scheme = 'https'
223
223
224 @staticmethod
224 @staticmethod
225 def preparehttpserver(httpserver, ssl_cert):
225 def preparehttpserver(httpserver, ssl_cert):
226 try:
226 try:
227 import ssl
227 import ssl
228 ssl.wrap_socket
228 ssl.wrap_socket
229 except ImportError:
229 except ImportError:
230 raise error.Abort(_("SSL support is unavailable"))
230 raise error.Abort(_("SSL support is unavailable"))
231 httpserver.socket = ssl.wrap_socket(
231 httpserver.socket = ssl.wrap_socket(
232 httpserver.socket, server_side=True,
232 httpserver.socket, server_side=True,
233 certfile=ssl_cert, ssl_version=ssl.PROTOCOL_TLSv1)
233 certfile=ssl_cert, ssl_version=ssl.PROTOCOL_TLSv1)
234
234
235 def setup(self):
235 def setup(self):
236 self.connection = self.request
236 self.connection = self.request
237 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
237 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
238 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
238 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
239
239
240 try:
240 try:
241 import threading
241 import threading
242 threading.activeCount() # silence pyflakes and bypass demandimport
242 threading.activeCount() # silence pyflakes and bypass demandimport
243 _mixin = SocketServer.ThreadingMixIn
243 _mixin = socketserver.ThreadingMixIn
244 except ImportError:
244 except ImportError:
245 if util.safehasattr(os, "fork"):
245 if util.safehasattr(os, "fork"):
246 _mixin = SocketServer.ForkingMixIn
246 _mixin = socketserver.ForkingMixIn
247 else:
247 else:
248 class _mixin(object):
248 class _mixin(object):
249 pass
249 pass
250
250
251 def openlog(opt, default):
251 def openlog(opt, default):
252 if opt and opt != '-':
252 if opt and opt != '-':
253 return open(opt, 'a')
253 return open(opt, 'a')
254 return default
254 return default
255
255
256 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
256 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
257
257
258 # SO_REUSEADDR has broken semantics on windows
258 # SO_REUSEADDR has broken semantics on windows
259 if os.name == 'nt':
259 if os.name == 'nt':
260 allow_reuse_address = 0
260 allow_reuse_address = 0
261
261
262 def __init__(self, ui, app, addr, handler, **kwargs):
262 def __init__(self, ui, app, addr, handler, **kwargs):
263 BaseHTTPServer.HTTPServer.__init__(self, addr, handler, **kwargs)
263 BaseHTTPServer.HTTPServer.__init__(self, addr, handler, **kwargs)
264 self.daemon_threads = True
264 self.daemon_threads = True
265 self.application = app
265 self.application = app
266
266
267 handler.preparehttpserver(self, ui.config('web', 'certificate'))
267 handler.preparehttpserver(self, ui.config('web', 'certificate'))
268
268
269 prefix = ui.config('web', 'prefix', '')
269 prefix = ui.config('web', 'prefix', '')
270 if prefix:
270 if prefix:
271 prefix = '/' + prefix.strip('/')
271 prefix = '/' + prefix.strip('/')
272 self.prefix = prefix
272 self.prefix = prefix
273
273
274 alog = openlog(ui.config('web', 'accesslog', '-'), sys.stdout)
274 alog = openlog(ui.config('web', 'accesslog', '-'), sys.stdout)
275 elog = openlog(ui.config('web', 'errorlog', '-'), sys.stderr)
275 elog = openlog(ui.config('web', 'errorlog', '-'), sys.stderr)
276 self.accesslog = alog
276 self.accesslog = alog
277 self.errorlog = elog
277 self.errorlog = elog
278
278
279 self.addr, self.port = self.socket.getsockname()[0:2]
279 self.addr, self.port = self.socket.getsockname()[0:2]
280 self.fqaddr = socket.getfqdn(addr[0])
280 self.fqaddr = socket.getfqdn(addr[0])
281
281
282 class IPv6HTTPServer(MercurialHTTPServer):
282 class IPv6HTTPServer(MercurialHTTPServer):
283 address_family = getattr(socket, 'AF_INET6', None)
283 address_family = getattr(socket, 'AF_INET6', None)
284 def __init__(self, *args, **kwargs):
284 def __init__(self, *args, **kwargs):
285 if self.address_family is None:
285 if self.address_family is None:
286 raise error.RepoError(_('IPv6 is not available on this system'))
286 raise error.RepoError(_('IPv6 is not available on this system'))
287 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
287 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
288
288
289 def create_server(ui, app):
289 def create_server(ui, app):
290
290
291 if ui.config('web', 'certificate'):
291 if ui.config('web', 'certificate'):
292 handler = _httprequesthandlerssl
292 handler = _httprequesthandlerssl
293 else:
293 else:
294 handler = _httprequesthandler
294 handler = _httprequesthandler
295
295
296 if ui.configbool('web', 'ipv6'):
296 if ui.configbool('web', 'ipv6'):
297 cls = IPv6HTTPServer
297 cls = IPv6HTTPServer
298 else:
298 else:
299 cls = MercurialHTTPServer
299 cls = MercurialHTTPServer
300
300
301 # ugly hack due to python issue5853 (for threaded use)
301 # ugly hack due to python issue5853 (for threaded use)
302 try:
302 try:
303 import mimetypes
303 import mimetypes
304 mimetypes.init()
304 mimetypes.init()
305 except UnicodeDecodeError:
305 except UnicodeDecodeError:
306 # Python 2.x's mimetypes module attempts to decode strings
306 # Python 2.x's mimetypes module attempts to decode strings
307 # from Windows' ANSI APIs as ascii (fail), then re-encode them
307 # from Windows' ANSI APIs as ascii (fail), then re-encode them
308 # as ascii (clown fail), because the default Python Unicode
308 # as ascii (clown fail), because the default Python Unicode
309 # codec is hardcoded as ascii.
309 # codec is hardcoded as ascii.
310
310
311 sys.argv # unwrap demand-loader so that reload() works
311 sys.argv # unwrap demand-loader so that reload() works
312 reload(sys) # resurrect sys.setdefaultencoding()
312 reload(sys) # resurrect sys.setdefaultencoding()
313 oldenc = sys.getdefaultencoding()
313 oldenc = sys.getdefaultencoding()
314 sys.setdefaultencoding("latin1") # or any full 8-bit encoding
314 sys.setdefaultencoding("latin1") # or any full 8-bit encoding
315 mimetypes.init()
315 mimetypes.init()
316 sys.setdefaultencoding(oldenc)
316 sys.setdefaultencoding(oldenc)
317
317
318 address = ui.config('web', 'address', '')
318 address = ui.config('web', 'address', '')
319 port = util.getport(ui.config('web', 'port', 8000))
319 port = util.getport(ui.config('web', 'port', 8000))
320 try:
320 try:
321 return cls(ui, app, (address, port), handler)
321 return cls(ui, app, (address, port), handler)
322 except socket.error as inst:
322 except socket.error as inst:
323 raise error.Abort(_("cannot start server at '%s:%d': %s")
323 raise error.Abort(_("cannot start server at '%s:%d': %s")
324 % (address, port, inst.args[1]))
324 % (address, port, inst.args[1]))
@@ -1,145 +1,152 b''
1 # pycompat.py - portability shim for python 3
1 # pycompat.py - portability shim for python 3
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """Mercurial portability shim for python 3.
6 """Mercurial portability shim for python 3.
7
7
8 This contains aliases to hide python version-specific details from the core.
8 This contains aliases to hide python version-specific details from the core.
9 """
9 """
10
10
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 try:
13 try:
14 import cPickle as pickle
14 import cPickle as pickle
15 pickle.dumps
15 pickle.dumps
16 except ImportError:
16 except ImportError:
17 import pickle
17 import pickle
18 pickle.dumps # silence pyflakes
18 pickle.dumps # silence pyflakes
19
19
20 try:
20 try:
21 import SocketServer as socketserver
22 socketserver.ThreadingMixIn
23 except ImportError:
24 import socketserver
25 socketserver.ThreadingMixIn
26
27 try:
21 import xmlrpclib
28 import xmlrpclib
22 xmlrpclib.Transport
29 xmlrpclib.Transport
23 except ImportError:
30 except ImportError:
24 import xmlrpc.client as xmlrpclib
31 import xmlrpc.client as xmlrpclib
25 xmlrpclib.Transport
32 xmlrpclib.Transport
26
33
27 try:
34 try:
28 import urlparse
35 import urlparse
29 urlparse.urlparse
36 urlparse.urlparse
30 except ImportError:
37 except ImportError:
31 import urllib.parse as urlparse
38 import urllib.parse as urlparse
32 urlparse.urlparse
39 urlparse.urlparse
33
40
34 try:
41 try:
35 import cStringIO as io
42 import cStringIO as io
36 stringio = io.StringIO
43 stringio = io.StringIO
37 except ImportError:
44 except ImportError:
38 import io
45 import io
39 stringio = io.StringIO
46 stringio = io.StringIO
40
47
41 try:
48 try:
42 import Queue as _queue
49 import Queue as _queue
43 _queue.Queue
50 _queue.Queue
44 except ImportError:
51 except ImportError:
45 import queue as _queue
52 import queue as _queue
46 empty = _queue.Empty
53 empty = _queue.Empty
47 queue = _queue.Queue
54 queue = _queue.Queue
48
55
49 class _pycompatstub(object):
56 class _pycompatstub(object):
50 pass
57 pass
51
58
52 def _alias(alias, origin, items):
59 def _alias(alias, origin, items):
53 """ populate a _pycompatstub
60 """ populate a _pycompatstub
54
61
55 copies items from origin to alias
62 copies items from origin to alias
56 """
63 """
57 def hgcase(item):
64 def hgcase(item):
58 return item.replace('_', '').lower()
65 return item.replace('_', '').lower()
59 for item in items:
66 for item in items:
60 try:
67 try:
61 setattr(alias, hgcase(item), getattr(origin, item))
68 setattr(alias, hgcase(item), getattr(origin, item))
62 except AttributeError:
69 except AttributeError:
63 pass
70 pass
64
71
65 urlreq = _pycompatstub()
72 urlreq = _pycompatstub()
66 urlerr = _pycompatstub()
73 urlerr = _pycompatstub()
67 try:
74 try:
68 import urllib2
75 import urllib2
69 import urllib
76 import urllib
70 _alias(urlreq, urllib, (
77 _alias(urlreq, urllib, (
71 "addclosehook",
78 "addclosehook",
72 "addinfourl",
79 "addinfourl",
73 "ftpwrapper",
80 "ftpwrapper",
74 "pathname2url",
81 "pathname2url",
75 "quote",
82 "quote",
76 "splitattr",
83 "splitattr",
77 "splitpasswd",
84 "splitpasswd",
78 "splitport",
85 "splitport",
79 "splituser",
86 "splituser",
80 "unquote",
87 "unquote",
81 "url2pathname",
88 "url2pathname",
82 "urlencode",
89 "urlencode",
83 "urlencode",
90 "urlencode",
84 ))
91 ))
85 _alias(urlreq, urllib2, (
92 _alias(urlreq, urllib2, (
86 "AbstractHTTPHandler",
93 "AbstractHTTPHandler",
87 "BaseHandler",
94 "BaseHandler",
88 "build_opener",
95 "build_opener",
89 "FileHandler",
96 "FileHandler",
90 "FTPHandler",
97 "FTPHandler",
91 "HTTPBasicAuthHandler",
98 "HTTPBasicAuthHandler",
92 "HTTPDigestAuthHandler",
99 "HTTPDigestAuthHandler",
93 "HTTPHandler",
100 "HTTPHandler",
94 "HTTPPasswordMgrWithDefaultRealm",
101 "HTTPPasswordMgrWithDefaultRealm",
95 "HTTPSHandler",
102 "HTTPSHandler",
96 "install_opener",
103 "install_opener",
97 "ProxyHandler",
104 "ProxyHandler",
98 "Request",
105 "Request",
99 "urlopen",
106 "urlopen",
100 ))
107 ))
101 _alias(urlerr, urllib2, (
108 _alias(urlerr, urllib2, (
102 "HTTPError",
109 "HTTPError",
103 "URLError",
110 "URLError",
104 ))
111 ))
105
112
106 except ImportError:
113 except ImportError:
107 import urllib.request
114 import urllib.request
108 _alias(urlreq, urllib.request, (
115 _alias(urlreq, urllib.request, (
109 "AbstractHTTPHandler",
116 "AbstractHTTPHandler",
110 "addclosehook",
117 "addclosehook",
111 "addinfourl",
118 "addinfourl",
112 "BaseHandler",
119 "BaseHandler",
113 "build_opener",
120 "build_opener",
114 "FileHandler",
121 "FileHandler",
115 "FTPHandler",
122 "FTPHandler",
116 "ftpwrapper",
123 "ftpwrapper",
117 "HTTPHandler",
124 "HTTPHandler",
118 "HTTPSHandler",
125 "HTTPSHandler",
119 "install_opener",
126 "install_opener",
120 "pathname2url",
127 "pathname2url",
121 "HTTPBasicAuthHandler",
128 "HTTPBasicAuthHandler",
122 "HTTPDigestAuthHandler",
129 "HTTPDigestAuthHandler",
123 "HTTPPasswordMgrWithDefaultRealm",
130 "HTTPPasswordMgrWithDefaultRealm",
124 "ProxyHandler",
131 "ProxyHandler",
125 "quote",
132 "quote",
126 "Request",
133 "Request",
127 "splitattr",
134 "splitattr",
128 "splitpasswd",
135 "splitpasswd",
129 "splitport",
136 "splitport",
130 "splituser",
137 "splituser",
131 "unquote",
138 "unquote",
132 "url2pathname",
139 "url2pathname",
133 "urlopen",
140 "urlopen",
134 ))
141 ))
135 import urllib.error
142 import urllib.error
136 _alias(urlerr, urllib.error, (
143 _alias(urlerr, urllib.error, (
137 "HTTPError",
144 "HTTPError",
138 "URLError",
145 "URLError",
139 ))
146 ))
140
147
141 try:
148 try:
142 xrange
149 xrange
143 except NameError:
150 except NameError:
144 import builtins
151 import builtins
145 builtins.xrange = range
152 builtins.xrange = range
@@ -1,2855 +1,2856 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import collections
20 import collections
21 import datetime
21 import datetime
22 import errno
22 import errno
23 import gc
23 import gc
24 import hashlib
24 import hashlib
25 import imp
25 import imp
26 import os
26 import os
27 import re as remod
27 import re as remod
28 import shutil
28 import shutil
29 import signal
29 import signal
30 import socket
30 import socket
31 import subprocess
31 import subprocess
32 import sys
32 import sys
33 import tempfile
33 import tempfile
34 import textwrap
34 import textwrap
35 import time
35 import time
36 import traceback
36 import traceback
37 import zlib
37 import zlib
38
38
39 from . import (
39 from . import (
40 encoding,
40 encoding,
41 error,
41 error,
42 i18n,
42 i18n,
43 osutil,
43 osutil,
44 parsers,
44 parsers,
45 pycompat,
45 pycompat,
46 )
46 )
47
47
48 for attr in (
48 for attr in (
49 'empty',
49 'empty',
50 'pickle',
50 'pickle',
51 'queue',
51 'queue',
52 'urlerr',
52 'urlerr',
53 'urlparse',
53 'urlparse',
54 # we do import urlreq, but we do it outside the loop
54 # we do import urlreq, but we do it outside the loop
55 #'urlreq',
55 #'urlreq',
56 'stringio',
56 'stringio',
57 'socketserver',
57 'xmlrpclib',
58 'xmlrpclib',
58 ):
59 ):
59 globals()[attr] = getattr(pycompat, attr)
60 globals()[attr] = getattr(pycompat, attr)
60
61
61 # This line is to make pyflakes happy:
62 # This line is to make pyflakes happy:
62 urlreq = pycompat.urlreq
63 urlreq = pycompat.urlreq
63
64
64 if os.name == 'nt':
65 if os.name == 'nt':
65 from . import windows as platform
66 from . import windows as platform
66 else:
67 else:
67 from . import posix as platform
68 from . import posix as platform
68
69
69 _ = i18n._
70 _ = i18n._
70
71
71 cachestat = platform.cachestat
72 cachestat = platform.cachestat
72 checkexec = platform.checkexec
73 checkexec = platform.checkexec
73 checklink = platform.checklink
74 checklink = platform.checklink
74 copymode = platform.copymode
75 copymode = platform.copymode
75 executablepath = platform.executablepath
76 executablepath = platform.executablepath
76 expandglobs = platform.expandglobs
77 expandglobs = platform.expandglobs
77 explainexit = platform.explainexit
78 explainexit = platform.explainexit
78 findexe = platform.findexe
79 findexe = platform.findexe
79 gethgcmd = platform.gethgcmd
80 gethgcmd = platform.gethgcmd
80 getuser = platform.getuser
81 getuser = platform.getuser
81 getpid = os.getpid
82 getpid = os.getpid
82 groupmembers = platform.groupmembers
83 groupmembers = platform.groupmembers
83 groupname = platform.groupname
84 groupname = platform.groupname
84 hidewindow = platform.hidewindow
85 hidewindow = platform.hidewindow
85 isexec = platform.isexec
86 isexec = platform.isexec
86 isowner = platform.isowner
87 isowner = platform.isowner
87 localpath = platform.localpath
88 localpath = platform.localpath
88 lookupreg = platform.lookupreg
89 lookupreg = platform.lookupreg
89 makedir = platform.makedir
90 makedir = platform.makedir
90 nlinks = platform.nlinks
91 nlinks = platform.nlinks
91 normpath = platform.normpath
92 normpath = platform.normpath
92 normcase = platform.normcase
93 normcase = platform.normcase
93 normcasespec = platform.normcasespec
94 normcasespec = platform.normcasespec
94 normcasefallback = platform.normcasefallback
95 normcasefallback = platform.normcasefallback
95 openhardlinks = platform.openhardlinks
96 openhardlinks = platform.openhardlinks
96 oslink = platform.oslink
97 oslink = platform.oslink
97 parsepatchoutput = platform.parsepatchoutput
98 parsepatchoutput = platform.parsepatchoutput
98 pconvert = platform.pconvert
99 pconvert = platform.pconvert
99 poll = platform.poll
100 poll = platform.poll
100 popen = platform.popen
101 popen = platform.popen
101 posixfile = platform.posixfile
102 posixfile = platform.posixfile
102 quotecommand = platform.quotecommand
103 quotecommand = platform.quotecommand
103 readpipe = platform.readpipe
104 readpipe = platform.readpipe
104 rename = platform.rename
105 rename = platform.rename
105 removedirs = platform.removedirs
106 removedirs = platform.removedirs
106 samedevice = platform.samedevice
107 samedevice = platform.samedevice
107 samefile = platform.samefile
108 samefile = platform.samefile
108 samestat = platform.samestat
109 samestat = platform.samestat
109 setbinary = platform.setbinary
110 setbinary = platform.setbinary
110 setflags = platform.setflags
111 setflags = platform.setflags
111 setsignalhandler = platform.setsignalhandler
112 setsignalhandler = platform.setsignalhandler
112 shellquote = platform.shellquote
113 shellquote = platform.shellquote
113 spawndetached = platform.spawndetached
114 spawndetached = platform.spawndetached
114 split = platform.split
115 split = platform.split
115 sshargs = platform.sshargs
116 sshargs = platform.sshargs
116 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
117 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
117 statisexec = platform.statisexec
118 statisexec = platform.statisexec
118 statislink = platform.statislink
119 statislink = platform.statislink
119 termwidth = platform.termwidth
120 termwidth = platform.termwidth
120 testpid = platform.testpid
121 testpid = platform.testpid
121 umask = platform.umask
122 umask = platform.umask
122 unlink = platform.unlink
123 unlink = platform.unlink
123 unlinkpath = platform.unlinkpath
124 unlinkpath = platform.unlinkpath
124 username = platform.username
125 username = platform.username
125
126
126 # Python compatibility
127 # Python compatibility
127
128
# sentinel used where None is a legitimate value (see safehasattr below)
_notset = object()

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)
134
135
def safehasattr(thing, attr):
    """Check whether 'thing' has attribute 'attr'.

    Uses getattr with a unique sentinel default, so an attribute whose
    value happens to be None (or any other object) is still reported as
    present; only a genuinely missing attribute yields False.
    """
    missing = object()
    return getattr(thing, attr, missing) is not missing
137
138
# registry of supported digest algorithms, keyed by their wire name
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every preference entry must be a registered digest
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS

class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        """Create one hash object per name in 'digests'.

        's' is an optional initial chunk of data to feed them. Raises
        Abort for an unknown digest name.
        """
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """Feed 'data' to every configured hash object."""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        """Return the hex digest accumulated so far for digest type 'key'."""
        if key not in DIGESTS:
            # fix: interpolate 'key', the actual lookup argument; the old
            # code used 'k', a leaked module-level loop variable, so the
            # error message named an arbitrary digest instead of the one
            # that was requested
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
195
196
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        data = self._fh.read(length)
        self._got += len(data)
        self._digester.update(data)
        return data

    def validate(self):
        """Raise Abort unless exactly 'size' bytes were read and every
        expected digest matches the content seen so far."""
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
227
228
# 'buffer' is a py2 builtin that does not exist on py3; expose a callable
# with the same name and calling convention in all cases
try:
    buffer = buffer
except NameError:
    if sys.version_info[0] < 3:
        # py2 without the builtin: fall back to a copying slice
        def buffer(sliceable, offset=0):
            return sliceable[offset:]
    else:
        # py3: memoryview gives a zero-copy view of the tail
        def buffer(sliceable, offset=0):
            return memoryview(sliceable)[offset:]

# whether spawned subprocesses should close inherited file descriptors;
# only enabled on POSIX (passed as close_fds to subprocess.Popen below)
closefds = os.name == 'posix'

# bytes per os.read() call in bufferedinputpipe._fillbuffer
_chunksize = 4096
241
242
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input  # underlying file-like object; must have fileno()
        self._buffer = []    # buffered chunks, newest appended last
        self._eof = False    # set once os.read() returns ''
        # invariant: _lenbuf == sum(len(chunk) for chunk in _buffer)
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # keep filling until we have at least 'size' bytes or hit EOF
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first newline in the most recent chunk (-1: none)
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        # size: number of bytes to return, including the newline if found
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all chunks into one before slicing
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # one raw chunk; an empty read means the other end closed the pipe
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
335
336
def popen2(cmd, env=None, newlines=False):
    """Run 'cmd' through the shell, returning its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
346
347
def popen3(cmd, env=None, newlines=False):
    """Like popen4, but without the trailing process object."""
    return popen4(cmd, env, newlines)[:3]
350
351
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Run 'cmd' through the shell.

    Returns (stdin, stdout, stderr, process) for the spawned child.
    """
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
359
360
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
367
368
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)
    """
    if not v:
        v = version()

    # split off the "extra" part after the first '+', if any
    if '+' in v:
        vparts, extra = v.split('+', 1)
    else:
        vparts, extra = v, None

    # collect leading numeric components; stop at the first non-integer
    vints = []
    for part in vparts.split('.'):
        try:
            vints.append(int(part))
        except ValueError:
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
420
421
# used by parsedate
# strptime-style patterns tried in order; more specific forms come first
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

# superset of the above adding less precise forms (bare year/month)
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
455
456
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # no arguments: a one-slot list doubles as the "computed yet?" flag
        slot = []
        def f():
            if not slot:
                slot.append(func())
            return slot[0]
        return f
    cache = {}
    if argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            try:
                return cache[arg]
            except KeyError:
                result = cache[arg] = func(arg)
                return result
    else:
        def f(*args):
            try:
                return cache[args]
            except KeyError:
                result = cache[args] = func(*args)
                return result

    return f
481
482
482 class sortdict(dict):
483 class sortdict(dict):
483 '''a simple sorted dictionary'''
484 '''a simple sorted dictionary'''
484 def __init__(self, data=None):
485 def __init__(self, data=None):
485 self._list = []
486 self._list = []
486 if data:
487 if data:
487 self.update(data)
488 self.update(data)
488 def copy(self):
489 def copy(self):
489 return sortdict(self)
490 return sortdict(self)
490 def __setitem__(self, key, val):
491 def __setitem__(self, key, val):
491 if key in self:
492 if key in self:
492 self._list.remove(key)
493 self._list.remove(key)
493 self._list.append(key)
494 self._list.append(key)
494 dict.__setitem__(self, key, val)
495 dict.__setitem__(self, key, val)
495 def __iter__(self):
496 def __iter__(self):
496 return self._list.__iter__()
497 return self._list.__iter__()
497 def update(self, src):
498 def update(self, src):
498 if isinstance(src, dict):
499 if isinstance(src, dict):
499 src = src.iteritems()
500 src = src.iteritems()
500 for k, v in src:
501 for k, v in src:
501 self[k] = v
502 self[k] = v
502 def clear(self):
503 def clear(self):
503 dict.clear(self)
504 dict.clear(self)
504 self._list = []
505 self._list = []
505 def items(self):
506 def items(self):
506 return [(k, self[k]) for k in self._list]
507 return [(k, self[k]) for k in self._list]
507 def __delitem__(self, key):
508 def __delitem__(self, key):
508 dict.__delitem__(self, key)
509 dict.__delitem__(self, key)
509 self._list.remove(key)
510 self._list.remove(key)
510 def pop(self, key, *args, **kwargs):
511 def pop(self, key, *args, **kwargs):
511 dict.pop(self, key, *args, **kwargs)
512 dict.pop(self, key, *args, **kwargs)
512 try:
513 try:
513 self._list.remove(key)
514 self._list.remove(key)
514 except ValueError:
515 except ValueError:
515 pass
516 pass
516 def keys(self):
517 def keys(self):
517 return self._list
518 return self._list
518 def iterkeys(self):
519 def iterkeys(self):
519 return self._list.__iter__()
520 return self._list.__iter__()
520 def iteritems(self):
521 def iteritems(self):
521 for k in self._list:
522 for k in self._list:
522 yield k, self[k]
523 yield k, self[k]
523 def insert(self, index, key, val):
524 def insert(self, index, key, val):
524 self._list.insert(index, key)
525 self._list.insert(index, key)
525 dict.__setitem__(self, key, val)
526 dict.__setitem__(self, key, val)
526
527
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    # fixed attribute set keeps per-node memory low; lrucachedict creates
    # one node per cache slot
    __slots__ = ('next', 'prev', 'key', 'value')

    def __init__(self):
        self.next = None
        self.prev = None

        # _notset (rather than None) marks an empty slot, so None remains
        # usable as a real key
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
545
546
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        self._cache = {}

        # start with a single self-linked node; the list stays circular
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        """Return the value cached under 'k', or 'default' if absent.

        Fix: the previous implementation returned the internal
        _lrucachenode object instead of the stored value. A hit now
        behaves exactly like __getitem__, including refreshing the
        entry's recency.
        """
        try:
            return self.__getitem__(k)
        except KeyError:
            return default

    def clear(self):
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
704
705
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = collections.deque()

    def lookup(key, compute):
        # shared LRU bookkeeping: refresh on hit, evict oldest when the
        # cache holds more than 20 entries, then record this access
        if key in cache:
            order.remove(key)
        else:
            if len(cache) > 20:
                del cache[order.popleft()]
            cache[key] = compute()
        order.append(key)
        return cache[key]

    if func.__code__.co_argcount == 1:
        def f(arg):
            return lookup(arg, lambda: func(arg))
    else:
        def f(*args):
            return lookup(args, lambda: func(*args))

    return f
731
732
class propertycache(object):
    """Descriptor that computes a value once and stores it on the instance.

    Because the result is written into the instance ``__dict__`` under the
    wrapped function's name, later attribute lookups find the cached value
    directly and never re-enter the descriptor.
    """

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
744
745
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    # feed the whole input at once; stderr is left attached to ours
    stdout, _stderr = proc.communicate(s)
    return stdout
751
752
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname = outname = None
    try:
        # write the input to a temp file the command can read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        # reserve an output temp file (closed; the command will write it)
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # OpenVMS uses odd status values for success
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort cleanup of both temp files, each independently
        for name in (inname, outname):
            try:
                if name:
                    os.unlink(name)
            except OSError:
                pass
785
786
# dispatch table mapping filter-spec prefixes to their implementations;
# filter() below falls back to pipefilter when no prefix matches
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
790
791
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # note: shadows the builtin 'filter'; kept for API compatibility
    for prefix, impl in filtertable.iteritems():
        if not cmd.startswith(prefix):
            continue
        return impl(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
797
798
def binary(s):
    """return true if a string is binary data"""
    # empty/None input is never considered binary
    if not s:
        return False
    return '\0' in s
801
802
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def floorlog2(x):
        # index of the highest set bit; 0 for x == 0
        if not x:
            return 0
        bits = -1
        while x:
            x >>= 1
            bits += 1
        return bits

    pending = []
    pendinglen = 0
    for piece in source:
        pending.append(piece)
        pendinglen += len(piece)
        if pendinglen < min:
            continue
        # grow the threshold: at least double it, or jump to the largest
        # power of two not exceeding what we just accumulated; cap at max
        if min < max:
            min = min << 1
            grown = 1 << floorlog2(pendinglen)
            if grown > min:
                min = grown
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendinglen = 0
    # flush whatever is left, even if below the threshold
    if pending:
        yield ''.join(pending)
832
833
# module-level alias so callers can keep writing util.Abort
Abort = error.Abort
834
835
def always(fn):
    """Predicate that accepts anything; the argument is ignored."""
    return True
837
838
def never(fn):
    """Predicate that rejects everything; the argument is ignored."""
    return False
840
841
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only turn the collector back on if the caller had it on
            if wasenabled:
                gc.enable()

    return wrapper
862
863
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # on Windows, paths on different drives have no relative form
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        # make n2 absolute (slash-separated) so both sides are comparable
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    # strip the common leading components by popping matched tails of the
    # reversed lists
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2's remainder
    return os.sep.join((['..'] * len(a)) + b) or '.'
888
889
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):  # new py2exe
        return True
    if safehasattr(sys, "importers"):  # old py2exe
        return True
    return imp.is_frozen("__main__")  # tools/freeze
898
899
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(sys.executable)
else:
    datapath = os.path.dirname(__file__)

# tell the i18n machinery where translations live
i18n.setdatapath(datapath)
907
908
# cached path of the 'hg' executable; resolved lazily by hgexecutable()
_hgexecutable = None

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        mainmod = sys.modules['__main__']
        if hg:
            # explicit override via the HG environment variable wins
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(os.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            # running via an 'hg' script: that file is the executable
            _sethgexecutable(mainmod.__file__)
        else:
            # last resort: search PATH, else fall back to argv[0]'s name
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
932
933
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # record the path in the module-level cache read by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
937
938
938 def _isstdout(f):
939 def _isstdout(f):
939 fileno = getattr(f, 'fileno', None)
940 fileno = getattr(f, 'fileno', None)
940 return fileno and fileno() == sys.__stdout__.fileno()
941 return fileno and fileno() == sys.__stdout__.fileno()
941
942
def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status, else raise onerr
    object as exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    if environ is None:
        environ = {}
    # flush our own buffered stdout so the child's output is not interleaved
    # before ours; best-effort only
    try:
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    # keep the original for error reporting; quotecommand may rewrite it
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9' and (sys.version_info[0] == 2
                                    and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        # child environment: our environment, overlaid with shell-friendly
        # versions of the caller's overrides, plus HG pointing at ourselves
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or _isstdout(out):
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # redirect both streams through a pipe and copy them to ``out``
            # line by line so output appears incrementally
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            while True:
                line = proc.stdout.readline()
                if not line:
                    break
                out.write(line)
            proc.wait()
            rc = proc.returncode
    if sys.platform == 'OpenVMS' and rc & 1:
        # OpenVMS encodes success in odd status values
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        raise onerr(errmsg)
    return rc
1000
1001
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def checked(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a one-frame traceback means the TypeError came from the call
            # itself (bad argument list), not from inside func's body
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return checked
1012
1013
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember the old stat so we can detect mtime ambiguity below
            oldstat = checkambig and filestat(dest)
        unlink(dest)
    # hardlinks are problematic on CIFS, quietly ignore this flag
    # until we find a way to work around it cleanly (issue4546)
    if False and hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the link itself rather than copying its target
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one
                    # nudge mtime forward (31-bit wrap) so cached stat data
                    # is invalidated
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1057
1058
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    if hardlink is None:
        # default: only try hardlinks when src and dst share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)
    if hardlink:
        topic = _('linking')
    else:
        topic = _('copying')

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # offset the child's progress by files already handled here
                if pos is not None:
                    return progress(t, pos + num)
            # recurse; the child may flip ``hardlink`` off on failure and we
            # keep that decision for the remaining entries
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed: fall back to copying from here on
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    # signal completion of this (sub)tree
    progress(topic, None)

    return hardlink, num
1094
1095
# path components and characters that Windows reserves or forbids
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # validate each path component regardless of which separator was used
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in n:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # reserved device names apply to the part before the first dot
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # NOTE: "n not in '..'" is a substring test -- it deliberately
        # exempts the special components '.' and '..' from this check
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1145
1146
# pick the filename checker for the current OS: the Windows rules above,
# or the platform module's implementation everywhere else
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
1150
1151
def makelock(info, pathname):
    """Create a lock at *pathname* carrying *info*, preferring a symlink."""
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            # the lock already exists: let the caller deal with it
            raise
        # any other OSError falls through to the plain-file variant below
    except AttributeError: # no symlink in os
        pass

    # fallback: an exclusively-created regular file holding the same info
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
1163
1164
def readlock(pathname):
    """Return the info stored in the lock at *pathname*."""
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported -- in both
        # cases fall through and read the lock as a regular file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    # regular-file lock (see makelock's fallback): its content is the info
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
1176
1177
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor available: stat by name instead
        return os.stat(fp.name)
    return os.fstat(fd)
1183
1184
1184 # File system features
1185 # File system features
1185
1186
1186 def checkcase(path):
1187 def checkcase(path):
1187 """
1188 """
1188 Return true if the given path is on a case-sensitive filesystem
1189 Return true if the given path is on a case-sensitive filesystem
1189
1190
1190 Requires a path (like /foo/.hg) ending with a foldable final
1191 Requires a path (like /foo/.hg) ending with a foldable final
1191 directory component.
1192 directory component.
1192 """
1193 """
1193 s1 = os.lstat(path)
1194 s1 = os.lstat(path)
1194 d, b = os.path.split(path)
1195 d, b = os.path.split(path)
1195 b2 = b.upper()
1196 b2 = b.upper()
1196 if b == b2:
1197 if b == b2:
1197 b2 = b.lower()
1198 b2 = b.lower()
1198 if b == b2:
1199 if b == b2:
1199 return True # no evidence against case sensitivity
1200 return True # no evidence against case sensitivity
1200 p2 = os.path.join(d, b2)
1201 p2 = os.path.join(d, b2)
1201 try:
1202 try:
1202 s2 = os.lstat(p2)
1203 s2 = os.lstat(p2)
1203 if s2 == s1:
1204 if s2 == s1:
1204 return False
1205 return False
1205 return True
1206 return True
1206 except OSError:
1207 except OSError:
1207 return True
1208 return True
1208
1209
1209 try:
1210 try:
1210 import re2
1211 import re2
1211 _re2 = None
1212 _re2 = None
1212 except ImportError:
1213 except ImportError:
1213 _re2 = False
1214 _re2 = False
1214
1215
class _re(object):
    # Facade over the optional re2 module with fallback to the stdlib re
    # (bound as ``remod``). The module-level ``_re2`` flag is None until
    # probed, then True/False.
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes no flags argument: translate the two supported
            # flags into inline pattern modifiers
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 lacks: fall back to stdlib re
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
1257
1258
# Module-level singleton: regex helper that transparently prefers re2.
re = _re()

# Cache used by fspath(), keyed by directory path; each value maps a
# normcase-ed entry name to the entry name as stored on disk.
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normcase-ed name -> on-disk name for every entry of dir
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # str.replace() returns a new string: the result must be assigned,
    # otherwise a '\' separator is swallowed as a regex escape inside the
    # character classes below (relevant on Windows where os.sep == '\\').
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through untouched
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1302
1303
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        # don't clobber an existing file; just report "broken"
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        # directory not writable: cannot verify hardlink behavior
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        # link creation or stat failed: hardlinks unusable here
        return False
    finally:
        if fd is not None:
            fd.close()
        # best-effort cleanup of both temporary probe files
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass
1334
1335
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # mirror the original short-circuit: evaluates to None when there is
    # no altsep on this platform
    return os.altsep and path.endswith(os.altsep)
1338
1339
def splitpath(path):
    """Split path by os.sep.

    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.
    """
    separator = os.sep
    return path.split(separator)
1346
1347
def gui():
    '''Are we running in a GUI?'''
    if sys.platform != 'darwin':
        return os.name == "nt" or os.environ.get("DISPLAY")
    # macOS: a GUI is usually available, except over plain SSH
    if 'SSH_CONNECTION' in os.environ:
        # handle SSH access to a box where the user is logged in
        return False
    if getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
1361
1362
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    # create the temp file in the same directory so a later rename is atomic
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # original does not exist: an empty temp file is the copy
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # any failure: remove the half-written temp file, then re-raise
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
1400
1401
class filestat(object):
    """help to exactly detect change of a file

    'stat' attribute is result of 'os.stat()' if specified 'path'
    exists. Otherwise, it is None. This can avoid preparative
    'exists()' examination on client side of this class.
    """
    def __init__(self, path):
        try:
            self.stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # a missing file is represented by stat = None
            self.stat = None

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparision of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            pass
        try:
            # if either side has no stat, the two are equal only when both
            # files are missing; previously two missing files compared
            # unequal, which spuriously reported "changed"
            return self.stat is None and old.stat is None
        except AttributeError:
            # 'old' is not a filestat-like object
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more at
        same time in sec (= S[n-1].ctime), and comparison of timestamp
        is ambiguous.

        Base idea to avoid such ambiguity is "advance mtime 1 sec, if
        timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        by confliction between such mtime.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if size of a file isn't changed.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            # either side has no stat: cannot be ambiguous
            return False

    def __ne__(self, other):
        return not self == other
1466
1467
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        # all writes go to a same-directory temp copy until close()
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # commit: move the temp copy over the permanent name
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            # only compute the pre-rename stat when ambiguity checking is on
            oldstat = self._checkambig and filestat(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        # abort: drop the temp copy, leaving the original untouched
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # commit on clean exit, discard if an exception escaped the block
        if exctype is not None:
            self.discard()
        else:
            self.close()
1529
1530
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            # already present: nothing to do
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # parent directory is missing: create it, then retry the leaf
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)
1557
1558
def readfile(path):
    """Return the entire content of the file at path as bytes."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
1561
1562
def writefile(path, text):
    """Replace the content of the file at path with the given bytes."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
1565
1566
def appendfile(path, text):
    """Append the given bytes to the file at path, creating it if absent."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
1569
1570
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # re-chunk anything over 1MB into 256KB pieces so a single
            # oversized chunk never sits whole in the queue
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # number of bytes of self._queue[0] already handed out by read()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    # source exhausted: return whatever was collected
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
1650
1651
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        # never ask for more than the remaining limit allows
        nbytes = size if limit is None else min(limit, size)
        # nbytes == 0 short-circuits to 0 (falsy) without touching f
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1671
1672
def makedate(timestamp=None):
    """Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone."""
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        # refuse pre-epoch times; they usually indicate a broken clock
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    # local timezone offset in seconds = UTC wall time - local wall time
    utcwall = datetime.datetime.utcfromtimestamp(timestamp)
    localwall = datetime.datetime.fromtimestamp(timestamp)
    delta = utcwall - localwall
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz
1684
1685
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # expand the Mercurial-specific timezone escapes:
        # %1 = signed hours, %2 = minutes, %z = both
        sign = "-" if tz > 0 else "+"
        hours, minutes = divmod(abs(tz) // 60, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, hours))
        format = format.replace("%2", "%02d" % minutes)
    # clamp to the 32-bit range every strftime can represent
    d = t - tz
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    when = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    return when.strftime(format)
1720
1721
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    # delegate to datestr with a fixed ISO-style day format
    isoformat = '%Y-%m-%d'
    return datestr(date, format=isoformat)
1724
1725
def parsetimezone(tz):
    """parse a timezone string and return an offset integer"""
    # numeric form: exactly +HHMM / -HHMM
    if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
        sign = 1 if tz[0] == "+" else -1
        hours = int(tz[1:3])
        minutes = int(tz[3:5])
        # offsets east of UTC are negative in Mercurial's convention
        return -sign * (hours * 60 + minutes) * 60
    if tz in ("GMT", "UTC"):
        return 0
    # unrecognized timezone string
    return None
1735
1736
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    # NOTE(review): ``defaults`` is indexed like a mapping below
    # (defaults[part][usenow]), so the ``[]`` default is never usable;
    # callers are presumably expected to always pass a dict -- confirm.
    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string.split()[-1]), string
    if offset is not None:
        # last token was a recognized timezone; strip it from the date text
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        # does the format already ask for any directive in this group?
        found = [True for p in part if ("%"+p) in format]
        if not found:
            # append the default value with a '@' sentinel separator
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1765
1766
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    # trivial cases first
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # symbolic dates (both the English keyword and its translation match)
    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fastpath: "unixtime offset" pair
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                b = "00" if part[0] in "HMS" else "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1842
1843
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # round unknown parts down (start of the matching range)
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # round unknown parts up (end of the matching range); month length
        # is probed from 31 down to the always-valid 28
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    spec = date.strip()

    if not spec:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    if spec[0] == "<":
        if not spec[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(spec[1:])
        return lambda x: x <= when
    if spec[0] == ">":
        if not spec[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(spec[1:])
        return lambda x: x >= when
    if spec[0] == "-":
        # "-N": within the last N days
        try:
            days = int(spec[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % spec[1:])
        if days < 0:
            raise Abort(_('%s must be nonnegative (see "hg help dates")')
                        % spec[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    if " to " in spec:
        a, b = spec.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    # plain date: match anywhere inside its implied range
    start, stop = lower(spec), upper(spec)
    return lambda x: x >= start and x <= stop
1918
1919
def stringmatcher(pattern):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])
    """
    if pattern.startswith('re:'):
        body = pattern[3:]
        try:
            regex = remod.compile(body)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', body, regex.search
    if pattern.startswith('literal:'):
        # strip the explicit prefix; anything else is taken verbatim
        pattern = pattern[8:]
    return 'literal', pattern, pattern.__eq__
1957
1958
def shortuser(user):
    """Return a short representation of a user name or email address."""
    user = user.split('@', 1)[0]      # drop any mail domain
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]          # drop a leading 'Real Name <' part
    user = user.split(' ', 1)[0]      # keep only the first word
    user = user.split('.', 1)[0]      # and only the portion before a dot
    return user
1973
1974
def emailuser(user):
    """Return the user portion of an email address."""
    user = user.split('@', 1)[0]      # strip the domain, if any
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]          # strip a 'Real Name <' prefix
    return user
1983
1984
def email(author):
    '''get email of author.'''
    close = author.find('>')
    if close == -1:
        close = None                  # no closing bracket: take to the end
    start = author.find('<') + 1      # -1 + 1 == 0 when there is no '<'
    return author[start:close]
1990
1991
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # width-aware trimming (and the '...' marker) is handled by encoding.trim
    return encoding.trim(text, maxlength, ellipsis='...')
1994
1995
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        # rows are ordered most-specific first; first row whose threshold
        # (divisor * multiplier) the count reaches wins
        for multiplier, divisor, fmt in unittable:
            if count >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # below every threshold: fall back to the last (smallest) unit
        return unittable[-1][2] % count

    return go
2005
2006
# human-readable byte counts: (multiplier, divisor, format) rows,
# most specific first; consumed by unitcountfn above
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2018
2019
def uirepr(s):
    """repr() suitable for ui output."""
    quoted = repr(s)
    # Avoid double backslash in Windows path repr()
    return quoted.replace('\\\\', '\\')
2022
2023
# delay import of textwrap
def MBTextWrapper(**kwargs):
    class mbtw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # split ucstr after the widest prefix fitting in space_left
            # display columns
            consumed = 0
            colwidth = encoding.ucolwidth
            for i, uchar in enumerate(ucstr):
                consumed += colwidth(uchar)
                if space_left < consumed:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)
            if self.break_long_words:
                cut, remainder = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = remainder
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:
                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                indent = self.subsequent_indent if lines else self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])
                    if cur_len + l <= width:
                        # Can at least squeeze this chunk onto the current line.
                        cur_line.append(chunks.pop())
                        cur_len += l
                    else:
                        # Nope, this line is full.
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # memoize: rebind the module-level name to the class itself so later
    # calls construct instances directly without redefining the class
    global MBTextWrapper
    MBTextWrapper = mbtw
    return mbtw(**kwargs)
2126
2127
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap line to width display columns, honouring both indents."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # wrap in unicode space so column widths are computed per character
    uline = line.decode(encoding.encoding, encoding.encodingmode)
    uinit = initindent.decode(encoding.encoding, encoding.encodingmode)
    uhang = hangindent.decode(encoding.encoding, encoding.encodingmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=uinit,
                            subsequent_indent=uhang)
    return wrapper.fill(uline).encode(encoding.encoding)
2139
2140
def iterlines(iterator):
    """Yield each line of every chunk produced by iterator."""
    for chunk in iterator:
        # splitlines() handles all line-ending conventions
        for ln in chunk.splitlines():
            yield ln
2144
2145
def expandpath(path):
    """Expand environment variables, then a leading ~, in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2147
2148
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            return [os.environ['EXECUTABLEPATH']]
        return [sys.executable]
    return gethgcmd()
2162
2163
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    reaped = set()

    def onsigchld(signum, frame):
        reaped.add(os.wait())

    oldhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        oldhandler = signal.signal(SIGCHLD, onsigchld)
    try:
        pid = spawndetached(args)
        while not condfn():
            # only re-check condfn when the child looks dead, to avoid a
            # race between its exit and the condition becoming true
            if ((pid in reaped or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if oldhandler is not None:
            signal.signal(signal.SIGCHLD, oldhandler)
2197
2198
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # map the doubled prefix to a literal prefix character; work on a
        # copy so the caller's mapping is not mutated as a side effect
        mapping = mapping.copy()
        mapping[prefix_char] = prefix_char
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2222
2223
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    # numeric port (int or numeric string): use it directly
    try:
        return int(port)
    except ValueError:
        pass

    # otherwise treat it as a service name
    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
2239
2240
# canonical spellings of boolean config values, lowercased
_booleans = dict([(k, True) for k in ('1', 'yes', 'true', 'on', 'always')] +
                 [(k, False) for k in ('0', 'no', 'false', 'off', 'never')])

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
2250
2251
2251 _hexdig = '0123456789ABCDEFabcdef'
2252 _hexdig = '0123456789ABCDEFabcdef'
2252 _hextochr = dict((a + b, chr(int(a + b, 16)))
2253 _hextochr = dict((a + b, chr(int(a + b, 16)))
2253 for a in _hexdig for b in _hexdig)
2254 for a in _hexdig for b in _hexdig)
2254
2255
2255 def _urlunquote(s):
2256 def _urlunquote(s):
2256 """Decode HTTP/HTML % encoding.
2257 """Decode HTTP/HTML % encoding.
2257
2258
2258 >>> _urlunquote('abc%20def')
2259 >>> _urlunquote('abc%20def')
2259 'abc def'
2260 'abc def'
2260 """
2261 """
2261 res = s.split('%')
2262 res = s.split('%')
2262 # fastpath
2263 # fastpath
2263 if len(res) == 1:
2264 if len(res) == 1:
2264 return s
2265 return s
2265 s = res[0]
2266 s = res[0]
2266 for item in res[1:]:
2267 for item in res[1:]:
2267 try:
2268 try:
2268 s += _hextochr[item[:2]] + item[2:]
2269 s += _hextochr[item[:2]] + item[2:]
2269 except KeyError:
2270 except KeyError:
2270 s += '%' + item
2271 s += '%' + item
2271 except UnicodeDecodeError:
2272 except UnicodeDecodeError:
2272 s += unichr(int(item[:2], 16)) + item[2:]
2273 s += unichr(int(item[:2], 16)) + item[2:]
2273 return s
2274 return s
2274
2275
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url('http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url('ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url('file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url('file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url('bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url('bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(r'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(r'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(r'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(r'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url('ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url('ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url('http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>
    """

    # characters __str__ leaves unescaped in user/passwd components
    _safechars = "!~*'()+"
    # characters __str__ leaves unescaped in path/fragment components
    _safepchars = "/!~*'()+:\\"
    # matches a leading '<scheme>:' prefix
    _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        # strip the fragment first so '#' inside it can't confuse later steps
        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)
            if not path:
                path = None

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                # rsplit: the user part itself may contain '@'
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))

    def __repr__(self):
        # list only the components that were actually parsed
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                # bracketed IPv6 literal: keep as-is
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    def authinfo(self):
        # Return (url-without-credentials, authinfo-tuple-or-None); the
        # credentials are temporarily cleared so str(self) omits them.
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = str(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        # True when the URL/path cannot be joined onto a base path
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        # Local filesystem path for file:/bundle: URLs; the original
        # string unchanged for anything else.
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
2561
2562
def hasscheme(path):
    """Report whether path carries an explicit URL scheme prefix."""
    parsed = url(path)
    return bool(parsed.scheme)
2564
2565
def hasdriveletter(path):
    """Report whether path begins with a Windows drive letter ('x:...')."""
    # slicing (not indexing) keeps this safe on one-character strings
    return path and path[0:1].isalpha() and path[1:2] == ':'
2567
2568
def urllocalpath(path):
    """Return the local filesystem path for a repository URL string."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
2570
2571
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # mask rather than drop, so the URL shape stays recognizable
        parsed.passwd = '***'
    return str(parsed)
2577
2578
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = None
    parsed.passwd = None
    return str(parsed)
2583
2584
def isatty(fp):
    """Best-effort check whether fp is attached to a terminal.

    Objects without an isatty() method are reported as non-terminals.
    """
    try:
        interactive = fp.isatty()
    except AttributeError:
        return False
    return interactive
2589
2590
# Human-readable formatter for durations given in seconds, built on
# unitcountfn(). Entries appear to be (threshold, divisor, format) triples
# selecting s/ms/us/ns precision -- see unitcountfn for the exact contract.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
2605
2606
# shared nesting level (in spaces of indentation) for @timed reports
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        begin = time.time()
        step = 2
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            # report even when func raises; un-indent before printing
            duration = time.time() - begin
            _timenesting[0] -= step
            sys.stderr.write('%s%s: %s\n' %
                             (' ' * _timenesting[0], func.__name__,
                              timecount(duration)))
    return wrapper
2632
2633
# suffix -> multiplier; matched with endswith() in this exact order, so the
# single-letter forms are tried before the two-letter ones
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                value = float(spec[:-len(suffix)])
                return int(value * multiplier)
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2654
2655
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, hook) pairs; kept sorted lazily at call time
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort at call time so late add() registrations still land in order
        self._hooks.sort(key=lambda entry: entry[0])
        return [hook(*args) for _source, hook in self._hooks]
2672
2673
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    frames = traceback.extract_stack()[:-skip - 1]
    entries = []
    for fn, ln, func, _text in frames:
        entries.append((fileline % (fn, ln), func))
    if not entries:
        return
    # pad every location to the widest one so functions line up
    width = max(len(loc) for loc, _func in entries)
    for loc, func in entries:
        if line is None:
            yield (width, loc, func)
        else:
            yield line % (width, loc, func)
2694
2695
def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' last entries. By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so interleaved output stays ordered
        otherf.flush()
    f.write('%s at:\n' % msg)
    # skip + 1: hide this helper's own frame from the report
    for entry in getstackframes(skip + 1):
        f.write(entry)
    f.flush()
2707
2708
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> number of entries living under it
        self._dirs = {}
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: ignore entries whose state equals skip
            for f, s in map.iteritems():
                if s[0] != skip:
                    self.addpath(f)
        else:
            for f in map:
                self.addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # every shallower ancestor is already counted; stop early
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                # shallower ancestors keep their counts; stop early
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
2743
2744
# prefer the implementation from the parsers module when it provides one
# (presumably the C extension's version -- verify against parsers)
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
2746
2747
def finddirs(path):
    """Yield the ancestor directory names of path, deepest first."""
    head = path
    while True:
        head, sep, _tail = head.rpartition('/')
        if not sep:
            break
        yield head
2752
2753
2753 # compression utility
2754 # compression utility
2754
2755
class nocompress(object):
    """Identity 'compressor' exposing the compressor interface."""

    def compress(self, x):
        # pass the data through unchanged
        return x

    def flush(self):
        # nothing is buffered, so there is never anything left to emit
        return ""
2760
2761
# compression type marker -> zero-argument compressor factory
# (presumably the two-letter keys match bundle header types -- verify
# against the bundle code that consumes this table)
compressors = {
    None: nocompress,
    # lambda to prevent early import
    'BZ': lambda: bz2.BZ2Compressor(),
    'GZ': lambda: zlib.compressobj(),
    }
# also support the old form by courtesies
compressors['UN'] = compressors[None]
2769
2770
def _makedecompressor(decompcls):
    """Return a function that wraps a compressed file object in a
    chunkbuffer of decompressed data, using decompcls() instances."""
    def wrapper(fh):
        def stream():
            # one fresh decompressor per file object
            decompressor = decompcls()
            for chunk in filechunkiter(fh):
                yield decompressor.decompress(chunk)
        return chunkbuffer(stream())
    return wrapper
2778
2779
2779 class ctxmanager(object):
2780 class ctxmanager(object):
2780 '''A context manager for use in 'with' blocks to allow multiple
2781 '''A context manager for use in 'with' blocks to allow multiple
2781 contexts to be entered at once. This is both safer and more
2782 contexts to be entered at once. This is both safer and more
2782 flexible than contextlib.nested.
2783 flexible than contextlib.nested.
2783
2784
2784 Once Mercurial supports Python 2.7+, this will become mostly
2785 Once Mercurial supports Python 2.7+, this will become mostly
2785 unnecessary.
2786 unnecessary.
2786 '''
2787 '''
2787
2788
2788 def __init__(self, *args):
2789 def __init__(self, *args):
2789 '''Accepts a list of no-argument functions that return context
2790 '''Accepts a list of no-argument functions that return context
2790 managers. These will be invoked at __call__ time.'''
2791 managers. These will be invoked at __call__ time.'''
2791 self._pending = args
2792 self._pending = args
2792 self._atexit = []
2793 self._atexit = []
2793
2794
2794 def __enter__(self):
2795 def __enter__(self):
2795 return self
2796 return self
2796
2797
2797 def enter(self):
2798 def enter(self):
2798 '''Create and enter context managers in the order in which they were
2799 '''Create and enter context managers in the order in which they were
2799 passed to the constructor.'''
2800 passed to the constructor.'''
2800 values = []
2801 values = []
2801 for func in self._pending:
2802 for func in self._pending:
2802 obj = func()
2803 obj = func()
2803 values.append(obj.__enter__())
2804 values.append(obj.__enter__())
2804 self._atexit.append(obj.__exit__)
2805 self._atexit.append(obj.__exit__)
2805 del self._pending
2806 del self._pending
2806 return values
2807 return values
2807
2808
2808 def atexit(self, func, *args, **kwargs):
2809 def atexit(self, func, *args, **kwargs):
2809 '''Add a function to call when this context manager exits. The
2810 '''Add a function to call when this context manager exits. The
2810 ordering of multiple atexit calls is unspecified, save that
2811 ordering of multiple atexit calls is unspecified, save that
2811 they will happen before any __exit__ functions.'''
2812 they will happen before any __exit__ functions.'''
2812 def wrapper(exc_type, exc_val, exc_tb):
2813 def wrapper(exc_type, exc_val, exc_tb):
2813 func(*args, **kwargs)
2814 func(*args, **kwargs)
2814 self._atexit.append(wrapper)
2815 self._atexit.append(wrapper)
2815 return func
2816 return func
2816
2817
2817 def __exit__(self, exc_type, exc_val, exc_tb):
2818 def __exit__(self, exc_type, exc_val, exc_tb):
2818 '''Context managers are exited in the reverse order from which
2819 '''Context managers are exited in the reverse order from which
2819 they were created.'''
2820 they were created.'''
2820 received = exc_type is not None
2821 received = exc_type is not None
2821 suppressed = False
2822 suppressed = False
2822 pending = None
2823 pending = None
2823 self._atexit.reverse()
2824 self._atexit.reverse()
2824 for exitfunc in self._atexit:
2825 for exitfunc in self._atexit:
2825 try:
2826 try:
2826 if exitfunc(exc_type, exc_val, exc_tb):
2827 if exitfunc(exc_type, exc_val, exc_tb):
2827 suppressed = True
2828 suppressed = True
2828 exc_type = None
2829 exc_type = None
2829 exc_val = None
2830 exc_val = None
2830 exc_tb = None
2831 exc_tb = None
2831 except BaseException:
2832 except BaseException:
2832 pending = sys.exc_info()
2833 pending = sys.exc_info()
2833 exc_type, exc_val, exc_tb = pending = sys.exc_info()
2834 exc_type, exc_val, exc_tb = pending = sys.exc_info()
2834 del self._atexit
2835 del self._atexit
2835 if pending:
2836 if pending:
2836 raise exc_val
2837 raise exc_val
2837 return received and suppressed
2838 return received and suppressed
2838
2839
2839 def _bz2():
2840 def _bz2():
2840 d = bz2.BZ2Decompressor()
2841 d = bz2.BZ2Decompressor()
2841 # Bzip2 stream start with BZ, but we stripped it.
2842 # Bzip2 stream start with BZ, but we stripped it.
2842 # we put it back for good measure.
2843 # we put it back for good measure.
2843 d.decompress('BZ')
2844 d.decompress('BZ')
2844 return d
2845 return d
2845
2846
2846 decompressors = {None: lambda fh: fh,
2847 decompressors = {None: lambda fh: fh,
2847 '_truncatedBZ': _makedecompressor(_bz2),
2848 '_truncatedBZ': _makedecompressor(_bz2),
2848 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
2849 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
2849 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
2850 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
2850 }
2851 }
2851 # also support the old form by courtesies
2852 # also support the old form by courtesies
2852 decompressors['UN'] = decompressors[None]
2853 decompressors['UN'] = decompressors[None]
2853
2854
2854 # convenient shortcut
2855 # convenient shortcut
2855 dst = debugstacktrace
2856 dst = debugstacktrace
@@ -1,151 +1,150 b''
1 #require test-repo
1 #require test-repo
2
2
3 $ . "$TESTDIR/helpers-testrepo.sh"
3 $ . "$TESTDIR/helpers-testrepo.sh"
4 $ cd "$TESTDIR"/..
4 $ cd "$TESTDIR"/..
5
5
6 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
6 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
7 hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
7 hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
8 hgext/fsmonitor/pywatchman/__init__.py requires print_function
8 hgext/fsmonitor/pywatchman/__init__.py requires print_function
9 hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
9 hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
10 hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
10 hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
11 hgext/highlight/__init__.py not using absolute_import
11 hgext/highlight/__init__.py not using absolute_import
12 hgext/highlight/highlight.py not using absolute_import
12 hgext/highlight/highlight.py not using absolute_import
13 hgext/share.py not using absolute_import
13 hgext/share.py not using absolute_import
14 hgext/win32text.py not using absolute_import
14 hgext/win32text.py not using absolute_import
15 i18n/check-translation.py not using absolute_import
15 i18n/check-translation.py not using absolute_import
16 i18n/polib.py not using absolute_import
16 i18n/polib.py not using absolute_import
17 setup.py not using absolute_import
17 setup.py not using absolute_import
18 tests/heredoctest.py requires print_function
18 tests/heredoctest.py requires print_function
19 tests/md5sum.py not using absolute_import
19 tests/md5sum.py not using absolute_import
20 tests/readlink.py not using absolute_import
20 tests/readlink.py not using absolute_import
21 tests/run-tests.py not using absolute_import
21 tests/run-tests.py not using absolute_import
22 tests/test-demandimport.py not using absolute_import
22 tests/test-demandimport.py not using absolute_import
23
23
24 #if py3exe
24 #if py3exe
25 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
25 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
26 doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
26 doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
27 hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
27 hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
28 hgext/blackbox.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
28 hgext/blackbox.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
29 hgext/bugzilla.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
29 hgext/bugzilla.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
30 hgext/censor.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
30 hgext/censor.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
31 hgext/chgserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
31 hgext/chgserver.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
32 hgext/children.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
32 hgext/children.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
33 hgext/churn.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
33 hgext/churn.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
34 hgext/clonebundles.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
34 hgext/clonebundles.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
35 hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
35 hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
36 hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
36 hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
37 hgext/convert/convcmd.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
37 hgext/convert/convcmd.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
38 hgext/convert/cvs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
38 hgext/convert/cvs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
39 hgext/convert/cvsps.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
39 hgext/convert/cvsps.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
40 hgext/convert/darcs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
40 hgext/convert/darcs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
41 hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
41 hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
42 hgext/convert/git.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
42 hgext/convert/git.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
43 hgext/convert/gnuarch.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
43 hgext/convert/gnuarch.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
44 hgext/convert/hg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
44 hgext/convert/hg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
45 hgext/convert/monotone.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
45 hgext/convert/monotone.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
46 hgext/convert/p*.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
46 hgext/convert/p*.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
47 hgext/convert/subversion.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
47 hgext/convert/subversion.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
48 hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
48 hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
49 hgext/eol.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
49 hgext/eol.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
50 hgext/extdiff.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
50 hgext/extdiff.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
51 hgext/factotum.py: error importing: <ImportError> No module named 'rfc822' (error at __init__.py:*) (glob)
51 hgext/factotum.py: error importing: <ImportError> No module named 'rfc822' (error at __init__.py:*) (glob)
52 hgext/fetch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
52 hgext/fetch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
53 hgext/fsmonitor/watchmanclient.py: error importing module: <SystemError> Parent module 'hgext.fsmonitor' not loaded, cannot perform relative import (line *) (glob)
53 hgext/fsmonitor/watchmanclient.py: error importing module: <SystemError> Parent module 'hgext.fsmonitor' not loaded, cannot perform relative import (line *) (glob)
54 hgext/gpg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
54 hgext/gpg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
55 hgext/graphlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
55 hgext/graphlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
56 hgext/hgk.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
56 hgext/hgk.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
57 hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
57 hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
58 hgext/keyword.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob)
58 hgext/keyword.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob)
59 hgext/largefiles/basestore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
59 hgext/largefiles/basestore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
60 hgext/largefiles/lfcommands.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
60 hgext/largefiles/lfcommands.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
61 hgext/largefiles/lfutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
61 hgext/largefiles/lfutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
62 hgext/largefiles/localstore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
62 hgext/largefiles/localstore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
63 hgext/largefiles/overrides.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
63 hgext/largefiles/overrides.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
64 hgext/largefiles/proto.py: error importing: <ImportError> No module named 'httplib' (error at httppeer.py:*) (glob)
64 hgext/largefiles/proto.py: error importing: <ImportError> No module named 'httplib' (error at httppeer.py:*) (glob)
65 hgext/largefiles/remotestore.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob)
65 hgext/largefiles/remotestore.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob)
66 hgext/largefiles/reposetup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
66 hgext/largefiles/reposetup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
67 hgext/largefiles/storefactory.py: error importing: <SyntaxError> invalid syntax (bundle2.py, line *) (error at bundlerepo.py:*) (glob)
67 hgext/largefiles/storefactory.py: error importing: <SyntaxError> invalid syntax (bundle2.py, line *) (error at bundlerepo.py:*) (glob)
68 hgext/largefiles/uisetup.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob)
68 hgext/largefiles/uisetup.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob)
69 hgext/largefiles/wirestore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
69 hgext/largefiles/wirestore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
70 hgext/mq.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
70 hgext/mq.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
71 hgext/notify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
71 hgext/notify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
72 hgext/pager.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
72 hgext/pager.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
73 hgext/patchbomb.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
73 hgext/patchbomb.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
74 hgext/purge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
74 hgext/purge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
75 hgext/rebase.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
75 hgext/rebase.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
76 hgext/record.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
76 hgext/record.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
77 hgext/relink.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
77 hgext/relink.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
78 hgext/schemes.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
78 hgext/schemes.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
79 hgext/share.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
79 hgext/share.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
80 hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
80 hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
81 hgext/strip.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
81 hgext/strip.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
82 hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
82 hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
83 mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
83 mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
84 mercurial/branchmap.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
84 mercurial/branchmap.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
85 mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
85 mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
86 mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
86 mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
87 mercurial/changegroup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
87 mercurial/changegroup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
88 mercurial/changelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
88 mercurial/changelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
89 mercurial/cmdutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
89 mercurial/cmdutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
90 mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
90 mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
91 mercurial/commandserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
92 mercurial/context.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
91 mercurial/context.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
93 mercurial/copies.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
92 mercurial/copies.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
94 mercurial/crecord.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
93 mercurial/crecord.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
95 mercurial/dirstate.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
94 mercurial/dirstate.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
96 mercurial/discovery.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
95 mercurial/discovery.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
97 mercurial/dispatch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
96 mercurial/dispatch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
98 mercurial/exchange.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
97 mercurial/exchange.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
99 mercurial/extensions.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
98 mercurial/extensions.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
100 mercurial/filelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
99 mercurial/filelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
101 mercurial/filemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
100 mercurial/filemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
102 mercurial/fileset.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
101 mercurial/fileset.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
103 mercurial/formatter.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
102 mercurial/formatter.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
104 mercurial/graphmod.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
103 mercurial/graphmod.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
105 mercurial/help.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
104 mercurial/help.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
106 mercurial/hg.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
105 mercurial/hg.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
107 mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
106 mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
108 mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
107 mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
109 mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
108 mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
110 mercurial/hgweb/protocol.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
109 mercurial/hgweb/protocol.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
111 mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
110 mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
112 mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
111 mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
113 mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
112 mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
114 mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
113 mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
115 mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
114 mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
116 mercurial/hook.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
115 mercurial/hook.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
117 mercurial/httpconnection.py: error importing: <ImportError> No module named 'rfc822' (error at __init__.py:*) (glob)
116 mercurial/httpconnection.py: error importing: <ImportError> No module named 'rfc822' (error at __init__.py:*) (glob)
118 mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
117 mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
119 mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
118 mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
120 mercurial/localrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
119 mercurial/localrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
121 mercurial/mail.py: error importing module: <AttributeError> module 'email' has no attribute 'Header' (line *) (glob)
120 mercurial/mail.py: error importing module: <AttributeError> module 'email' has no attribute 'Header' (line *) (glob)
122 mercurial/manifest.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
121 mercurial/manifest.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
123 mercurial/merge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
122 mercurial/merge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
124 mercurial/namespaces.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
123 mercurial/namespaces.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
125 mercurial/patch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
124 mercurial/patch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
126 mercurial/pure/mpatch.py: error importing module: <ImportError> cannot import name 'pycompat' (line *) (glob)
125 mercurial/pure/mpatch.py: error importing module: <ImportError> cannot import name 'pycompat' (line *) (glob)
127 mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'mercurial.pure.node' (line *) (glob)
126 mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'mercurial.pure.node' (line *) (glob)
128 mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
127 mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
129 mercurial/revlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
128 mercurial/revlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
130 mercurial/revset.py: error importing module: <AttributeError> 'dict' object has no attribute 'iteritems' (line *) (glob)
129 mercurial/revset.py: error importing module: <AttributeError> 'dict' object has no attribute 'iteritems' (line *) (glob)
131 mercurial/scmutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
130 mercurial/scmutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
132 mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
131 mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
133 mercurial/simplemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
132 mercurial/simplemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
134 mercurial/sshpeer.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob)
133 mercurial/sshpeer.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob)
135 mercurial/sshserver.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
134 mercurial/sshserver.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
136 mercurial/statichttprepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
135 mercurial/statichttprepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
137 mercurial/store.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
136 mercurial/store.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
138 mercurial/streamclone.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
137 mercurial/streamclone.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
139 mercurial/subrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
138 mercurial/subrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
140 mercurial/templatefilters.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
139 mercurial/templatefilters.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
141 mercurial/templatekw.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
140 mercurial/templatekw.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
142 mercurial/templater.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
141 mercurial/templater.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
143 mercurial/ui.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
142 mercurial/ui.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
144 mercurial/unionrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
143 mercurial/unionrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
145 mercurial/url.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
144 mercurial/url.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
146 mercurial/verify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
145 mercurial/verify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
147 mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
146 mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
148 mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
147 mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
149 mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
148 mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
150
149
151 #endif
150 #endif
@@ -1,161 +1,161 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4
4
5 __doc__ = """Tiny HTTP Proxy.
5 __doc__ = """Tiny HTTP Proxy.
6
6
7 This module implements GET, HEAD, POST, PUT and DELETE methods
7 This module implements GET, HEAD, POST, PUT and DELETE methods
8 on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
8 on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
9 method is also implemented experimentally, but has not been
9 method is also implemented experimentally, but has not been
10 tested yet.
10 tested yet.
11
11
12 Any help will be greatly appreciated. SUZUKI Hisao
12 Any help will be greatly appreciated. SUZUKI Hisao
13 """
13 """
14
14
15 __version__ = "0.2.1"
15 __version__ = "0.2.1"
16
16
17 import BaseHTTPServer
17 import BaseHTTPServer
18 import SocketServer
19 import os
18 import os
20 import select
19 import select
21 import socket
20 import socket
22 import sys
21 import sys
23
22
24 from mercurial import util
23 from mercurial import util
25
24
26 urlparse = util.urlparse
25 urlparse = util.urlparse
26 socketserver = util.socketserver
27
27
28 class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler):
28 class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler):
29 __base = BaseHTTPServer.BaseHTTPRequestHandler
29 __base = BaseHTTPServer.BaseHTTPRequestHandler
30 __base_handle = __base.handle
30 __base_handle = __base.handle
31
31
32 server_version = "TinyHTTPProxy/" + __version__
32 server_version = "TinyHTTPProxy/" + __version__
33 rbufsize = 0 # self.rfile Be unbuffered
33 rbufsize = 0 # self.rfile Be unbuffered
34
34
35 def handle(self):
35 def handle(self):
36 (ip, port) = self.client_address
36 (ip, port) = self.client_address
37 allowed = getattr(self, 'allowed_clients', None)
37 allowed = getattr(self, 'allowed_clients', None)
38 if allowed is not None and ip not in allowed:
38 if allowed is not None and ip not in allowed:
39 self.raw_requestline = self.rfile.readline()
39 self.raw_requestline = self.rfile.readline()
40 if self.parse_request():
40 if self.parse_request():
41 self.send_error(403)
41 self.send_error(403)
42 else:
42 else:
43 self.__base_handle()
43 self.__base_handle()
44
44
45 def log_request(self, code='-', size='-'):
45 def log_request(self, code='-', size='-'):
46 xheaders = [h for h in self.headers.items() if h[0].startswith('x-')]
46 xheaders = [h for h in self.headers.items() if h[0].startswith('x-')]
47 self.log_message('"%s" %s %s%s',
47 self.log_message('"%s" %s %s%s',
48 self.requestline, str(code), str(size),
48 self.requestline, str(code), str(size),
49 ''.join([' %s:%s' % h for h in sorted(xheaders)]))
49 ''.join([' %s:%s' % h for h in sorted(xheaders)]))
50
50
51 def _connect_to(self, netloc, soc):
51 def _connect_to(self, netloc, soc):
52 i = netloc.find(':')
52 i = netloc.find(':')
53 if i >= 0:
53 if i >= 0:
54 host_port = netloc[:i], int(netloc[i + 1:])
54 host_port = netloc[:i], int(netloc[i + 1:])
55 else:
55 else:
56 host_port = netloc, 80
56 host_port = netloc, 80
57 print("\t" "connect to %s:%d" % host_port)
57 print("\t" "connect to %s:%d" % host_port)
58 try: soc.connect(host_port)
58 try: soc.connect(host_port)
59 except socket.error as arg:
59 except socket.error as arg:
60 try: msg = arg[1]
60 try: msg = arg[1]
61 except (IndexError, TypeError): msg = arg
61 except (IndexError, TypeError): msg = arg
62 self.send_error(404, msg)
62 self.send_error(404, msg)
63 return 0
63 return 0
64 return 1
64 return 1
65
65
66 def do_CONNECT(self):
66 def do_CONNECT(self):
67 soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
67 soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
68 try:
68 try:
69 if self._connect_to(self.path, soc):
69 if self._connect_to(self.path, soc):
70 self.log_request(200)
70 self.log_request(200)
71 self.wfile.write(self.protocol_version +
71 self.wfile.write(self.protocol_version +
72 " 200 Connection established\r\n")
72 " 200 Connection established\r\n")
73 self.wfile.write("Proxy-agent: %s\r\n" % self.version_string())
73 self.wfile.write("Proxy-agent: %s\r\n" % self.version_string())
74 self.wfile.write("\r\n")
74 self.wfile.write("\r\n")
75 self._read_write(soc, 300)
75 self._read_write(soc, 300)
76 finally:
76 finally:
77 print("\t" "bye")
77 print("\t" "bye")
78 soc.close()
78 soc.close()
79 self.connection.close()
79 self.connection.close()
80
80
81 def do_GET(self):
81 def do_GET(self):
82 (scm, netloc, path, params, query, fragment) = urlparse.urlparse(
82 (scm, netloc, path, params, query, fragment) = urlparse.urlparse(
83 self.path, 'http')
83 self.path, 'http')
84 if scm != 'http' or fragment or not netloc:
84 if scm != 'http' or fragment or not netloc:
85 self.send_error(400, "bad url %s" % self.path)
85 self.send_error(400, "bad url %s" % self.path)
86 return
86 return
87 soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
87 soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
88 try:
88 try:
89 if self._connect_to(netloc, soc):
89 if self._connect_to(netloc, soc):
90 self.log_request()
90 self.log_request()
91 soc.send("%s %s %s\r\n" % (
91 soc.send("%s %s %s\r\n" % (
92 self.command,
92 self.command,
93 urlparse.urlunparse(('', '', path, params, query, '')),
93 urlparse.urlunparse(('', '', path, params, query, '')),
94 self.request_version))
94 self.request_version))
95 self.headers['Connection'] = 'close'
95 self.headers['Connection'] = 'close'
96 del self.headers['Proxy-Connection']
96 del self.headers['Proxy-Connection']
97 for key_val in self.headers.items():
97 for key_val in self.headers.items():
98 soc.send("%s: %s\r\n" % key_val)
98 soc.send("%s: %s\r\n" % key_val)
99 soc.send("\r\n")
99 soc.send("\r\n")
100 self._read_write(soc)
100 self._read_write(soc)
101 finally:
101 finally:
102 print("\t" "bye")
102 print("\t" "bye")
103 soc.close()
103 soc.close()
104 self.connection.close()
104 self.connection.close()
105
105
106 def _read_write(self, soc, max_idling=20):
106 def _read_write(self, soc, max_idling=20):
107 iw = [self.connection, soc]
107 iw = [self.connection, soc]
108 ow = []
108 ow = []
109 count = 0
109 count = 0
110 while True:
110 while True:
111 count += 1
111 count += 1
112 (ins, _, exs) = select.select(iw, ow, iw, 3)
112 (ins, _, exs) = select.select(iw, ow, iw, 3)
113 if exs:
113 if exs:
114 break
114 break
115 if ins:
115 if ins:
116 for i in ins:
116 for i in ins:
117 if i is soc:
117 if i is soc:
118 out = self.connection
118 out = self.connection
119 else:
119 else:
120 out = soc
120 out = soc
121 try:
121 try:
122 data = i.recv(8192)
122 data = i.recv(8192)
123 except socket.error:
123 except socket.error:
124 break
124 break
125 if data:
125 if data:
126 out.send(data)
126 out.send(data)
127 count = 0
127 count = 0
128 else:
128 else:
129 print("\t" "idle", count)
129 print("\t" "idle", count)
130 if count == max_idling:
130 if count == max_idling:
131 break
131 break
132
132
133 do_HEAD = do_GET
133 do_HEAD = do_GET
134 do_POST = do_GET
134 do_POST = do_GET
135 do_PUT = do_GET
135 do_PUT = do_GET
136 do_DELETE = do_GET
136 do_DELETE = do_GET
137
137
138 class ThreadingHTTPServer (SocketServer.ThreadingMixIn,
138 class ThreadingHTTPServer (socketserver.ThreadingMixIn,
139 BaseHTTPServer.HTTPServer):
139 BaseHTTPServer.HTTPServer):
140 def __init__(self, *args, **kwargs):
140 def __init__(self, *args, **kwargs):
141 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
141 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
142 a = open("proxy.pid", "w")
142 a = open("proxy.pid", "w")
143 a.write(str(os.getpid()) + "\n")
143 a.write(str(os.getpid()) + "\n")
144 a.close()
144 a.close()
145
145
146 if __name__ == '__main__':
146 if __name__ == '__main__':
147 argv = sys.argv
147 argv = sys.argv
148 if argv[1:] and argv[1] in ('-h', '--help'):
148 if argv[1:] and argv[1] in ('-h', '--help'):
149 print(argv[0], "[port [allowed_client_name ...]]")
149 print(argv[0], "[port [allowed_client_name ...]]")
150 else:
150 else:
151 if argv[2:]:
151 if argv[2:]:
152 allowed = []
152 allowed = []
153 for name in argv[2:]:
153 for name in argv[2:]:
154 client = socket.gethostbyname(name)
154 client = socket.gethostbyname(name)
155 allowed.append(client)
155 allowed.append(client)
156 print("Accept: %s (%s)" % (client, name))
156 print("Accept: %s (%s)" % (client, name))
157 ProxyHandler.allowed_clients = allowed
157 ProxyHandler.allowed_clients = allowed
158 del argv[2:]
158 del argv[2:]
159 else:
159 else:
160 print("Any clients will be served...")
160 print("Any clients will be served...")
161 BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer)
161 BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer)
General Comments 0
You need to be logged in to leave comments. Login now