dispatch: move part of callcatch to scmutil...
Jun Wu
r30520:4338f87d default
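For orientation before the diff: after this change, dispatch.callcatch becomes a thin wrapper. It delegates generic exception handling (I/O, OS, memory, lock, repository and similar errors) to a new scmutil.callcatch and keeps only the command-level handling in dispatch: AmbiguousCommand, CommandError, ParseError, UnknownCommand, plus handlecommandexception for anything uncaught. The standalone sketch below illustrates that two-layer pattern; the names generic_callcatch, dispatch_callcatch and the local UnknownCommand class are simplified stand-ins for illustration, not Mercurial's actual API.

# Hypothetical, simplified sketch of the resulting two-layer structure.
# The names below are illustrative stand-ins, not Mercurial's real API.

class UnknownCommand(Exception):
    """Stand-in for error.UnknownCommand."""

def generic_callcatch(func):
    # roughly what moves into scmutil.callcatch: run func() and translate
    # low-level failures into an exit code, re-raising what it cannot handle
    try:
        return func()
    except KeyboardInterrupt:
        raise
    except (IOError, OSError, MemoryError) as inst:
        print("abort: %s" % inst)
        return -1

def dispatch_callcatch(func):
    # roughly what remains in dispatch.callcatch: delegate to the generic
    # layer and add only command/config-level handling on top of it
    try:
        return generic_callcatch(func)
    except UnknownCommand as inst:
        print("hg: unknown command '%s'" % inst.args[0])
        return -1

if __name__ == '__main__':
    print(dispatch_callcatch(lambda: 0))    # prints 0
    def badcmd():
        raise UnknownCommand('frobnicate')
    print(dispatch_callcatch(badcmd))       # prints the warning, then -1

The apparent benefit of the split is that other code can reuse the generic layer from scmutil without importing command dispatch.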
@@ -1,975 +1,885 @@ dispatch.py
1 # dispatch.py - command dispatching for mercurial
1 # dispatch.py - command dispatching for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import, print_function
8 from __future__ import absolute_import, print_function
9
9
10 import atexit
10 import atexit
11 import difflib
11 import difflib
12 import errno
12 import errno
13 import os
13 import os
14 import pdb
14 import pdb
15 import re
15 import re
16 import shlex
16 import shlex
17 import signal
17 import signal
18 import socket
19 import sys
18 import sys
20 import time
19 import time
21 import traceback
20 import traceback
22
21
23
22
24 from .i18n import _
23 from .i18n import _
25
24
26 from . import (
25 from . import (
27 cmdutil,
26 cmdutil,
28 commands,
27 commands,
29 debugcommands,
28 debugcommands,
30 demandimport,
29 demandimport,
31 encoding,
30 encoding,
32 error,
31 error,
33 extensions,
32 extensions,
34 fancyopts,
33 fancyopts,
35 fileset,
34 fileset,
36 hg,
35 hg,
37 hook,
36 hook,
38 profiling,
37 profiling,
39 pycompat,
38 pycompat,
40 revset,
39 revset,
40 scmutil,
41 templatefilters,
41 templatefilters,
42 templatekw,
42 templatekw,
43 templater,
43 templater,
44 ui as uimod,
44 ui as uimod,
45 util,
45 util,
46 )
46 )
47
47
48 class request(object):
48 class request(object):
49 def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
49 def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
50 ferr=None):
50 ferr=None):
51 self.args = args
51 self.args = args
52 self.ui = ui
52 self.ui = ui
53 self.repo = repo
53 self.repo = repo
54
54
55 # input/output/error streams
55 # input/output/error streams
56 self.fin = fin
56 self.fin = fin
57 self.fout = fout
57 self.fout = fout
58 self.ferr = ferr
58 self.ferr = ferr
59
59
60 def run():
60 def run():
61 "run the command in sys.argv"
61 "run the command in sys.argv"
62 sys.exit((dispatch(request(pycompat.sysargv[1:])) or 0) & 255)
62 sys.exit((dispatch(request(pycompat.sysargv[1:])) or 0) & 255)
63
63
64 def _getsimilar(symbols, value):
64 def _getsimilar(symbols, value):
65 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
65 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
66 # The cutoff for similarity here is pretty arbitrary. It should
66 # The cutoff for similarity here is pretty arbitrary. It should
67 # probably be investigated and tweaked.
67 # probably be investigated and tweaked.
68 return [s for s in symbols if sim(s) > 0.6]
68 return [s for s in symbols if sim(s) > 0.6]
69
69
70 def _reportsimilar(write, similar):
70 def _reportsimilar(write, similar):
71 if len(similar) == 1:
71 if len(similar) == 1:
72 write(_("(did you mean %s?)\n") % similar[0])
72 write(_("(did you mean %s?)\n") % similar[0])
73 elif similar:
73 elif similar:
74 ss = ", ".join(sorted(similar))
74 ss = ", ".join(sorted(similar))
75 write(_("(did you mean one of %s?)\n") % ss)
75 write(_("(did you mean one of %s?)\n") % ss)
76
76
77 def _formatparse(write, inst):
77 def _formatparse(write, inst):
78 similar = []
78 similar = []
79 if isinstance(inst, error.UnknownIdentifier):
79 if isinstance(inst, error.UnknownIdentifier):
80 # make sure to check fileset first, as revset can invoke fileset
80 # make sure to check fileset first, as revset can invoke fileset
81 similar = _getsimilar(inst.symbols, inst.function)
81 similar = _getsimilar(inst.symbols, inst.function)
82 if len(inst.args) > 1:
82 if len(inst.args) > 1:
83 write(_("hg: parse error at %s: %s\n") %
83 write(_("hg: parse error at %s: %s\n") %
84 (inst.args[1], inst.args[0]))
84 (inst.args[1], inst.args[0]))
85 if (inst.args[0][0] == ' '):
85 if (inst.args[0][0] == ' '):
86 write(_("unexpected leading whitespace\n"))
86 write(_("unexpected leading whitespace\n"))
87 else:
87 else:
88 write(_("hg: parse error: %s\n") % inst.args[0])
88 write(_("hg: parse error: %s\n") % inst.args[0])
89 _reportsimilar(write, similar)
89 _reportsimilar(write, similar)
90 if inst.hint:
90 if inst.hint:
91 write(_("(%s)\n") % inst.hint)
91 write(_("(%s)\n") % inst.hint)
92
92
93 def dispatch(req):
93 def dispatch(req):
94 "run the command specified in req.args"
94 "run the command specified in req.args"
95 if req.ferr:
95 if req.ferr:
96 ferr = req.ferr
96 ferr = req.ferr
97 elif req.ui:
97 elif req.ui:
98 ferr = req.ui.ferr
98 ferr = req.ui.ferr
99 else:
99 else:
100 ferr = util.stderr
100 ferr = util.stderr
101
101
102 try:
102 try:
103 if not req.ui:
103 if not req.ui:
104 req.ui = uimod.ui()
104 req.ui = uimod.ui()
105 if '--traceback' in req.args:
105 if '--traceback' in req.args:
106 req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
106 req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
107
107
108 # set ui streams from the request
108 # set ui streams from the request
109 if req.fin:
109 if req.fin:
110 req.ui.fin = req.fin
110 req.ui.fin = req.fin
111 if req.fout:
111 if req.fout:
112 req.ui.fout = req.fout
112 req.ui.fout = req.fout
113 if req.ferr:
113 if req.ferr:
114 req.ui.ferr = req.ferr
114 req.ui.ferr = req.ferr
115 except error.Abort as inst:
115 except error.Abort as inst:
116 ferr.write(_("abort: %s\n") % inst)
116 ferr.write(_("abort: %s\n") % inst)
117 if inst.hint:
117 if inst.hint:
118 ferr.write(_("(%s)\n") % inst.hint)
118 ferr.write(_("(%s)\n") % inst.hint)
119 return -1
119 return -1
120 except error.ParseError as inst:
120 except error.ParseError as inst:
121 _formatparse(ferr.write, inst)
121 _formatparse(ferr.write, inst)
122 return -1
122 return -1
123
123
124 msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
124 msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
125 starttime = time.time()
125 starttime = time.time()
126 ret = None
126 ret = None
127 try:
127 try:
128 ret = _runcatch(req)
128 ret = _runcatch(req)
129 except KeyboardInterrupt:
129 except KeyboardInterrupt:
130 try:
130 try:
131 req.ui.warn(_("interrupted!\n"))
131 req.ui.warn(_("interrupted!\n"))
132 except IOError as inst:
132 except IOError as inst:
133 if inst.errno != errno.EPIPE:
133 if inst.errno != errno.EPIPE:
134 raise
134 raise
135 ret = -1
135 ret = -1
136 finally:
136 finally:
137 duration = time.time() - starttime
137 duration = time.time() - starttime
138 req.ui.flush()
138 req.ui.flush()
139 req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
139 req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
140 msg, ret or 0, duration)
140 msg, ret or 0, duration)
141 return ret
141 return ret
142
142
143 def _runcatch(req):
143 def _runcatch(req):
144 def catchterm(*args):
144 def catchterm(*args):
145 raise error.SignalInterrupt
145 raise error.SignalInterrupt
146
146
147 ui = req.ui
147 ui = req.ui
148 try:
148 try:
149 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
149 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
150 num = getattr(signal, name, None)
150 num = getattr(signal, name, None)
151 if num:
151 if num:
152 signal.signal(num, catchterm)
152 signal.signal(num, catchterm)
153 except ValueError:
153 except ValueError:
154 pass # happens if called in a thread
154 pass # happens if called in a thread
155
155
156 def _runcatchfunc():
156 def _runcatchfunc():
157 try:
157 try:
158 debugger = 'pdb'
158 debugger = 'pdb'
159 debugtrace = {
159 debugtrace = {
160 'pdb' : pdb.set_trace
160 'pdb' : pdb.set_trace
161 }
161 }
162 debugmortem = {
162 debugmortem = {
163 'pdb' : pdb.post_mortem
163 'pdb' : pdb.post_mortem
164 }
164 }
165
165
166 # read --config before doing anything else
166 # read --config before doing anything else
167 # (e.g. to change trust settings for reading .hg/hgrc)
167 # (e.g. to change trust settings for reading .hg/hgrc)
168 cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))
168 cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))
169
169
170 if req.repo:
170 if req.repo:
171 # copy configs that were passed on the cmdline (--config) to
171 # copy configs that were passed on the cmdline (--config) to
172 # the repo ui
172 # the repo ui
173 for sec, name, val in cfgs:
173 for sec, name, val in cfgs:
174 req.repo.ui.setconfig(sec, name, val, source='--config')
174 req.repo.ui.setconfig(sec, name, val, source='--config')
175
175
176 # developer config: ui.debugger
176 # developer config: ui.debugger
177 debugger = ui.config("ui", "debugger")
177 debugger = ui.config("ui", "debugger")
178 debugmod = pdb
178 debugmod = pdb
179 if not debugger or ui.plain():
179 if not debugger or ui.plain():
180 # if we are in HGPLAIN mode, then disable custom debugging
180 # if we are in HGPLAIN mode, then disable custom debugging
181 debugger = 'pdb'
181 debugger = 'pdb'
182 elif '--debugger' in req.args:
182 elif '--debugger' in req.args:
183 # This import can be slow for fancy debuggers, so only
183 # This import can be slow for fancy debuggers, so only
184 # do it when absolutely necessary, i.e. when actual
184 # do it when absolutely necessary, i.e. when actual
185 # debugging has been requested
185 # debugging has been requested
186 with demandimport.deactivated():
186 with demandimport.deactivated():
187 try:
187 try:
188 debugmod = __import__(debugger)
188 debugmod = __import__(debugger)
189 except ImportError:
189 except ImportError:
190 pass # Leave debugmod = pdb
190 pass # Leave debugmod = pdb
191
191
192 debugtrace[debugger] = debugmod.set_trace
192 debugtrace[debugger] = debugmod.set_trace
193 debugmortem[debugger] = debugmod.post_mortem
193 debugmortem[debugger] = debugmod.post_mortem
194
194
195 # enter the debugger before command execution
195 # enter the debugger before command execution
196 if '--debugger' in req.args:
196 if '--debugger' in req.args:
197 ui.warn(_("entering debugger - "
197 ui.warn(_("entering debugger - "
198 "type c to continue starting hg or h for help\n"))
198 "type c to continue starting hg or h for help\n"))
199
199
200 if (debugger != 'pdb' and
200 if (debugger != 'pdb' and
201 debugtrace[debugger] == debugtrace['pdb']):
201 debugtrace[debugger] == debugtrace['pdb']):
202 ui.warn(_("%s debugger specified "
202 ui.warn(_("%s debugger specified "
203 "but its module was not found\n") % debugger)
203 "but its module was not found\n") % debugger)
204 with demandimport.deactivated():
204 with demandimport.deactivated():
205 debugtrace[debugger]()
205 debugtrace[debugger]()
206 try:
206 try:
207 return _dispatch(req)
207 return _dispatch(req)
208 finally:
208 finally:
209 ui.flush()
209 ui.flush()
210 except: # re-raises
210 except: # re-raises
211 # enter the debugger when we hit an exception
211 # enter the debugger when we hit an exception
212 if '--debugger' in req.args:
212 if '--debugger' in req.args:
213 traceback.print_exc()
213 traceback.print_exc()
214 debugmortem[debugger](sys.exc_info()[2])
214 debugmortem[debugger](sys.exc_info()[2])
215 ui.traceback()
215 ui.traceback()
216 raise
216 raise
217
217
218 return callcatch(ui, _runcatchfunc)
218 return callcatch(ui, _runcatchfunc)
219
219
220 def callcatch(ui, func):
220 def callcatch(ui, func):
221 """call func() with global exception handling
221 """like scmutil.callcatch but handles more high-level exceptions about
222
222 config parsing and commands. besides, use handlecommandexception to handle
223 return func() if no exception happens. otherwise do some error handling
223 uncaught exceptions.
224 and return an exit code accordingly.
225 """
224 """
226 try:
225 try:
227 return func()
226 return scmutil.callcatch(ui, func)
228 # Global exception handling, alphabetically
229 # Mercurial-specific first, followed by built-in and library exceptions
230 except error.AmbiguousCommand as inst:
227 except error.AmbiguousCommand as inst:
231 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
228 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
232 (inst.args[0], " ".join(inst.args[1])))
229 (inst.args[0], " ".join(inst.args[1])))
233 except error.ParseError as inst:
234 _formatparse(ui.warn, inst)
235 return -1
236 except error.LockHeld as inst:
237 if inst.errno == errno.ETIMEDOUT:
238 reason = _('timed out waiting for lock held by %s') % inst.locker
239 else:
240 reason = _('lock held by %s') % inst.locker
241 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
242 except error.LockUnavailable as inst:
243 ui.warn(_("abort: could not lock %s: %s\n") %
244 (inst.desc or inst.filename, inst.strerror))
245 except error.CommandError as inst:
230 except error.CommandError as inst:
246 if inst.args[0]:
231 if inst.args[0]:
247 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
232 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
248 commands.help_(ui, inst.args[0], full=False, command=True)
233 commands.help_(ui, inst.args[0], full=False, command=True)
249 else:
234 else:
250 ui.warn(_("hg: %s\n") % inst.args[1])
235 ui.warn(_("hg: %s\n") % inst.args[1])
251 commands.help_(ui, 'shortlist')
236 commands.help_(ui, 'shortlist')
252 except error.OutOfBandError as inst:
237 except error.ParseError as inst:
253 if inst.args:
238 _formatparse(ui.warn, inst)
254 msg = _("abort: remote error:\n")
239 return -1
255 else:
256 msg = _("abort: remote error\n")
257 ui.warn(msg)
258 if inst.args:
259 ui.warn(''.join(inst.args))
260 if inst.hint:
261 ui.warn('(%s)\n' % inst.hint)
262 except error.RepoError as inst:
263 ui.warn(_("abort: %s!\n") % inst)
264 if inst.hint:
265 ui.warn(_("(%s)\n") % inst.hint)
266 except error.ResponseError as inst:
267 ui.warn(_("abort: %s") % inst.args[0])
268 if not isinstance(inst.args[1], basestring):
269 ui.warn(" %r\n" % (inst.args[1],))
270 elif not inst.args[1]:
271 ui.warn(_(" empty string\n"))
272 else:
273 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
274 except error.CensoredNodeError as inst:
275 ui.warn(_("abort: file censored %s!\n") % inst)
276 except error.RevlogError as inst:
277 ui.warn(_("abort: %s!\n") % inst)
278 except error.SignalInterrupt:
279 ui.warn(_("killed!\n"))
280 except error.UnknownCommand as inst:
240 except error.UnknownCommand as inst:
281 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
241 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
282 try:
242 try:
283 # check if the command is in a disabled extension
243 # check if the command is in a disabled extension
284 # (but don't check for extensions themselves)
244 # (but don't check for extensions themselves)
285 commands.help_(ui, inst.args[0], unknowncmd=True)
245 commands.help_(ui, inst.args[0], unknowncmd=True)
286 except (error.UnknownCommand, error.Abort):
246 except (error.UnknownCommand, error.Abort):
287 suggested = False
247 suggested = False
288 if len(inst.args) == 2:
248 if len(inst.args) == 2:
289 sim = _getsimilar(inst.args[1], inst.args[0])
249 sim = _getsimilar(inst.args[1], inst.args[0])
290 if sim:
250 if sim:
291 _reportsimilar(ui.warn, sim)
251 _reportsimilar(ui.warn, sim)
292 suggested = True
252 suggested = True
293 if not suggested:
253 if not suggested:
294 commands.help_(ui, 'shortlist')
254 commands.help_(ui, 'shortlist')
295 except error.InterventionRequired as inst:
255 except IOError:
296 ui.warn("%s\n" % inst)
256 raise
297 if inst.hint:
298 ui.warn(_("(%s)\n") % inst.hint)
299 return 1
300 except error.Abort as inst:
301 ui.warn(_("abort: %s\n") % inst)
302 if inst.hint:
303 ui.warn(_("(%s)\n") % inst.hint)
304 except ImportError as inst:
305 ui.warn(_("abort: %s!\n") % inst)
306 m = str(inst).split()[-1]
307 if m in "mpatch bdiff".split():
308 ui.warn(_("(did you forget to compile extensions?)\n"))
309 elif m in "zlib".split():
310 ui.warn(_("(is your Python install correct?)\n"))
311 except IOError as inst:
312 if util.safehasattr(inst, "code"):
313 ui.warn(_("abort: %s\n") % inst)
314 elif util.safehasattr(inst, "reason"):
315 try: # usually it is in the form (errno, strerror)
316 reason = inst.reason.args[1]
317 except (AttributeError, IndexError):
318 # it might be anything, for example a string
319 reason = inst.reason
320 if isinstance(reason, unicode):
321 # SSLError of Python 2.7.9 contains a unicode
322 reason = reason.encode(encoding.encoding, 'replace')
323 ui.warn(_("abort: error: %s\n") % reason)
324 elif (util.safehasattr(inst, "args")
325 and inst.args and inst.args[0] == errno.EPIPE):
326 pass
327 elif getattr(inst, "strerror", None):
328 if getattr(inst, "filename", None):
329 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
330 else:
331 ui.warn(_("abort: %s\n") % inst.strerror)
332 else:
333 raise
334 except OSError as inst:
335 if getattr(inst, "filename", None) is not None:
336 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
337 else:
338 ui.warn(_("abort: %s\n") % inst.strerror)
339 except KeyboardInterrupt:
257 except KeyboardInterrupt:
340 raise
258 raise
341 except MemoryError:
259 except: # probably re-raises
342 ui.warn(_("abort: out of memory\n"))
343 except SystemExit as inst:
344 # Commands shouldn't sys.exit directly, but give a return code.
345 # Just in case catch this and and pass exit code to caller.
346 return inst.code
347 except socket.error as inst:
348 ui.warn(_("abort: %s\n") % inst.args[-1])
349 except: # perhaps re-raises
350 if not handlecommandexception(ui):
260 if not handlecommandexception(ui):
351 raise
261 raise
352
262
353 return -1
263 return -1
354
264
355 def aliasargs(fn, givenargs):
265 def aliasargs(fn, givenargs):
356 args = getattr(fn, 'args', [])
266 args = getattr(fn, 'args', [])
357 if args:
267 if args:
358 cmd = ' '.join(map(util.shellquote, args))
268 cmd = ' '.join(map(util.shellquote, args))
359
269
360 nums = []
270 nums = []
361 def replacer(m):
271 def replacer(m):
362 num = int(m.group(1)) - 1
272 num = int(m.group(1)) - 1
363 nums.append(num)
273 nums.append(num)
364 if num < len(givenargs):
274 if num < len(givenargs):
365 return givenargs[num]
275 return givenargs[num]
366 raise error.Abort(_('too few arguments for command alias'))
276 raise error.Abort(_('too few arguments for command alias'))
367 cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
277 cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
368 givenargs = [x for i, x in enumerate(givenargs)
278 givenargs = [x for i, x in enumerate(givenargs)
369 if i not in nums]
279 if i not in nums]
370 args = shlex.split(cmd)
280 args = shlex.split(cmd)
371 return args + givenargs
281 return args + givenargs
372
282
373 def aliasinterpolate(name, args, cmd):
283 def aliasinterpolate(name, args, cmd):
374 '''interpolate args into cmd for shell aliases
284 '''interpolate args into cmd for shell aliases
375
285
376 This also handles $0, $@ and "$@".
286 This also handles $0, $@ and "$@".
377 '''
287 '''
378 # util.interpolate can't deal with "$@" (with quotes) because it's only
288 # util.interpolate can't deal with "$@" (with quotes) because it's only
379 # built to match prefix + patterns.
289 # built to match prefix + patterns.
380 replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
290 replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
381 replacemap['$0'] = name
291 replacemap['$0'] = name
382 replacemap['$$'] = '$'
292 replacemap['$$'] = '$'
383 replacemap['$@'] = ' '.join(args)
293 replacemap['$@'] = ' '.join(args)
384 # Typical Unix shells interpolate "$@" (with quotes) as all the positional
294 # Typical Unix shells interpolate "$@" (with quotes) as all the positional
385 # parameters, separated out into words. Emulate the same behavior here by
295 # parameters, separated out into words. Emulate the same behavior here by
386 # quoting the arguments individually. POSIX shells will then typically
296 # quoting the arguments individually. POSIX shells will then typically
387 # tokenize each argument into exactly one word.
297 # tokenize each argument into exactly one word.
388 replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
298 replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
389 # escape '\$' for regex
299 # escape '\$' for regex
390 regex = '|'.join(replacemap.keys()).replace('$', r'\$')
300 regex = '|'.join(replacemap.keys()).replace('$', r'\$')
391 r = re.compile(regex)
301 r = re.compile(regex)
392 return r.sub(lambda x: replacemap[x.group()], cmd)
302 return r.sub(lambda x: replacemap[x.group()], cmd)
393
303
394 class cmdalias(object):
304 class cmdalias(object):
395 def __init__(self, name, definition, cmdtable, source):
305 def __init__(self, name, definition, cmdtable, source):
396 self.name = self.cmd = name
306 self.name = self.cmd = name
397 self.cmdname = ''
307 self.cmdname = ''
398 self.definition = definition
308 self.definition = definition
399 self.fn = None
309 self.fn = None
400 self.givenargs = []
310 self.givenargs = []
401 self.opts = []
311 self.opts = []
402 self.help = ''
312 self.help = ''
403 self.badalias = None
313 self.badalias = None
404 self.unknowncmd = False
314 self.unknowncmd = False
405 self.source = source
315 self.source = source
406
316
407 try:
317 try:
408 aliases, entry = cmdutil.findcmd(self.name, cmdtable)
318 aliases, entry = cmdutil.findcmd(self.name, cmdtable)
409 for alias, e in cmdtable.iteritems():
319 for alias, e in cmdtable.iteritems():
410 if e is entry:
320 if e is entry:
411 self.cmd = alias
321 self.cmd = alias
412 break
322 break
413 self.shadows = True
323 self.shadows = True
414 except error.UnknownCommand:
324 except error.UnknownCommand:
415 self.shadows = False
325 self.shadows = False
416
326
417 if not self.definition:
327 if not self.definition:
418 self.badalias = _("no definition for alias '%s'") % self.name
328 self.badalias = _("no definition for alias '%s'") % self.name
419 return
329 return
420
330
421 if self.definition.startswith('!'):
331 if self.definition.startswith('!'):
422 self.shell = True
332 self.shell = True
423 def fn(ui, *args):
333 def fn(ui, *args):
424 env = {'HG_ARGS': ' '.join((self.name,) + args)}
334 env = {'HG_ARGS': ' '.join((self.name,) + args)}
425 def _checkvar(m):
335 def _checkvar(m):
426 if m.groups()[0] == '$':
336 if m.groups()[0] == '$':
427 return m.group()
337 return m.group()
428 elif int(m.groups()[0]) <= len(args):
338 elif int(m.groups()[0]) <= len(args):
429 return m.group()
339 return m.group()
430 else:
340 else:
431 ui.debug("No argument found for substitution "
341 ui.debug("No argument found for substitution "
432 "of %i variable in alias '%s' definition."
342 "of %i variable in alias '%s' definition."
433 % (int(m.groups()[0]), self.name))
343 % (int(m.groups()[0]), self.name))
434 return ''
344 return ''
435 cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
345 cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
436 cmd = aliasinterpolate(self.name, args, cmd)
346 cmd = aliasinterpolate(self.name, args, cmd)
437 return ui.system(cmd, environ=env)
347 return ui.system(cmd, environ=env)
438 self.fn = fn
348 self.fn = fn
439 return
349 return
440
350
441 try:
351 try:
442 args = shlex.split(self.definition)
352 args = shlex.split(self.definition)
443 except ValueError as inst:
353 except ValueError as inst:
444 self.badalias = (_("error in definition for alias '%s': %s")
354 self.badalias = (_("error in definition for alias '%s': %s")
445 % (self.name, inst))
355 % (self.name, inst))
446 return
356 return
447 self.cmdname = cmd = args.pop(0)
357 self.cmdname = cmd = args.pop(0)
448 self.givenargs = args
358 self.givenargs = args
449
359
450 for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
360 for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
451 if _earlygetopt([invalidarg], args):
361 if _earlygetopt([invalidarg], args):
452 self.badalias = (_("error in definition for alias '%s': %s may "
362 self.badalias = (_("error in definition for alias '%s': %s may "
453 "only be given on the command line")
363 "only be given on the command line")
454 % (self.name, invalidarg))
364 % (self.name, invalidarg))
455 return
365 return
456
366
457 try:
367 try:
458 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
368 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
459 if len(tableentry) > 2:
369 if len(tableentry) > 2:
460 self.fn, self.opts, self.help = tableentry
370 self.fn, self.opts, self.help = tableentry
461 else:
371 else:
462 self.fn, self.opts = tableentry
372 self.fn, self.opts = tableentry
463
373
464 if self.help.startswith("hg " + cmd):
374 if self.help.startswith("hg " + cmd):
465 # drop prefix in old-style help lines so hg shows the alias
375 # drop prefix in old-style help lines so hg shows the alias
466 self.help = self.help[4 + len(cmd):]
376 self.help = self.help[4 + len(cmd):]
467 self.__doc__ = self.fn.__doc__
377 self.__doc__ = self.fn.__doc__
468
378
469 except error.UnknownCommand:
379 except error.UnknownCommand:
470 self.badalias = (_("alias '%s' resolves to unknown command '%s'")
380 self.badalias = (_("alias '%s' resolves to unknown command '%s'")
471 % (self.name, cmd))
381 % (self.name, cmd))
472 self.unknowncmd = True
382 self.unknowncmd = True
473 except error.AmbiguousCommand:
383 except error.AmbiguousCommand:
474 self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
384 self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
475 % (self.name, cmd))
385 % (self.name, cmd))
476
386
477 @property
387 @property
478 def args(self):
388 def args(self):
479 args = map(util.expandpath, self.givenargs)
389 args = map(util.expandpath, self.givenargs)
480 return aliasargs(self.fn, args)
390 return aliasargs(self.fn, args)
481
391
482 def __getattr__(self, name):
392 def __getattr__(self, name):
483 adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
393 adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
484 if name not in adefaults:
394 if name not in adefaults:
485 raise AttributeError(name)
395 raise AttributeError(name)
486 if self.badalias or util.safehasattr(self, 'shell'):
396 if self.badalias or util.safehasattr(self, 'shell'):
487 return adefaults[name]
397 return adefaults[name]
488 return getattr(self.fn, name)
398 return getattr(self.fn, name)
489
399
490 def __call__(self, ui, *args, **opts):
400 def __call__(self, ui, *args, **opts):
491 if self.badalias:
401 if self.badalias:
492 hint = None
402 hint = None
493 if self.unknowncmd:
403 if self.unknowncmd:
494 try:
404 try:
495 # check if the command is in a disabled extension
405 # check if the command is in a disabled extension
496 cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
406 cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
497 hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
407 hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
498 except error.UnknownCommand:
408 except error.UnknownCommand:
499 pass
409 pass
500 raise error.Abort(self.badalias, hint=hint)
410 raise error.Abort(self.badalias, hint=hint)
501 if self.shadows:
411 if self.shadows:
502 ui.debug("alias '%s' shadows command '%s'\n" %
412 ui.debug("alias '%s' shadows command '%s'\n" %
503 (self.name, self.cmdname))
413 (self.name, self.cmdname))
504
414
505 ui.log('commandalias', "alias '%s' expands to '%s'\n",
415 ui.log('commandalias', "alias '%s' expands to '%s'\n",
506 self.name, self.definition)
416 self.name, self.definition)
507 if util.safehasattr(self, 'shell'):
417 if util.safehasattr(self, 'shell'):
508 return self.fn(ui, *args, **opts)
418 return self.fn(ui, *args, **opts)
509 else:
419 else:
510 try:
420 try:
511 return util.checksignature(self.fn)(ui, *args, **opts)
421 return util.checksignature(self.fn)(ui, *args, **opts)
512 except error.SignatureError:
422 except error.SignatureError:
513 args = ' '.join([self.cmdname] + self.args)
423 args = ' '.join([self.cmdname] + self.args)
514 ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
424 ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
515 raise
425 raise
516
426
517 def addaliases(ui, cmdtable):
427 def addaliases(ui, cmdtable):
518 # aliases are processed after extensions have been loaded, so they
428 # aliases are processed after extensions have been loaded, so they
519 # may use extension commands. Aliases can also use other alias definitions,
429 # may use extension commands. Aliases can also use other alias definitions,
520 # but only if they have been defined prior to the current definition.
430 # but only if they have been defined prior to the current definition.
521 for alias, definition in ui.configitems('alias'):
431 for alias, definition in ui.configitems('alias'):
522 source = ui.configsource('alias', alias)
432 source = ui.configsource('alias', alias)
523 aliasdef = cmdalias(alias, definition, cmdtable, source)
433 aliasdef = cmdalias(alias, definition, cmdtable, source)
524
434
525 try:
435 try:
526 olddef = cmdtable[aliasdef.cmd][0]
436 olddef = cmdtable[aliasdef.cmd][0]
527 if olddef.definition == aliasdef.definition:
437 if olddef.definition == aliasdef.definition:
528 continue
438 continue
529 except (KeyError, AttributeError):
439 except (KeyError, AttributeError):
530 # definition might not exist or it might not be a cmdalias
440 # definition might not exist or it might not be a cmdalias
531 pass
441 pass
532
442
533 cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
443 cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
534
444
535 def _parse(ui, args):
445 def _parse(ui, args):
536 options = {}
446 options = {}
537 cmdoptions = {}
447 cmdoptions = {}
538
448
539 try:
449 try:
540 args = fancyopts.fancyopts(args, commands.globalopts, options)
450 args = fancyopts.fancyopts(args, commands.globalopts, options)
541 except fancyopts.getopt.GetoptError as inst:
451 except fancyopts.getopt.GetoptError as inst:
542 raise error.CommandError(None, inst)
452 raise error.CommandError(None, inst)
543
453
544 if args:
454 if args:
545 cmd, args = args[0], args[1:]
455 cmd, args = args[0], args[1:]
546 aliases, entry = cmdutil.findcmd(cmd, commands.table,
456 aliases, entry = cmdutil.findcmd(cmd, commands.table,
547 ui.configbool("ui", "strict"))
457 ui.configbool("ui", "strict"))
548 cmd = aliases[0]
458 cmd = aliases[0]
549 args = aliasargs(entry[0], args)
459 args = aliasargs(entry[0], args)
550 defaults = ui.config("defaults", cmd)
460 defaults = ui.config("defaults", cmd)
551 if defaults:
461 if defaults:
552 args = map(util.expandpath, shlex.split(defaults)) + args
462 args = map(util.expandpath, shlex.split(defaults)) + args
553 c = list(entry[1])
463 c = list(entry[1])
554 else:
464 else:
555 cmd = None
465 cmd = None
556 c = []
466 c = []
557
467
558 # combine global options into local
468 # combine global options into local
559 for o in commands.globalopts:
469 for o in commands.globalopts:
560 c.append((o[0], o[1], options[o[1]], o[3]))
470 c.append((o[0], o[1], options[o[1]], o[3]))
561
471
562 try:
472 try:
563 args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True)
473 args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True)
564 except fancyopts.getopt.GetoptError as inst:
474 except fancyopts.getopt.GetoptError as inst:
565 raise error.CommandError(cmd, inst)
475 raise error.CommandError(cmd, inst)
566
476
567 # separate global options back out
477 # separate global options back out
568 for o in commands.globalopts:
478 for o in commands.globalopts:
569 n = o[1]
479 n = o[1]
570 options[n] = cmdoptions[n]
480 options[n] = cmdoptions[n]
571 del cmdoptions[n]
481 del cmdoptions[n]
572
482
573 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
483 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
574
484
575 def _parseconfig(ui, config):
485 def _parseconfig(ui, config):
576 """parse the --config options from the command line"""
486 """parse the --config options from the command line"""
577 configs = []
487 configs = []
578
488
579 for cfg in config:
489 for cfg in config:
580 try:
490 try:
581 name, value = [cfgelem.strip()
491 name, value = [cfgelem.strip()
582 for cfgelem in cfg.split('=', 1)]
492 for cfgelem in cfg.split('=', 1)]
583 section, name = name.split('.', 1)
493 section, name = name.split('.', 1)
584 if not section or not name:
494 if not section or not name:
585 raise IndexError
495 raise IndexError
586 ui.setconfig(section, name, value, '--config')
496 ui.setconfig(section, name, value, '--config')
587 configs.append((section, name, value))
497 configs.append((section, name, value))
588 except (IndexError, ValueError):
498 except (IndexError, ValueError):
589 raise error.Abort(_('malformed --config option: %r '
499 raise error.Abort(_('malformed --config option: %r '
590 '(use --config section.name=value)') % cfg)
500 '(use --config section.name=value)') % cfg)
591
501
592 return configs
502 return configs
593
503
594 def _earlygetopt(aliases, args):
504 def _earlygetopt(aliases, args):
595 """Return list of values for an option (or aliases).
505 """Return list of values for an option (or aliases).
596
506
597 The values are listed in the order they appear in args.
507 The values are listed in the order they appear in args.
598 The options and values are removed from args.
508 The options and values are removed from args.
599
509
600 >>> args = ['x', '--cwd', 'foo', 'y']
510 >>> args = ['x', '--cwd', 'foo', 'y']
601 >>> _earlygetopt(['--cwd'], args), args
511 >>> _earlygetopt(['--cwd'], args), args
602 (['foo'], ['x', 'y'])
512 (['foo'], ['x', 'y'])
603
513
604 >>> args = ['x', '--cwd=bar', 'y']
514 >>> args = ['x', '--cwd=bar', 'y']
605 >>> _earlygetopt(['--cwd'], args), args
515 >>> _earlygetopt(['--cwd'], args), args
606 (['bar'], ['x', 'y'])
516 (['bar'], ['x', 'y'])
607
517
608 >>> args = ['x', '-R', 'foo', 'y']
518 >>> args = ['x', '-R', 'foo', 'y']
609 >>> _earlygetopt(['-R'], args), args
519 >>> _earlygetopt(['-R'], args), args
610 (['foo'], ['x', 'y'])
520 (['foo'], ['x', 'y'])
611
521
612 >>> args = ['x', '-Rbar', 'y']
522 >>> args = ['x', '-Rbar', 'y']
613 >>> _earlygetopt(['-R'], args), args
523 >>> _earlygetopt(['-R'], args), args
614 (['bar'], ['x', 'y'])
524 (['bar'], ['x', 'y'])
615 """
525 """
616 try:
526 try:
617 argcount = args.index("--")
527 argcount = args.index("--")
618 except ValueError:
528 except ValueError:
619 argcount = len(args)
529 argcount = len(args)
620 shortopts = [opt for opt in aliases if len(opt) == 2]
530 shortopts = [opt for opt in aliases if len(opt) == 2]
621 values = []
531 values = []
622 pos = 0
532 pos = 0
623 while pos < argcount:
533 while pos < argcount:
624 fullarg = arg = args[pos]
534 fullarg = arg = args[pos]
625 equals = arg.find('=')
535 equals = arg.find('=')
626 if equals > -1:
536 if equals > -1:
627 arg = arg[:equals]
537 arg = arg[:equals]
628 if arg in aliases:
538 if arg in aliases:
629 del args[pos]
539 del args[pos]
630 if equals > -1:
540 if equals > -1:
631 values.append(fullarg[equals + 1:])
541 values.append(fullarg[equals + 1:])
632 argcount -= 1
542 argcount -= 1
633 else:
543 else:
634 if pos + 1 >= argcount:
544 if pos + 1 >= argcount:
635 # ignore and let getopt report an error if there is no value
545 # ignore and let getopt report an error if there is no value
636 break
546 break
637 values.append(args.pop(pos))
547 values.append(args.pop(pos))
638 argcount -= 2
548 argcount -= 2
639 elif arg[:2] in shortopts:
549 elif arg[:2] in shortopts:
640 # short option can have no following space, e.g. hg log -Rfoo
550 # short option can have no following space, e.g. hg log -Rfoo
641 values.append(args.pop(pos)[2:])
551 values.append(args.pop(pos)[2:])
642 argcount -= 1
552 argcount -= 1
643 else:
553 else:
644 pos += 1
554 pos += 1
645 return values
555 return values
646
556
647 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
557 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
648 # run pre-hook, and abort if it fails
558 # run pre-hook, and abort if it fails
649 hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
559 hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
650 pats=cmdpats, opts=cmdoptions)
560 pats=cmdpats, opts=cmdoptions)
651 try:
561 try:
652 ret = _runcommand(ui, options, cmd, d)
562 ret = _runcommand(ui, options, cmd, d)
653 # run post-hook, passing command result
563 # run post-hook, passing command result
654 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
564 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
655 result=ret, pats=cmdpats, opts=cmdoptions)
565 result=ret, pats=cmdpats, opts=cmdoptions)
656 except Exception:
566 except Exception:
657 # run failure hook and re-raise
567 # run failure hook and re-raise
658 hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
568 hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
659 pats=cmdpats, opts=cmdoptions)
569 pats=cmdpats, opts=cmdoptions)
660 raise
570 raise
661 return ret
571 return ret
662
572
663 def _getlocal(ui, rpath, wd=None):
573 def _getlocal(ui, rpath, wd=None):
664 """Return (path, local ui object) for the given target path.
574 """Return (path, local ui object) for the given target path.
665
575
666 Takes paths in [cwd]/.hg/hgrc into account."
576 Takes paths in [cwd]/.hg/hgrc into account."
667 """
577 """
668 if wd is None:
578 if wd is None:
669 try:
579 try:
670 wd = pycompat.getcwd()
580 wd = pycompat.getcwd()
671 except OSError as e:
581 except OSError as e:
672 raise error.Abort(_("error getting current working directory: %s") %
582 raise error.Abort(_("error getting current working directory: %s") %
673 e.strerror)
583 e.strerror)
674 path = cmdutil.findrepo(wd) or ""
584 path = cmdutil.findrepo(wd) or ""
675 if not path:
585 if not path:
676 lui = ui
586 lui = ui
677 else:
587 else:
678 lui = ui.copy()
588 lui = ui.copy()
679 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
589 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
680
590
681 if rpath and rpath[-1]:
591 if rpath and rpath[-1]:
682 path = lui.expandpath(rpath[-1])
592 path = lui.expandpath(rpath[-1])
683 lui = ui.copy()
593 lui = ui.copy()
684 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
594 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
685
595
686 return path, lui
596 return path, lui
687
597
688 def _checkshellalias(lui, ui, args):
598 def _checkshellalias(lui, ui, args):
689 """Return the function to run the shell alias, if it is required"""
599 """Return the function to run the shell alias, if it is required"""
690 options = {}
600 options = {}
691
601
692 try:
602 try:
693 args = fancyopts.fancyopts(args, commands.globalopts, options)
603 args = fancyopts.fancyopts(args, commands.globalopts, options)
694 except fancyopts.getopt.GetoptError:
604 except fancyopts.getopt.GetoptError:
695 return
605 return
696
606
697 if not args:
607 if not args:
698 return
608 return
699
609
700 cmdtable = commands.table
610 cmdtable = commands.table
701
611
702 cmd = args[0]
612 cmd = args[0]
703 try:
613 try:
704 strict = ui.configbool("ui", "strict")
614 strict = ui.configbool("ui", "strict")
705 aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
615 aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
706 except (error.AmbiguousCommand, error.UnknownCommand):
616 except (error.AmbiguousCommand, error.UnknownCommand):
707 return
617 return
708
618
709 cmd = aliases[0]
619 cmd = aliases[0]
710 fn = entry[0]
620 fn = entry[0]
711
621
712 if cmd and util.safehasattr(fn, 'shell'):
622 if cmd and util.safehasattr(fn, 'shell'):
713 d = lambda: fn(ui, *args[1:])
623 d = lambda: fn(ui, *args[1:])
714 return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
624 return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
715 [], {})
625 [], {})
716
626
717 _loaded = set()
627 _loaded = set()
718
628
719 # list of (objname, loadermod, loadername) tuple:
629 # list of (objname, loadermod, loadername) tuple:
720 # - objname is the name of an object in extension module, from which
630 # - objname is the name of an object in extension module, from which
721 # extra information is loaded
631 # extra information is loaded
722 # - loadermod is the module where loader is placed
632 # - loadermod is the module where loader is placed
723 # - loadername is the name of the function, which takes (ui, extensionname,
633 # - loadername is the name of the function, which takes (ui, extensionname,
724 # extraobj) arguments
634 # extraobj) arguments
725 extraloaders = [
635 extraloaders = [
726 ('cmdtable', commands, 'loadcmdtable'),
636 ('cmdtable', commands, 'loadcmdtable'),
727 ('filesetpredicate', fileset, 'loadpredicate'),
637 ('filesetpredicate', fileset, 'loadpredicate'),
728 ('revsetpredicate', revset, 'loadpredicate'),
638 ('revsetpredicate', revset, 'loadpredicate'),
729 ('templatefilter', templatefilters, 'loadfilter'),
639 ('templatefilter', templatefilters, 'loadfilter'),
730 ('templatefunc', templater, 'loadfunction'),
640 ('templatefunc', templater, 'loadfunction'),
731 ('templatekeyword', templatekw, 'loadkeyword'),
641 ('templatekeyword', templatekw, 'loadkeyword'),
732 ]
642 ]
733
643
734 def _dispatch(req):
644 def _dispatch(req):
735 args = req.args
645 args = req.args
736 ui = req.ui
646 ui = req.ui
737
647
738 # check for cwd
648 # check for cwd
739 cwd = _earlygetopt(['--cwd'], args)
649 cwd = _earlygetopt(['--cwd'], args)
740 if cwd:
650 if cwd:
741 os.chdir(cwd[-1])
651 os.chdir(cwd[-1])
742
652
743 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
653 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
744 path, lui = _getlocal(ui, rpath)
654 path, lui = _getlocal(ui, rpath)
745
655
746 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
656 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
747 # reposetup. Programs like TortoiseHg will call _dispatch several
657 # reposetup. Programs like TortoiseHg will call _dispatch several
748 # times so we keep track of configured extensions in _loaded.
658 # times so we keep track of configured extensions in _loaded.
749 extensions.loadall(lui)
659 extensions.loadall(lui)
750 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
660 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
751 # Propagate any changes to lui.__class__ by extensions
661 # Propagate any changes to lui.__class__ by extensions
752 ui.__class__ = lui.__class__
662 ui.__class__ = lui.__class__
753
663
754 # (uisetup and extsetup are handled in extensions.loadall)
664 # (uisetup and extsetup are handled in extensions.loadall)
755
665
756 for name, module in exts:
666 for name, module in exts:
757 for objname, loadermod, loadername in extraloaders:
667 for objname, loadermod, loadername in extraloaders:
758 extraobj = getattr(module, objname, None)
668 extraobj = getattr(module, objname, None)
759 if extraobj is not None:
669 if extraobj is not None:
760 getattr(loadermod, loadername)(ui, name, extraobj)
670 getattr(loadermod, loadername)(ui, name, extraobj)
761 _loaded.add(name)
671 _loaded.add(name)
762
672
763 # (reposetup is handled in hg.repository)
673 # (reposetup is handled in hg.repository)
764
674
765 # Side-effect of accessing is debugcommands module is guaranteed to be
675 # Side-effect of accessing is debugcommands module is guaranteed to be
766 # imported and commands.table is populated.
676 # imported and commands.table is populated.
767 debugcommands.command
677 debugcommands.command
768
678
769 addaliases(lui, commands.table)
679 addaliases(lui, commands.table)
770
680
771 # All aliases and commands are completely defined, now.
681 # All aliases and commands are completely defined, now.
772 # Check abbreviation/ambiguity of shell alias.
682 # Check abbreviation/ambiguity of shell alias.
773 shellaliasfn = _checkshellalias(lui, ui, args)
683 shellaliasfn = _checkshellalias(lui, ui, args)
774 if shellaliasfn:
684 if shellaliasfn:
775 with profiling.maybeprofile(lui):
685 with profiling.maybeprofile(lui):
776 return shellaliasfn()
686 return shellaliasfn()
777
687
778 # check for fallback encoding
688 # check for fallback encoding
779 fallback = lui.config('ui', 'fallbackencoding')
689 fallback = lui.config('ui', 'fallbackencoding')
780 if fallback:
690 if fallback:
781 encoding.fallbackencoding = fallback
691 encoding.fallbackencoding = fallback
782
692
783 fullargs = args
693 fullargs = args
784 cmd, func, args, options, cmdoptions = _parse(lui, args)
694 cmd, func, args, options, cmdoptions = _parse(lui, args)
785
695
786 if options["config"]:
696 if options["config"]:
787 raise error.Abort(_("option --config may not be abbreviated!"))
697 raise error.Abort(_("option --config may not be abbreviated!"))
788 if options["cwd"]:
698 if options["cwd"]:
789 raise error.Abort(_("option --cwd may not be abbreviated!"))
699 raise error.Abort(_("option --cwd may not be abbreviated!"))
790 if options["repository"]:
700 if options["repository"]:
791 raise error.Abort(_(
701 raise error.Abort(_(
792 "option -R has to be separated from other options (e.g. not -qR) "
702 "option -R has to be separated from other options (e.g. not -qR) "
793 "and --repository may only be abbreviated as --repo!"))
703 "and --repository may only be abbreviated as --repo!"))
794
704
795 if options["encoding"]:
705 if options["encoding"]:
796 encoding.encoding = options["encoding"]
706 encoding.encoding = options["encoding"]
797 if options["encodingmode"]:
707 if options["encodingmode"]:
798 encoding.encodingmode = options["encodingmode"]
708 encoding.encodingmode = options["encodingmode"]
799 if options["time"]:
709 if options["time"]:
800 def get_times():
710 def get_times():
801 t = os.times()
711 t = os.times()
802 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
712 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
803 t = (t[0], t[1], t[2], t[3], time.clock())
713 t = (t[0], t[1], t[2], t[3], time.clock())
804 return t
714 return t
805 s = get_times()
715 s = get_times()
806 def print_time():
716 def print_time():
807 t = get_times()
717 t = get_times()
808 ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
718 ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
809 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
719 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
810 atexit.register(print_time)
720 atexit.register(print_time)
811
721
812 uis = set([ui, lui])
722 uis = set([ui, lui])
813
723
814 if req.repo:
724 if req.repo:
815 uis.add(req.repo.ui)
725 uis.add(req.repo.ui)
816
726
817 if options['verbose'] or options['debug'] or options['quiet']:
727 if options['verbose'] or options['debug'] or options['quiet']:
818 for opt in ('verbose', 'debug', 'quiet'):
728 for opt in ('verbose', 'debug', 'quiet'):
819 val = str(bool(options[opt]))
729 val = str(bool(options[opt]))
820 for ui_ in uis:
730 for ui_ in uis:
821 ui_.setconfig('ui', opt, val, '--' + opt)
731 ui_.setconfig('ui', opt, val, '--' + opt)
822
732
823 if options['profile']:
733 if options['profile']:
824 for ui_ in uis:
734 for ui_ in uis:
825 ui_.setconfig('profiling', 'enabled', 'true', '--profile')
735 ui_.setconfig('profiling', 'enabled', 'true', '--profile')
826
736
827 if options['traceback']:
737 if options['traceback']:
828 for ui_ in uis:
738 for ui_ in uis:
829 ui_.setconfig('ui', 'traceback', 'on', '--traceback')
739 ui_.setconfig('ui', 'traceback', 'on', '--traceback')
830
740
831 if options['noninteractive']:
741 if options['noninteractive']:
832 for ui_ in uis:
742 for ui_ in uis:
833 ui_.setconfig('ui', 'interactive', 'off', '-y')
743 ui_.setconfig('ui', 'interactive', 'off', '-y')
834
744
835 if cmdoptions.get('insecure', False):
745 if cmdoptions.get('insecure', False):
836 for ui_ in uis:
746 for ui_ in uis:
837 ui_.insecureconnections = True
747 ui_.insecureconnections = True
838
748
839 if options['version']:
749 if options['version']:
840 return commands.version_(ui)
750 return commands.version_(ui)
841 if options['help']:
751 if options['help']:
842 return commands.help_(ui, cmd, command=cmd is not None)
752 return commands.help_(ui, cmd, command=cmd is not None)
843 elif not cmd:
753 elif not cmd:
844 return commands.help_(ui, 'shortlist')
754 return commands.help_(ui, 'shortlist')
845
755
846 with profiling.maybeprofile(lui):
756 with profiling.maybeprofile(lui):
847 repo = None
757 repo = None
848 cmdpats = args[:]
758 cmdpats = args[:]
849 if not func.norepo:
759 if not func.norepo:
850 # use the repo from the request only if we don't have -R
760 # use the repo from the request only if we don't have -R
851 if not rpath and not cwd:
761 if not rpath and not cwd:
852 repo = req.repo
762 repo = req.repo
853
763
854 if repo:
764 if repo:
855 # set the descriptors of the repo ui to those of ui
765 # set the descriptors of the repo ui to those of ui
856 repo.ui.fin = ui.fin
766 repo.ui.fin = ui.fin
857 repo.ui.fout = ui.fout
767 repo.ui.fout = ui.fout
858 repo.ui.ferr = ui.ferr
768 repo.ui.ferr = ui.ferr
859 else:
769 else:
860 try:
770 try:
861 repo = hg.repository(ui, path=path)
771 repo = hg.repository(ui, path=path)
862 if not repo.local():
772 if not repo.local():
863 raise error.Abort(_("repository '%s' is not local")
773 raise error.Abort(_("repository '%s' is not local")
864 % path)
774 % path)
865 repo.ui.setconfig("bundle", "mainreporoot", repo.root,
775 repo.ui.setconfig("bundle", "mainreporoot", repo.root,
866 'repo')
776 'repo')
867 except error.RequirementError:
777 except error.RequirementError:
868 raise
778 raise
869 except error.RepoError:
779 except error.RepoError:
870 if rpath and rpath[-1]: # invalid -R path
780 if rpath and rpath[-1]: # invalid -R path
871 raise
781 raise
872 if not func.optionalrepo:
782 if not func.optionalrepo:
873 if func.inferrepo and args and not path:
783 if func.inferrepo and args and not path:
874 # try to infer -R from command args
784 # try to infer -R from command args
875 repos = map(cmdutil.findrepo, args)
785 repos = map(cmdutil.findrepo, args)
876 guess = repos[0]
786 guess = repos[0]
877 if guess and repos.count(guess) == len(repos):
787 if guess and repos.count(guess) == len(repos):
878 req.args = ['--repository', guess] + fullargs
788 req.args = ['--repository', guess] + fullargs
879 return _dispatch(req)
789 return _dispatch(req)
880 if not path:
790 if not path:
881 raise error.RepoError(_("no repository found in"
791 raise error.RepoError(_("no repository found in"
882 " '%s' (.hg not found)")
792 " '%s' (.hg not found)")
883 % pycompat.getcwd())
793 % pycompat.getcwd())
884 raise
794 raise
885 if repo:
795 if repo:
886 ui = repo.ui
796 ui = repo.ui
887 if options['hidden']:
797 if options['hidden']:
888 repo = repo.unfiltered()
798 repo = repo.unfiltered()
889 args.insert(0, repo)
799 args.insert(0, repo)
890 elif rpath:
800 elif rpath:
891 ui.warn(_("warning: --repository ignored\n"))
801 ui.warn(_("warning: --repository ignored\n"))
892
802
893 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
803 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
894 ui.log("command", '%s\n', msg)
804 ui.log("command", '%s\n', msg)
895 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
805 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
896 try:
806 try:
897 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
807 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
898 cmdpats, cmdoptions)
808 cmdpats, cmdoptions)
899 finally:
809 finally:
900 if repo and repo != req.repo:
810 if repo and repo != req.repo:
901 repo.close()
811 repo.close()
902
812
903 def _runcommand(ui, options, cmd, cmdfunc):
813 def _runcommand(ui, options, cmd, cmdfunc):
904 """Run a command function, possibly with profiling enabled."""
814 """Run a command function, possibly with profiling enabled."""
905 try:
815 try:
906 return cmdfunc()
816 return cmdfunc()
907 except error.SignatureError:
817 except error.SignatureError:
908 raise error.CommandError(cmd, _('invalid arguments'))
818 raise error.CommandError(cmd, _('invalid arguments'))
909
819
910 def _exceptionwarning(ui):
820 def _exceptionwarning(ui):
911 """Produce a warning message for the current active exception"""
821 """Produce a warning message for the current active exception"""
912
822
913 # For compatibility checking, we discard the portion of the hg
823 # For compatibility checking, we discard the portion of the hg
914 # version after the + on the assumption that if a "normal
824 # version after the + on the assumption that if a "normal
915 # user" is running a build with a + in it the packager
825 # user" is running a build with a + in it the packager
916 # probably built from fairly close to a tag and anyone with a
826 # probably built from fairly close to a tag and anyone with a
917 # 'make local' copy of hg (where the version number can be out
827 # 'make local' copy of hg (where the version number can be out
918 # of date) will be clueful enough to notice the implausible
828 # of date) will be clueful enough to notice the implausible
919 # version number and try updating.
829 # version number and try updating.
920 ct = util.versiontuple(n=2)
830 ct = util.versiontuple(n=2)
921 worst = None, ct, ''
831 worst = None, ct, ''
922 if ui.config('ui', 'supportcontact', None) is None:
832 if ui.config('ui', 'supportcontact', None) is None:
923 for name, mod in extensions.extensions():
833 for name, mod in extensions.extensions():
924 testedwith = getattr(mod, 'testedwith', '')
834 testedwith = getattr(mod, 'testedwith', '')
925 report = getattr(mod, 'buglink', _('the extension author.'))
835 report = getattr(mod, 'buglink', _('the extension author.'))
926 if not testedwith.strip():
836 if not testedwith.strip():
927 # We found an untested extension. It's likely the culprit.
837 # We found an untested extension. It's likely the culprit.
928 worst = name, 'unknown', report
838 worst = name, 'unknown', report
929 break
839 break
930
840
931 # Never blame on extensions bundled with Mercurial.
841 # Never blame on extensions bundled with Mercurial.
932 if extensions.ismoduleinternal(mod):
842 if extensions.ismoduleinternal(mod):
933 continue
843 continue
934
844
935 tested = [util.versiontuple(t, 2) for t in testedwith.split()]
845 tested = [util.versiontuple(t, 2) for t in testedwith.split()]
936 if ct in tested:
846 if ct in tested:
937 continue
847 continue
938
848
939 lower = [t for t in tested if t < ct]
849 lower = [t for t in tested if t < ct]
940 nearest = max(lower or tested)
850 nearest = max(lower or tested)
941 if worst[0] is None or nearest < worst[1]:
851 if worst[0] is None or nearest < worst[1]:
942 worst = name, nearest, report
852 worst = name, nearest, report
943 if worst[0] is not None:
853 if worst[0] is not None:
944 name, testedwith, report = worst
854 name, testedwith, report = worst
945 if not isinstance(testedwith, str):
855 if not isinstance(testedwith, str):
946 testedwith = '.'.join([str(c) for c in testedwith])
856 testedwith = '.'.join([str(c) for c in testedwith])
947 warning = (_('** Unknown exception encountered with '
857 warning = (_('** Unknown exception encountered with '
948 'possibly-broken third-party extension %s\n'
858 'possibly-broken third-party extension %s\n'
949 '** which supports versions %s of Mercurial.\n'
859 '** which supports versions %s of Mercurial.\n'
950 '** Please disable %s and try your action again.\n'
860 '** Please disable %s and try your action again.\n'
951 '** If that fixes the bug please report it to %s\n')
861 '** If that fixes the bug please report it to %s\n')
952 % (name, testedwith, name, report))
862 % (name, testedwith, name, report))
953 else:
863 else:
954 bugtracker = ui.config('ui', 'supportcontact', None)
864 bugtracker = ui.config('ui', 'supportcontact', None)
955 if bugtracker is None:
865 if bugtracker is None:
956 bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
866 bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
957 warning = (_("** unknown exception encountered, "
867 warning = (_("** unknown exception encountered, "
958 "please report by visiting\n** ") + bugtracker + '\n')
868 "please report by visiting\n** ") + bugtracker + '\n')
959 warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
869 warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
960 (_("** Mercurial Distributed SCM (version %s)\n") %
870 (_("** Mercurial Distributed SCM (version %s)\n") %
961 util.version()) +
871 util.version()) +
962 (_("** Extensions loaded: %s\n") %
872 (_("** Extensions loaded: %s\n") %
963 ", ".join([x[0] for x in extensions.extensions()])))
873 ", ".join([x[0] for x in extensions.extensions()])))
964 return warning
874 return warning
965
875
966 def handlecommandexception(ui):
876 def handlecommandexception(ui):
967 """Produce a warning message for broken commands
877 """Produce a warning message for broken commands
968
878
969 Called when handling an exception; the exception is reraised if
879 Called when handling an exception; the exception is reraised if
970 this function returns False, ignored otherwise.
880 this function returns False, ignored otherwise.
971 """
881 """
972 warning = _exceptionwarning(ui)
882 warning = _exceptionwarning(ui)
973 ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
883 ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
974 ui.warn(warning)
884 ui.warn(warning)
975 return False # re-raise the exception
885 return False # re-raise the exception
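# Editor's note: a minimal sketch (not part of this changeset) of how a
# dispatcher could use handlecommandexception() as a last-resort handler.
# `runcommand` is a hypothetical zero-argument callable standing in for the
# real command runner.
def _guardedrun(ui, runcommand):
    try:
        return runcommand()
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        # False means "re-raise"; anything else means the warning sufficed
        if not handlecommandexception(ui):
            raise
        return 1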
@@ -1,1470 +1,1573 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import contextlib
10 import contextlib
11 import errno
11 import errno
12 import glob
12 import glob
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import re
15 import re
16 import shutil
16 import shutil
17 import socket
17 import stat
18 import stat
18 import tempfile
19 import tempfile
19 import threading
20 import threading
20
21
21 from .i18n import _
22 from .i18n import _
22 from .node import wdirrev
23 from .node import wdirrev
23 from . import (
24 from . import (
24 encoding,
25 encoding,
25 error,
26 error,
26 match as matchmod,
27 match as matchmod,
27 osutil,
28 osutil,
28 pathutil,
29 pathutil,
29 phases,
30 phases,
30 pycompat,
31 pycompat,
31 revset,
32 revset,
32 similar,
33 similar,
33 util,
34 util,
34 )
35 )
35
36
36 if os.name == 'nt':
37 if os.name == 'nt':
37 from . import scmwindows as scmplatform
38 from . import scmwindows as scmplatform
38 else:
39 else:
39 from . import scmposix as scmplatform
40 from . import scmposix as scmplatform
40
41
41 systemrcpath = scmplatform.systemrcpath
42 systemrcpath = scmplatform.systemrcpath
42 userrcpath = scmplatform.userrcpath
43 userrcpath = scmplatform.userrcpath
43 termsize = scmplatform.termsize
44 termsize = scmplatform.termsize
44
45
45 class status(tuple):
46 class status(tuple):
46 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
47 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
47 and 'ignored' properties are only relevant to the working copy.
48 and 'ignored' properties are only relevant to the working copy.
48 '''
49 '''
49
50
50 __slots__ = ()
51 __slots__ = ()
51
52
52 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
53 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
53 clean):
54 clean):
54 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
55 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
55 ignored, clean))
56 ignored, clean))
56
57
57 @property
58 @property
58 def modified(self):
59 def modified(self):
59 '''files that have been modified'''
60 '''files that have been modified'''
60 return self[0]
61 return self[0]
61
62
62 @property
63 @property
63 def added(self):
64 def added(self):
64 '''files that have been added'''
65 '''files that have been added'''
65 return self[1]
66 return self[1]
66
67
67 @property
68 @property
68 def removed(self):
69 def removed(self):
69 '''files that have been removed'''
70 '''files that have been removed'''
70 return self[2]
71 return self[2]
71
72
72 @property
73 @property
73 def deleted(self):
74 def deleted(self):
74 '''files that are in the dirstate, but have been deleted from the
75 '''files that are in the dirstate, but have been deleted from the
75 working copy (aka "missing")
76 working copy (aka "missing")
76 '''
77 '''
77 return self[3]
78 return self[3]
78
79
79 @property
80 @property
80 def unknown(self):
81 def unknown(self):
81 '''files not in the dirstate that are not ignored'''
82 '''files not in the dirstate that are not ignored'''
82 return self[4]
83 return self[4]
83
84
84 @property
85 @property
85 def ignored(self):
86 def ignored(self):
86 '''files not in the dirstate that are ignored (by _dirignore())'''
87 '''files not in the dirstate that are ignored (by _dirignore())'''
87 return self[5]
88 return self[5]
88
89
89 @property
90 @property
90 def clean(self):
91 def clean(self):
91 '''files that have not been modified'''
92 '''files that have not been modified'''
92 return self[6]
93 return self[6]
93
94
94 def __repr__(self, *args, **kwargs):
95 def __repr__(self, *args, **kwargs):
95 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
96 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
96 'unknown=%r, ignored=%r, clean=%r>') % self)
97 'unknown=%r, ignored=%r, clean=%r>') % self)
97
98
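# Editor's note: a tiny usage sketch for the status class above; the file
# lists are illustrative. The object unpacks like a plain tuple and also
# exposes the named properties.
def _statusexample():
    st = status(['a.txt'], ['b.txt'], [], [], ['junk.tmp'], [], ['c.txt'])
    assert st.modified == st[0] == ['a.txt']
    assert st.unknown == ['junk.tmp']
    modified, added, removed, deleted, unknown, ignored, clean = st
    return clean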
98 def itersubrepos(ctx1, ctx2):
99 def itersubrepos(ctx1, ctx2):
99 """find subrepos in ctx1 or ctx2"""
100 """find subrepos in ctx1 or ctx2"""
100 # Create a (subpath, ctx) mapping where we prefer subpaths from
101 # Create a (subpath, ctx) mapping where we prefer subpaths from
101 # ctx1. The subpaths from ctx2 are important when the .hgsub file
102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
102 # has been modified (in ctx2) but not yet committed (in ctx1).
103 # has been modified (in ctx2) but not yet committed (in ctx1).
103 subpaths = dict.fromkeys(ctx2.substate, ctx2)
104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
104 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
105
106
106 missing = set()
107 missing = set()
107
108
108 for subpath in ctx2.substate:
109 for subpath in ctx2.substate:
109 if subpath not in ctx1.substate:
110 if subpath not in ctx1.substate:
110 del subpaths[subpath]
111 del subpaths[subpath]
111 missing.add(subpath)
112 missing.add(subpath)
112
113
113 for subpath, ctx in sorted(subpaths.iteritems()):
114 for subpath, ctx in sorted(subpaths.iteritems()):
114 yield subpath, ctx.sub(subpath)
115 yield subpath, ctx.sub(subpath)
115
116
116 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
117 # status and diff will have an accurate result when it does
118 # status and diff will have an accurate result when it does
118 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
119 # against itself.
120 # against itself.
120 for subpath in missing:
121 for subpath in missing:
121 yield subpath, ctx2.nullsub(subpath, ctx1)
122 yield subpath, ctx2.nullsub(subpath, ctx1)
122
123
123 def nochangesfound(ui, repo, excluded=None):
124 def nochangesfound(ui, repo, excluded=None):
124 '''Report no changes for push/pull, excluded is None or a list of
125 '''Report no changes for push/pull, excluded is None or a list of
125 nodes excluded from the push/pull.
126 nodes excluded from the push/pull.
126 '''
127 '''
127 secretlist = []
128 secretlist = []
128 if excluded:
129 if excluded:
129 for n in excluded:
130 for n in excluded:
130 if n not in repo:
131 if n not in repo:
131 # discovery should not have included the filtered revision,
132 # discovery should not have included the filtered revision,
132 # we have to explicitly exclude it until discovery is cleaned up.
133 # we have to explicitly exclude it until discovery is cleaned up.
133 continue
134 continue
134 ctx = repo[n]
135 ctx = repo[n]
135 if ctx.phase() >= phases.secret and not ctx.extinct():
136 if ctx.phase() >= phases.secret and not ctx.extinct():
136 secretlist.append(n)
137 secretlist.append(n)
137
138
138 if secretlist:
139 if secretlist:
139 ui.status(_("no changes found (ignored %d secret changesets)\n")
140 ui.status(_("no changes found (ignored %d secret changesets)\n")
140 % len(secretlist))
141 % len(secretlist))
141 else:
142 else:
142 ui.status(_("no changes found\n"))
143 ui.status(_("no changes found\n"))
143
144
145 def callcatch(ui, func):
146 """call func() with global exception handling
147
148 return func() if no exception happens. otherwise do some error handling
149 and return an exit code accordingly. does not handle all exceptions.
150 """
151 try:
152 return func()
153 # Global exception handling, alphabetically
154 # Mercurial-specific first, followed by built-in and library exceptions
155 except error.LockHeld as inst:
156 if inst.errno == errno.ETIMEDOUT:
157 reason = _('timed out waiting for lock held by %s') % inst.locker
158 else:
159 reason = _('lock held by %s') % inst.locker
160 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
161 except error.LockUnavailable as inst:
162 ui.warn(_("abort: could not lock %s: %s\n") %
163 (inst.desc or inst.filename, inst.strerror))
164 except error.OutOfBandError as inst:
165 if inst.args:
166 msg = _("abort: remote error:\n")
167 else:
168 msg = _("abort: remote error\n")
169 ui.warn(msg)
170 if inst.args:
171 ui.warn(''.join(inst.args))
172 if inst.hint:
173 ui.warn('(%s)\n' % inst.hint)
174 except error.RepoError as inst:
175 ui.warn(_("abort: %s!\n") % inst)
176 if inst.hint:
177 ui.warn(_("(%s)\n") % inst.hint)
178 except error.ResponseError as inst:
179 ui.warn(_("abort: %s") % inst.args[0])
180 if not isinstance(inst.args[1], basestring):
181 ui.warn(" %r\n" % (inst.args[1],))
182 elif not inst.args[1]:
183 ui.warn(_(" empty string\n"))
184 else:
185 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
186 except error.CensoredNodeError as inst:
187 ui.warn(_("abort: file censored %s!\n") % inst)
188 except error.RevlogError as inst:
189 ui.warn(_("abort: %s!\n") % inst)
190 except error.SignalInterrupt:
191 ui.warn(_("killed!\n"))
192 except error.InterventionRequired as inst:
193 ui.warn("%s\n" % inst)
194 if inst.hint:
195 ui.warn(_("(%s)\n") % inst.hint)
196 return 1
197 except error.Abort as inst:
198 ui.warn(_("abort: %s\n") % inst)
199 if inst.hint:
200 ui.warn(_("(%s)\n") % inst.hint)
201 except ImportError as inst:
202 ui.warn(_("abort: %s!\n") % inst)
203 m = str(inst).split()[-1]
204 if m in "mpatch bdiff".split():
205 ui.warn(_("(did you forget to compile extensions?)\n"))
206 elif m in "zlib".split():
207 ui.warn(_("(is your Python install correct?)\n"))
208 except IOError as inst:
209 if util.safehasattr(inst, "code"):
210 ui.warn(_("abort: %s\n") % inst)
211 elif util.safehasattr(inst, "reason"):
212 try: # usually it is in the form (errno, strerror)
213 reason = inst.reason.args[1]
214 except (AttributeError, IndexError):
215 # it might be anything, for example a string
216 reason = inst.reason
217 if isinstance(reason, unicode):
218 # SSLError of Python 2.7.9 contains a unicode
219 reason = reason.encode(encoding.encoding, 'replace')
220 ui.warn(_("abort: error: %s\n") % reason)
221 elif (util.safehasattr(inst, "args")
222 and inst.args and inst.args[0] == errno.EPIPE):
223 pass
224 elif getattr(inst, "strerror", None):
225 if getattr(inst, "filename", None):
226 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
227 else:
228 ui.warn(_("abort: %s\n") % inst.strerror)
229 else:
230 raise
231 except OSError as inst:
232 if getattr(inst, "filename", None) is not None:
233 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
234 else:
235 ui.warn(_("abort: %s\n") % inst.strerror)
236 except MemoryError:
237 ui.warn(_("abort: out of memory\n"))
238 except SystemExit as inst:
239 # Commands shouldn't sys.exit directly, but give a return code.
240 # Just in case catch this and pass the exit code to the caller.
241 return inst.code
242 except socket.error as inst:
243 ui.warn(_("abort: %s\n") % inst.args[-1])
244
245 return -1
246
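# Editor's note: a hedged sketch of the behaviour documented above, now that
# the generic handling lives in scmutil (dispatch delegates to it and keeps
# only command-level handling). `ui` is assumed to be a configured ui object.
def _callcatchexample(ui):
    # the wrapped callable's return value is passed through untouched
    assert callcatch(ui, lambda: 0) == 0

    def boom():
        raise error.Abort('no good')
    # error.Abort is handled: a message is written to ui and -1 is returned
    assert callcatch(ui, boom) == -1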
144 def checknewlabel(repo, lbl, kind):
247 def checknewlabel(repo, lbl, kind):
145 # Do not use the "kind" parameter in ui output.
248 # Do not use the "kind" parameter in ui output.
146 # It makes strings difficult to translate.
249 # It makes strings difficult to translate.
147 if lbl in ['tip', '.', 'null']:
250 if lbl in ['tip', '.', 'null']:
148 raise error.Abort(_("the name '%s' is reserved") % lbl)
251 raise error.Abort(_("the name '%s' is reserved") % lbl)
149 for c in (':', '\0', '\n', '\r'):
252 for c in (':', '\0', '\n', '\r'):
150 if c in lbl:
253 if c in lbl:
151 raise error.Abort(_("%r cannot be used in a name") % c)
254 raise error.Abort(_("%r cannot be used in a name") % c)
152 try:
255 try:
153 int(lbl)
256 int(lbl)
154 raise error.Abort(_("cannot use an integer as a name"))
257 raise error.Abort(_("cannot use an integer as a name"))
155 except ValueError:
258 except ValueError:
156 pass
259 pass
157
260
158 def checkfilename(f):
261 def checkfilename(f):
159 '''Check that the filename f is an acceptable filename for a tracked file'''
262 '''Check that the filename f is an acceptable filename for a tracked file'''
160 if '\r' in f or '\n' in f:
263 if '\r' in f or '\n' in f:
161 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
264 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
162
265
163 def checkportable(ui, f):
266 def checkportable(ui, f):
164 '''Check if filename f is portable and warn or abort depending on config'''
267 '''Check if filename f is portable and warn or abort depending on config'''
165 checkfilename(f)
268 checkfilename(f)
166 abort, warn = checkportabilityalert(ui)
269 abort, warn = checkportabilityalert(ui)
167 if abort or warn:
270 if abort or warn:
168 msg = util.checkwinfilename(f)
271 msg = util.checkwinfilename(f)
169 if msg:
272 if msg:
170 msg = "%s: %r" % (msg, f)
273 msg = "%s: %r" % (msg, f)
171 if abort:
274 if abort:
172 raise error.Abort(msg)
275 raise error.Abort(msg)
173 ui.warn(_("warning: %s\n") % msg)
276 ui.warn(_("warning: %s\n") % msg)
174
277
175 def checkportabilityalert(ui):
278 def checkportabilityalert(ui):
176 '''check if the user's config requests nothing, a warning, or abort for
279 '''check if the user's config requests nothing, a warning, or abort for
177 non-portable filenames'''
280 non-portable filenames'''
178 val = ui.config('ui', 'portablefilenames', 'warn')
281 val = ui.config('ui', 'portablefilenames', 'warn')
179 lval = val.lower()
282 lval = val.lower()
180 bval = util.parsebool(val)
283 bval = util.parsebool(val)
181 abort = os.name == 'nt' or lval == 'abort'
284 abort = os.name == 'nt' or lval == 'abort'
182 warn = bval or lval == 'warn'
285 warn = bval or lval == 'warn'
183 if bval is None and not (warn or abort or lval == 'ignore'):
286 if bval is None and not (warn or abort or lval == 'ignore'):
184 raise error.ConfigError(
287 raise error.ConfigError(
185 _("ui.portablefilenames value is invalid ('%s')") % val)
288 _("ui.portablefilenames value is invalid ('%s')") % val)
186 return abort, warn
289 return abort, warn
187
290
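# Editor's note: an illustrative sketch of the config interplay above; 'aux'
# is a Windows-reserved basename, so util.checkwinfilename flags it. `ui` is
# assumed to be a configured ui object on a non-Windows platform (where
# 'warn' does not escalate to an abort).
def _portabilityexample(ui):
    ui.setconfig('ui', 'portablefilenames', 'warn')
    checkportable(ui, 'aux.txt')    # writes a warning to ui
    ui.setconfig('ui', 'portablefilenames', 'abort')
    checkportable(ui, 'aux.txt')    # raises error.Abort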
188 class casecollisionauditor(object):
291 class casecollisionauditor(object):
189 def __init__(self, ui, abort, dirstate):
292 def __init__(self, ui, abort, dirstate):
190 self._ui = ui
293 self._ui = ui
191 self._abort = abort
294 self._abort = abort
192 allfiles = '\0'.join(dirstate._map)
295 allfiles = '\0'.join(dirstate._map)
193 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
296 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
194 self._dirstate = dirstate
297 self._dirstate = dirstate
195 # The purpose of _newfiles is so that we don't complain about
298 # The purpose of _newfiles is so that we don't complain about
196 # case collisions if someone were to call this object with the
299 # case collisions if someone were to call this object with the
197 # same filename twice.
300 # same filename twice.
198 self._newfiles = set()
301 self._newfiles = set()
199
302
200 def __call__(self, f):
303 def __call__(self, f):
201 if f in self._newfiles:
304 if f in self._newfiles:
202 return
305 return
203 fl = encoding.lower(f)
306 fl = encoding.lower(f)
204 if fl in self._loweredfiles and f not in self._dirstate:
307 if fl in self._loweredfiles and f not in self._dirstate:
205 msg = _('possible case-folding collision for %s') % f
308 msg = _('possible case-folding collision for %s') % f
206 if self._abort:
309 if self._abort:
207 raise error.Abort(msg)
310 raise error.Abort(msg)
208 self._ui.warn(_("warning: %s\n") % msg)
311 self._ui.warn(_("warning: %s\n") % msg)
209 self._loweredfiles.add(fl)
312 self._loweredfiles.add(fl)
210 self._newfiles.add(f)
313 self._newfiles.add(f)
211
314
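# Editor's note: an illustrative sketch of the auditor above; `repo` is an
# assumed repository object. Two names that fold to the same lowercase string
# trigger the warning (or an abort when the second argument is True).
def _collisionexample(repo):
    audit = casecollisionauditor(repo.ui, False, repo.dirstate)
    audit('README')
    audit('readme')    # warns: possible case-folding collision for readme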
212 def filteredhash(repo, maxrev):
315 def filteredhash(repo, maxrev):
213 """build hash of filtered revisions in the current repoview.
316 """build hash of filtered revisions in the current repoview.
214
317
215 Multiple caches perform up-to-date validation by checking that the
318 Multiple caches perform up-to-date validation by checking that the
216 tiprev and tipnode stored in the cache file match the current repository.
319 tiprev and tipnode stored in the cache file match the current repository.
217 However, this is not sufficient for validating repoviews because the set
320 However, this is not sufficient for validating repoviews because the set
218 of revisions in the view may change without the repository tiprev and
321 of revisions in the view may change without the repository tiprev and
219 tipnode changing.
322 tipnode changing.
220
323
221 This function hashes all the revs filtered from the view and returns
324 This function hashes all the revs filtered from the view and returns
222 that SHA-1 digest.
325 that SHA-1 digest.
223 """
326 """
224 cl = repo.changelog
327 cl = repo.changelog
225 if not cl.filteredrevs:
328 if not cl.filteredrevs:
226 return None
329 return None
227 key = None
330 key = None
228 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
331 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
229 if revs:
332 if revs:
230 s = hashlib.sha1()
333 s = hashlib.sha1()
231 for rev in revs:
334 for rev in revs:
232 s.update('%s;' % rev)
335 s.update('%s;' % rev)
233 key = s.digest()
336 key = s.digest()
234 return key
337 return key
235
338
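# Editor's note: a hedged sketch of the validation pattern the docstring
# describes; `cachedkey` is assumed to have been stored alongside tiprev and
# tipnode when the cache was written against the same repoview.
def _cachestillvalid(repo, maxrev, cachedkey):
    # None on both sides means "nothing filtered", which also compares equal
    return filteredhash(repo, maxrev) == cachedkey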
236 class abstractvfs(object):
339 class abstractvfs(object):
237 """Abstract base class; cannot be instantiated"""
340 """Abstract base class; cannot be instantiated"""
238
341
239 def __init__(self, *args, **kwargs):
342 def __init__(self, *args, **kwargs):
240 '''Prevent instantiation; don't call this from subclasses.'''
343 '''Prevent instantiation; don't call this from subclasses.'''
241 raise NotImplementedError('attempted instantiating ' + str(type(self)))
344 raise NotImplementedError('attempted instantiating ' + str(type(self)))
242
345
243 def tryread(self, path):
346 def tryread(self, path):
244 '''gracefully return an empty string for missing files'''
347 '''gracefully return an empty string for missing files'''
245 try:
348 try:
246 return self.read(path)
349 return self.read(path)
247 except IOError as inst:
350 except IOError as inst:
248 if inst.errno != errno.ENOENT:
351 if inst.errno != errno.ENOENT:
249 raise
352 raise
250 return ""
353 return ""
251
354
252 def tryreadlines(self, path, mode='rb'):
355 def tryreadlines(self, path, mode='rb'):
253 '''gracefully return an empty array for missing files'''
356 '''gracefully return an empty array for missing files'''
254 try:
357 try:
255 return self.readlines(path, mode=mode)
358 return self.readlines(path, mode=mode)
256 except IOError as inst:
359 except IOError as inst:
257 if inst.errno != errno.ENOENT:
360 if inst.errno != errno.ENOENT:
258 raise
361 raise
259 return []
362 return []
260
363
261 @util.propertycache
364 @util.propertycache
262 def open(self):
365 def open(self):
263 '''Open ``path`` file, which is relative to vfs root.
366 '''Open ``path`` file, which is relative to vfs root.
264
367
265 Newly created directories are marked as "not to be indexed by
368 Newly created directories are marked as "not to be indexed by
266 the content indexing service", if ``notindexed`` is specified
369 the content indexing service", if ``notindexed`` is specified
267 for "write" mode access.
370 for "write" mode access.
268 '''
371 '''
269 return self.__call__
372 return self.__call__
270
373
271 def read(self, path):
374 def read(self, path):
272 with self(path, 'rb') as fp:
375 with self(path, 'rb') as fp:
273 return fp.read()
376 return fp.read()
274
377
275 def readlines(self, path, mode='rb'):
378 def readlines(self, path, mode='rb'):
276 with self(path, mode=mode) as fp:
379 with self(path, mode=mode) as fp:
277 return fp.readlines()
380 return fp.readlines()
278
381
279 def write(self, path, data, backgroundclose=False):
382 def write(self, path, data, backgroundclose=False):
280 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
383 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
281 return fp.write(data)
384 return fp.write(data)
282
385
283 def writelines(self, path, data, mode='wb', notindexed=False):
386 def writelines(self, path, data, mode='wb', notindexed=False):
284 with self(path, mode=mode, notindexed=notindexed) as fp:
387 with self(path, mode=mode, notindexed=notindexed) as fp:
285 return fp.writelines(data)
388 return fp.writelines(data)
286
389
287 def append(self, path, data):
390 def append(self, path, data):
288 with self(path, 'ab') as fp:
391 with self(path, 'ab') as fp:
289 return fp.write(data)
392 return fp.write(data)
290
393
291 def basename(self, path):
394 def basename(self, path):
292 """return base element of a path (as os.path.basename would do)
395 """return base element of a path (as os.path.basename would do)
293
396
294 This exists to allow handling of strange encoding if needed."""
397 This exists to allow handling of strange encoding if needed."""
295 return os.path.basename(path)
398 return os.path.basename(path)
296
399
297 def chmod(self, path, mode):
400 def chmod(self, path, mode):
298 return os.chmod(self.join(path), mode)
401 return os.chmod(self.join(path), mode)
299
402
300 def dirname(self, path):
403 def dirname(self, path):
301 """return dirname element of a path (as os.path.dirname would do)
404 """return dirname element of a path (as os.path.dirname would do)
302
405
303 This exists to allow handling of strange encoding if needed."""
406 This exists to allow handling of strange encoding if needed."""
304 return os.path.dirname(path)
407 return os.path.dirname(path)
305
408
306 def exists(self, path=None):
409 def exists(self, path=None):
307 return os.path.exists(self.join(path))
410 return os.path.exists(self.join(path))
308
411
309 def fstat(self, fp):
412 def fstat(self, fp):
310 return util.fstat(fp)
413 return util.fstat(fp)
311
414
312 def isdir(self, path=None):
415 def isdir(self, path=None):
313 return os.path.isdir(self.join(path))
416 return os.path.isdir(self.join(path))
314
417
315 def isfile(self, path=None):
418 def isfile(self, path=None):
316 return os.path.isfile(self.join(path))
419 return os.path.isfile(self.join(path))
317
420
318 def islink(self, path=None):
421 def islink(self, path=None):
319 return os.path.islink(self.join(path))
422 return os.path.islink(self.join(path))
320
423
321 def isfileorlink(self, path=None):
424 def isfileorlink(self, path=None):
322 '''return whether path is a regular file or a symlink
425 '''return whether path is a regular file or a symlink
323
426
324 Unlike isfile, this doesn't follow symlinks.'''
427 Unlike isfile, this doesn't follow symlinks.'''
325 try:
428 try:
326 st = self.lstat(path)
429 st = self.lstat(path)
327 except OSError:
430 except OSError:
328 return False
431 return False
329 mode = st.st_mode
432 mode = st.st_mode
330 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
433 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
331
434
332 def reljoin(self, *paths):
435 def reljoin(self, *paths):
333 """join various elements of a path together (as os.path.join would do)
436 """join various elements of a path together (as os.path.join would do)
334
437
335 The vfs base is not injected so that the path stays relative. This exists
438 The vfs base is not injected so that the path stays relative. This exists
336 to allow handling of strange encoding if needed."""
439 to allow handling of strange encoding if needed."""
337 return os.path.join(*paths)
440 return os.path.join(*paths)
338
441
339 def split(self, path):
442 def split(self, path):
340 """split top-most element of a path (as os.path.split would do)
443 """split top-most element of a path (as os.path.split would do)
341
444
342 This exists to allow handling of strange encoding if needed."""
445 This exists to allow handling of strange encoding if needed."""
343 return os.path.split(path)
446 return os.path.split(path)
344
447
345 def lexists(self, path=None):
448 def lexists(self, path=None):
346 return os.path.lexists(self.join(path))
449 return os.path.lexists(self.join(path))
347
450
348 def lstat(self, path=None):
451 def lstat(self, path=None):
349 return os.lstat(self.join(path))
452 return os.lstat(self.join(path))
350
453
351 def listdir(self, path=None):
454 def listdir(self, path=None):
352 return os.listdir(self.join(path))
455 return os.listdir(self.join(path))
353
456
354 def makedir(self, path=None, notindexed=True):
457 def makedir(self, path=None, notindexed=True):
355 return util.makedir(self.join(path), notindexed)
458 return util.makedir(self.join(path), notindexed)
356
459
357 def makedirs(self, path=None, mode=None):
460 def makedirs(self, path=None, mode=None):
358 return util.makedirs(self.join(path), mode)
461 return util.makedirs(self.join(path), mode)
359
462
360 def makelock(self, info, path):
463 def makelock(self, info, path):
361 return util.makelock(info, self.join(path))
464 return util.makelock(info, self.join(path))
362
465
363 def mkdir(self, path=None):
466 def mkdir(self, path=None):
364 return os.mkdir(self.join(path))
467 return os.mkdir(self.join(path))
365
468
366 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
469 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
367 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
470 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
368 dir=self.join(dir), text=text)
471 dir=self.join(dir), text=text)
369 dname, fname = util.split(name)
472 dname, fname = util.split(name)
370 if dir:
473 if dir:
371 return fd, os.path.join(dir, fname)
474 return fd, os.path.join(dir, fname)
372 else:
475 else:
373 return fd, fname
476 return fd, fname
374
477
375 def readdir(self, path=None, stat=None, skip=None):
478 def readdir(self, path=None, stat=None, skip=None):
376 return osutil.listdir(self.join(path), stat, skip)
479 return osutil.listdir(self.join(path), stat, skip)
377
480
378 def readlock(self, path):
481 def readlock(self, path):
379 return util.readlock(self.join(path))
482 return util.readlock(self.join(path))
380
483
381 def rename(self, src, dst, checkambig=False):
484 def rename(self, src, dst, checkambig=False):
382 """Rename from src to dst
485 """Rename from src to dst
383
486
384 checkambig argument is used with util.filestat, and is useful
487 checkambig argument is used with util.filestat, and is useful
385 only if destination file is guarded by any lock
488 only if destination file is guarded by any lock
386 (e.g. repo.lock or repo.wlock).
489 (e.g. repo.lock or repo.wlock).
387 """
490 """
388 dstpath = self.join(dst)
491 dstpath = self.join(dst)
389 oldstat = checkambig and util.filestat(dstpath)
492 oldstat = checkambig and util.filestat(dstpath)
390 if oldstat and oldstat.stat:
493 if oldstat and oldstat.stat:
391 ret = util.rename(self.join(src), dstpath)
494 ret = util.rename(self.join(src), dstpath)
392 newstat = util.filestat(dstpath)
495 newstat = util.filestat(dstpath)
393 if newstat.isambig(oldstat):
496 if newstat.isambig(oldstat):
394 # stat of renamed file is ambiguous to original one
497 # stat of renamed file is ambiguous to original one
395 newstat.avoidambig(dstpath, oldstat)
498 newstat.avoidambig(dstpath, oldstat)
396 return ret
499 return ret
397 return util.rename(self.join(src), dstpath)
500 return util.rename(self.join(src), dstpath)
398
501
399 def readlink(self, path):
502 def readlink(self, path):
400 return os.readlink(self.join(path))
503 return os.readlink(self.join(path))
401
504
402 def removedirs(self, path=None):
505 def removedirs(self, path=None):
403 """Remove a leaf directory and all empty intermediate ones
506 """Remove a leaf directory and all empty intermediate ones
404 """
507 """
405 return util.removedirs(self.join(path))
508 return util.removedirs(self.join(path))
406
509
407 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
510 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
408 """Remove a directory tree recursively
511 """Remove a directory tree recursively
409
512
410 If ``forcibly``, this tries to remove READ-ONLY files, too.
513 If ``forcibly``, this tries to remove READ-ONLY files, too.
411 """
514 """
412 if forcibly:
515 if forcibly:
413 def onerror(function, path, excinfo):
516 def onerror(function, path, excinfo):
414 if function is not os.remove:
517 if function is not os.remove:
415 raise
518 raise
416 # read-only files cannot be unlinked under Windows
519 # read-only files cannot be unlinked under Windows
417 s = os.stat(path)
520 s = os.stat(path)
418 if (s.st_mode & stat.S_IWRITE) != 0:
521 if (s.st_mode & stat.S_IWRITE) != 0:
419 raise
522 raise
420 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
523 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
421 os.remove(path)
524 os.remove(path)
422 else:
525 else:
423 onerror = None
526 onerror = None
424 return shutil.rmtree(self.join(path),
527 return shutil.rmtree(self.join(path),
425 ignore_errors=ignore_errors, onerror=onerror)
528 ignore_errors=ignore_errors, onerror=onerror)
426
529
427 def setflags(self, path, l, x):
530 def setflags(self, path, l, x):
428 return util.setflags(self.join(path), l, x)
531 return util.setflags(self.join(path), l, x)
429
532
430 def stat(self, path=None):
533 def stat(self, path=None):
431 return os.stat(self.join(path))
534 return os.stat(self.join(path))
432
535
433 def unlink(self, path=None):
536 def unlink(self, path=None):
434 return util.unlink(self.join(path))
537 return util.unlink(self.join(path))
435
538
436 def unlinkpath(self, path=None, ignoremissing=False):
539 def unlinkpath(self, path=None, ignoremissing=False):
437 return util.unlinkpath(self.join(path), ignoremissing)
540 return util.unlinkpath(self.join(path), ignoremissing)
438
541
439 def utime(self, path=None, t=None):
542 def utime(self, path=None, t=None):
440 return os.utime(self.join(path), t)
543 return os.utime(self.join(path), t)
441
544
442 def walk(self, path=None, onerror=None):
545 def walk(self, path=None, onerror=None):
443 """Yield (dirpath, dirs, files) tuple for each directories under path
546 """Yield (dirpath, dirs, files) tuple for each directories under path
444
547
445 ``dirpath`` is relative to the root of this vfs. This
548 ``dirpath`` is relative to the root of this vfs. This
446 uses ``os.sep`` as the path separator, even if you specify a
549 uses ``os.sep`` as the path separator, even if you specify a
447 POSIX-style ``path``.
550 POSIX-style ``path``.
448
551
449 "The root of this vfs" is represented as empty ``dirpath``.
552 "The root of this vfs" is represented as empty ``dirpath``.
450 """
553 """
451 root = os.path.normpath(self.join(None))
554 root = os.path.normpath(self.join(None))
452 # when dirpath == root, dirpath[prefixlen:] becomes empty
555 # when dirpath == root, dirpath[prefixlen:] becomes empty
453 # because len(dirpath) < prefixlen.
556 # because len(dirpath) < prefixlen.
454 prefixlen = len(pathutil.normasprefix(root))
557 prefixlen = len(pathutil.normasprefix(root))
455 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
558 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
456 yield (dirpath[prefixlen:], dirs, files)
559 yield (dirpath[prefixlen:], dirs, files)
457
560
458 @contextlib.contextmanager
561 @contextlib.contextmanager
459 def backgroundclosing(self, ui, expectedcount=-1):
562 def backgroundclosing(self, ui, expectedcount=-1):
460 """Allow files to be closed asynchronously.
563 """Allow files to be closed asynchronously.
461
564
462 When this context manager is active, ``backgroundclose`` can be passed
565 When this context manager is active, ``backgroundclose`` can be passed
463 to ``__call__``/``open`` to result in the file possibly being closed
566 to ``__call__``/``open`` to result in the file possibly being closed
464 asynchronously, on a background thread.
567 asynchronously, on a background thread.
465 """
568 """
466 # This is an arbitrary restriction and could be changed if we ever
569 # This is an arbitrary restriction and could be changed if we ever
467 # have a use case.
570 # have a use case.
468 vfs = getattr(self, 'vfs', self)
571 vfs = getattr(self, 'vfs', self)
469 if getattr(vfs, '_backgroundfilecloser', None):
572 if getattr(vfs, '_backgroundfilecloser', None):
470 raise error.Abort(
573 raise error.Abort(
471 _('can only have 1 active background file closer'))
574 _('can only have 1 active background file closer'))
472
575
473 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
576 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
474 try:
577 try:
475 vfs._backgroundfilecloser = bfc
578 vfs._backgroundfilecloser = bfc
476 yield bfc
579 yield bfc
477 finally:
580 finally:
478 vfs._backgroundfilecloser = None
581 vfs._backgroundfilecloser = None
479
582
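# Editor's note: a hedged usage sketch for the context manager above; `ui`
# and `somevfs` are assumed, and `files` is a list of (name, data) pairs.
# backgroundclose=True is only honoured while backgroundclosing() is active.
def _bulkwrite(ui, somevfs, files):
    with somevfs.backgroundclosing(ui, expectedcount=len(files)):
        for name, data in files:
            somevfs.write(name, data, backgroundclose=True)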
480 class vfs(abstractvfs):
583 class vfs(abstractvfs):
481 '''Operate files relative to a base directory
584 '''Operate files relative to a base directory
482
585
483 This class is used to hide the details of COW semantics and
586 This class is used to hide the details of COW semantics and
484 remote file access from higher level code.
587 remote file access from higher level code.
485 '''
588 '''
486 def __init__(self, base, audit=True, expandpath=False, realpath=False):
589 def __init__(self, base, audit=True, expandpath=False, realpath=False):
487 if expandpath:
590 if expandpath:
488 base = util.expandpath(base)
591 base = util.expandpath(base)
489 if realpath:
592 if realpath:
490 base = os.path.realpath(base)
593 base = os.path.realpath(base)
491 self.base = base
594 self.base = base
492 self.mustaudit = audit
595 self.mustaudit = audit
493 self.createmode = None
596 self.createmode = None
494 self._trustnlink = None
597 self._trustnlink = None
495
598
496 @property
599 @property
497 def mustaudit(self):
600 def mustaudit(self):
498 return self._audit
601 return self._audit
499
602
500 @mustaudit.setter
603 @mustaudit.setter
501 def mustaudit(self, onoff):
604 def mustaudit(self, onoff):
502 self._audit = onoff
605 self._audit = onoff
503 if onoff:
606 if onoff:
504 self.audit = pathutil.pathauditor(self.base)
607 self.audit = pathutil.pathauditor(self.base)
505 else:
608 else:
506 self.audit = util.always
609 self.audit = util.always
507
610
508 @util.propertycache
611 @util.propertycache
509 def _cansymlink(self):
612 def _cansymlink(self):
510 return util.checklink(self.base)
613 return util.checklink(self.base)
511
614
512 @util.propertycache
615 @util.propertycache
513 def _chmod(self):
616 def _chmod(self):
514 return util.checkexec(self.base)
617 return util.checkexec(self.base)
515
618
516 def _fixfilemode(self, name):
619 def _fixfilemode(self, name):
517 if self.createmode is None or not self._chmod:
620 if self.createmode is None or not self._chmod:
518 return
621 return
519 os.chmod(name, self.createmode & 0o666)
622 os.chmod(name, self.createmode & 0o666)
520
623
521 def __call__(self, path, mode="r", text=False, atomictemp=False,
624 def __call__(self, path, mode="r", text=False, atomictemp=False,
522 notindexed=False, backgroundclose=False, checkambig=False):
625 notindexed=False, backgroundclose=False, checkambig=False):
523 '''Open ``path`` file, which is relative to vfs root.
626 '''Open ``path`` file, which is relative to vfs root.
524
627
525 Newly created directories are marked as "not to be indexed by
628 Newly created directories are marked as "not to be indexed by
526 the content indexing service", if ``notindexed`` is specified
629 the content indexing service", if ``notindexed`` is specified
527 for "write" mode access.
630 for "write" mode access.
528
631
529 If ``backgroundclose`` is passed, the file may be closed asynchronously.
632 If ``backgroundclose`` is passed, the file may be closed asynchronously.
530 It can only be used if the ``self.backgroundclosing()`` context manager
633 It can only be used if the ``self.backgroundclosing()`` context manager
531 is active. This should only be specified if the following criteria hold:
634 is active. This should only be specified if the following criteria hold:
532
635
533 1. There is a potential for writing thousands of files. Unless you
636 1. There is a potential for writing thousands of files. Unless you
534 are writing thousands of files, the performance benefits of
637 are writing thousands of files, the performance benefits of
535 asynchronously closing files are not realized.
638 asynchronously closing files are not realized.
536 2. Files are opened exactly once for the ``backgroundclosing``
639 2. Files are opened exactly once for the ``backgroundclosing``
537 active duration and are therefore free of race conditions between
640 active duration and are therefore free of race conditions between
538 closing a file on a background thread and reopening it. (If the
641 closing a file on a background thread and reopening it. (If the
539 file were opened multiple times, there could be unflushed data
642 file were opened multiple times, there could be unflushed data
540 because the original file handle hasn't been flushed/closed yet.)
643 because the original file handle hasn't been flushed/closed yet.)
541
644
542 ``checkambig`` argument is passed to atomictempfile (valid
645 ``checkambig`` argument is passed to atomictempfile (valid
543 only for writing), and is useful only if target file is
646 only for writing), and is useful only if target file is
544 guarded by any lock (e.g. repo.lock or repo.wlock).
647 guarded by any lock (e.g. repo.lock or repo.wlock).
545 '''
648 '''
546 if self._audit:
649 if self._audit:
547 r = util.checkosfilename(path)
650 r = util.checkosfilename(path)
548 if r:
651 if r:
549 raise error.Abort("%s: %r" % (r, path))
652 raise error.Abort("%s: %r" % (r, path))
550 self.audit(path)
653 self.audit(path)
551 f = self.join(path)
654 f = self.join(path)
552
655
553 if not text and "b" not in mode:
656 if not text and "b" not in mode:
554 mode += "b" # for that other OS
657 mode += "b" # for that other OS
555
658
556 nlink = -1
659 nlink = -1
557 if mode not in ('r', 'rb'):
660 if mode not in ('r', 'rb'):
558 dirname, basename = util.split(f)
661 dirname, basename = util.split(f)
559 # If basename is empty, then the path is malformed because it points
662 # If basename is empty, then the path is malformed because it points
560 # to a directory. Let the posixfile() call below raise IOError.
663 # to a directory. Let the posixfile() call below raise IOError.
561 if basename:
664 if basename:
562 if atomictemp:
665 if atomictemp:
563 util.makedirs(dirname, self.createmode, notindexed)
666 util.makedirs(dirname, self.createmode, notindexed)
564 return util.atomictempfile(f, mode, self.createmode,
667 return util.atomictempfile(f, mode, self.createmode,
565 checkambig=checkambig)
668 checkambig=checkambig)
566 try:
669 try:
567 if 'w' in mode:
670 if 'w' in mode:
568 util.unlink(f)
671 util.unlink(f)
569 nlink = 0
672 nlink = 0
570 else:
673 else:
571 # nlinks() may behave differently for files on Windows
674 # nlinks() may behave differently for files on Windows
572 # shares if the file is open.
675 # shares if the file is open.
573 with util.posixfile(f):
676 with util.posixfile(f):
574 nlink = util.nlinks(f)
677 nlink = util.nlinks(f)
575 if nlink < 1:
678 if nlink < 1:
576 nlink = 2 # force mktempcopy (issue1922)
679 nlink = 2 # force mktempcopy (issue1922)
577 except (OSError, IOError) as e:
680 except (OSError, IOError) as e:
578 if e.errno != errno.ENOENT:
681 if e.errno != errno.ENOENT:
579 raise
682 raise
580 nlink = 0
683 nlink = 0
581 util.makedirs(dirname, self.createmode, notindexed)
684 util.makedirs(dirname, self.createmode, notindexed)
582 if nlink > 0:
685 if nlink > 0:
583 if self._trustnlink is None:
686 if self._trustnlink is None:
584 self._trustnlink = nlink > 1 or util.checknlink(f)
687 self._trustnlink = nlink > 1 or util.checknlink(f)
585 if nlink > 1 or not self._trustnlink:
688 if nlink > 1 or not self._trustnlink:
586 util.rename(util.mktempcopy(f), f)
689 util.rename(util.mktempcopy(f), f)
587 fp = util.posixfile(f, mode)
690 fp = util.posixfile(f, mode)
588 if nlink == 0:
691 if nlink == 0:
589 self._fixfilemode(f)
692 self._fixfilemode(f)
590
693
591 if checkambig:
694 if checkambig:
592 if mode in ('r', 'rb'):
695 if mode in ('r', 'rb'):
593 raise error.Abort(_('implementation error: mode %s is not'
696 raise error.Abort(_('implementation error: mode %s is not'
594 ' valid for checkambig=True') % mode)
697 ' valid for checkambig=True') % mode)
595 fp = checkambigatclosing(fp)
698 fp = checkambigatclosing(fp)
596
699
597 if backgroundclose:
700 if backgroundclose:
598 if not self._backgroundfilecloser:
701 if not self._backgroundfilecloser:
599 raise error.Abort(_('backgroundclose can only be used when a '
702 raise error.Abort(_('backgroundclose can only be used when a '
600 'backgroundclosing context manager is active')
703 'backgroundclosing context manager is active')
601 )
704 )
602
705
603 fp = delayclosedfile(fp, self._backgroundfilecloser)
706 fp = delayclosedfile(fp, self._backgroundfilecloser)
604
707
605 return fp
708 return fp
606
709
607 def symlink(self, src, dst):
710 def symlink(self, src, dst):
608 self.audit(dst)
711 self.audit(dst)
609 linkname = self.join(dst)
712 linkname = self.join(dst)
610 try:
713 try:
611 os.unlink(linkname)
714 os.unlink(linkname)
612 except OSError:
715 except OSError:
613 pass
716 pass
614
717
615 util.makedirs(os.path.dirname(linkname), self.createmode)
718 util.makedirs(os.path.dirname(linkname), self.createmode)
616
719
617 if self._cansymlink:
720 if self._cansymlink:
618 try:
721 try:
619 os.symlink(src, linkname)
722 os.symlink(src, linkname)
620 except OSError as err:
723 except OSError as err:
621 raise OSError(err.errno, _('could not symlink to %r: %s') %
724 raise OSError(err.errno, _('could not symlink to %r: %s') %
622 (src, err.strerror), linkname)
725 (src, err.strerror), linkname)
623 else:
726 else:
624 self.write(dst, src)
727 self.write(dst, src)
625
728
626 def join(self, path, *insidef):
729 def join(self, path, *insidef):
627 if path:
730 if path:
628 return os.path.join(self.base, path, *insidef)
731 return os.path.join(self.base, path, *insidef)
629 else:
732 else:
630 return self.base
733 return self.base
631
734
632 opener = vfs
735 opener = vfs
633
736
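# Editor's note: a small sketch of the vfs class above; the base directory is
# illustrative. Paths are interpreted relative to the base, missing parent
# directories are created on write, and tryread() absorbs missing files.
def _vfsexample():
    v = vfs('/tmp/vfs-demo')
    v.write('store/data.txt', 'payload')    # creates store/ as needed
    assert v.tryread('store/data.txt') == 'payload'
    assert v.tryread('no-such-file') == ''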
634 class auditvfs(object):
737 class auditvfs(object):
635 def __init__(self, vfs):
738 def __init__(self, vfs):
636 self.vfs = vfs
739 self.vfs = vfs
637
740
638 @property
741 @property
639 def mustaudit(self):
742 def mustaudit(self):
640 return self.vfs.mustaudit
743 return self.vfs.mustaudit
641
744
642 @mustaudit.setter
745 @mustaudit.setter
643 def mustaudit(self, onoff):
746 def mustaudit(self, onoff):
644 self.vfs.mustaudit = onoff
747 self.vfs.mustaudit = onoff
645
748
646 @property
749 @property
647 def options(self):
750 def options(self):
648 return self.vfs.options
751 return self.vfs.options
649
752
650 @options.setter
753 @options.setter
651 def options(self, value):
754 def options(self, value):
652 self.vfs.options = value
755 self.vfs.options = value
653
756
654 class filtervfs(abstractvfs, auditvfs):
757 class filtervfs(abstractvfs, auditvfs):
655 '''Wrapper vfs for filtering filenames with a function.'''
758 '''Wrapper vfs for filtering filenames with a function.'''
656
759
657 def __init__(self, vfs, filter):
760 def __init__(self, vfs, filter):
658 auditvfs.__init__(self, vfs)
761 auditvfs.__init__(self, vfs)
659 self._filter = filter
762 self._filter = filter
660
763
661 def __call__(self, path, *args, **kwargs):
764 def __call__(self, path, *args, **kwargs):
662 return self.vfs(self._filter(path), *args, **kwargs)
765 return self.vfs(self._filter(path), *args, **kwargs)
663
766
664 def join(self, path, *insidef):
767 def join(self, path, *insidef):
665 if path:
768 if path:
666 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
769 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
667 else:
770 else:
668 return self.vfs.join(path)
771 return self.vfs.join(path)
669
772
670 filteropener = filtervfs
773 filteropener = filtervfs
671
774
672 class readonlyvfs(abstractvfs, auditvfs):
775 class readonlyvfs(abstractvfs, auditvfs):
673 '''Wrapper vfs preventing any writing.'''
776 '''Wrapper vfs preventing any writing.'''
674
777
675 def __init__(self, vfs):
778 def __init__(self, vfs):
676 auditvfs.__init__(self, vfs)
779 auditvfs.__init__(self, vfs)
677
780
678 def __call__(self, path, mode='r', *args, **kw):
781 def __call__(self, path, mode='r', *args, **kw):
679 if mode not in ('r', 'rb'):
782 if mode not in ('r', 'rb'):
680 raise error.Abort(_('this vfs is read only'))
783 raise error.Abort(_('this vfs is read only'))
681 return self.vfs(path, mode, *args, **kw)
784 return self.vfs(path, mode, *args, **kw)
682
785
683 def join(self, path, *insidef):
786 def join(self, path, *insidef):
684 return self.vfs.join(path, *insidef)
787 return self.vfs.join(path, *insidef)
685
788
686 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
789 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
687 '''yield every hg repository under path, always recursively.
790 '''yield every hg repository under path, always recursively.
688 The recurse flag will only control recursion into repo working dirs'''
791 The recurse flag will only control recursion into repo working dirs'''
689 def errhandler(err):
792 def errhandler(err):
690 if err.filename == path:
793 if err.filename == path:
691 raise err
794 raise err
692 samestat = getattr(os.path, 'samestat', None)
795 samestat = getattr(os.path, 'samestat', None)
693 if followsym and samestat is not None:
796 if followsym and samestat is not None:
694 def adddir(dirlst, dirname):
797 def adddir(dirlst, dirname):
695 match = False
798 match = False
696 dirstat = os.stat(dirname)
799 dirstat = os.stat(dirname)
697 for lstdirstat in dirlst:
800 for lstdirstat in dirlst:
698 if samestat(dirstat, lstdirstat):
801 if samestat(dirstat, lstdirstat):
699 match = True
802 match = True
700 break
803 break
701 if not match:
804 if not match:
702 dirlst.append(dirstat)
805 dirlst.append(dirstat)
703 return not match
806 return not match
704 else:
807 else:
705 followsym = False
808 followsym = False
706
809
707 if (seen_dirs is None) and followsym:
810 if (seen_dirs is None) and followsym:
708 seen_dirs = []
811 seen_dirs = []
709 adddir(seen_dirs, path)
812 adddir(seen_dirs, path)
710 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
813 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
711 dirs.sort()
814 dirs.sort()
712 if '.hg' in dirs:
815 if '.hg' in dirs:
713 yield root # found a repository
816 yield root # found a repository
714 qroot = os.path.join(root, '.hg', 'patches')
817 qroot = os.path.join(root, '.hg', 'patches')
715 if os.path.isdir(os.path.join(qroot, '.hg')):
818 if os.path.isdir(os.path.join(qroot, '.hg')):
716 yield qroot # we have a patch queue repo here
819 yield qroot # we have a patch queue repo here
717 if recurse:
820 if recurse:
718 # avoid recursing inside the .hg directory
821 # avoid recursing inside the .hg directory
719 dirs.remove('.hg')
822 dirs.remove('.hg')
720 else:
823 else:
721 dirs[:] = [] # don't descend further
824 dirs[:] = [] # don't descend further
722 elif followsym:
825 elif followsym:
723 newdirs = []
826 newdirs = []
724 for d in dirs:
827 for d in dirs:
725 fname = os.path.join(root, d)
828 fname = os.path.join(root, d)
726 if adddir(seen_dirs, fname):
829 if adddir(seen_dirs, fname):
727 if os.path.islink(fname):
830 if os.path.islink(fname):
728 for hgname in walkrepos(fname, True, seen_dirs):
831 for hgname in walkrepos(fname, True, seen_dirs):
729 yield hgname
832 yield hgname
730 else:
833 else:
731 newdirs.append(d)
834 newdirs.append(d)
732 dirs[:] = newdirs
835 dirs[:] = newdirs
733
836
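# Editor's note: an illustrative walk; the starting path is a placeholder.
# Repositories and their patch-queue repos (.hg/patches) are yielded, as the
# function above describes.
def _listrepos(ui, path='/srv/hg'):
    for root in walkrepos(path, followsym=True, recurse=True):
        ui.write('%s\n' % root)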
734 def osrcpath():
837 def osrcpath():
735 '''return default os-specific hgrc search path'''
838 '''return default os-specific hgrc search path'''
736 path = []
839 path = []
737 defaultpath = os.path.join(util.datapath, 'default.d')
840 defaultpath = os.path.join(util.datapath, 'default.d')
738 if os.path.isdir(defaultpath):
841 if os.path.isdir(defaultpath):
739 for f, kind in osutil.listdir(defaultpath):
842 for f, kind in osutil.listdir(defaultpath):
740 if f.endswith('.rc'):
843 if f.endswith('.rc'):
741 path.append(os.path.join(defaultpath, f))
844 path.append(os.path.join(defaultpath, f))
742 path.extend(systemrcpath())
845 path.extend(systemrcpath())
743 path.extend(userrcpath())
846 path.extend(userrcpath())
744 path = [os.path.normpath(f) for f in path]
847 path = [os.path.normpath(f) for f in path]
745 return path
848 return path
746
849
747 _rcpath = None
850 _rcpath = None
748
851
749 def rcpath():
852 def rcpath():
750 '''return hgrc search path. if env var HGRCPATH is set, use it.
853 '''return hgrc search path. if env var HGRCPATH is set, use it.
751 for each item in path, if directory, use files ending in .rc,
854 for each item in path, if directory, use files ending in .rc,
752 else use item.
855 else use item.
753 make HGRCPATH empty to only look in .hg/hgrc of current repo.
856 make HGRCPATH empty to only look in .hg/hgrc of current repo.
754 if no HGRCPATH, use default os-specific path.'''
857 if no HGRCPATH, use default os-specific path.'''
755 global _rcpath
858 global _rcpath
756 if _rcpath is None:
859 if _rcpath is None:
757 if 'HGRCPATH' in encoding.environ:
860 if 'HGRCPATH' in encoding.environ:
758 _rcpath = []
861 _rcpath = []
759 for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
862 for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
760 if not p:
863 if not p:
761 continue
864 continue
762 p = util.expandpath(p)
865 p = util.expandpath(p)
763 if os.path.isdir(p):
866 if os.path.isdir(p):
764 for f, kind in osutil.listdir(p):
867 for f, kind in osutil.listdir(p):
765 if f.endswith('.rc'):
868 if f.endswith('.rc'):
766 _rcpath.append(os.path.join(p, f))
869 _rcpath.append(os.path.join(p, f))
767 else:
870 else:
768 _rcpath.append(p)
871 _rcpath.append(p)
769 else:
872 else:
770 _rcpath = osrcpath()
873 _rcpath = osrcpath()
771 return _rcpath
874 return _rcpath
772
875
773 def intrev(rev):
876 def intrev(rev):
774 """Return integer for a given revision that can be used in comparison or
877 """Return integer for a given revision that can be used in comparison or
775 arithmetic operation"""
878 arithmetic operation"""
776 if rev is None:
879 if rev is None:
777 return wdirrev
880 return wdirrev
778 return rev
881 return rev
779
882
780 def revsingle(repo, revspec, default='.'):
883 def revsingle(repo, revspec, default='.'):
781 if not revspec and revspec != 0:
884 if not revspec and revspec != 0:
782 return repo[default]
885 return repo[default]
783
886
784 l = revrange(repo, [revspec])
887 l = revrange(repo, [revspec])
785 if not l:
888 if not l:
786 raise error.Abort(_('empty revision set'))
889 raise error.Abort(_('empty revision set'))
787 return repo[l.last()]
890 return repo[l.last()]
788
891
789 def _pairspec(revspec):
892 def _pairspec(revspec):
790 tree = revset.parse(revspec)
893 tree = revset.parse(revspec)
791 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
894 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
792
895
793 def revpair(repo, revs):
896 def revpair(repo, revs):
794 if not revs:
897 if not revs:
795 return repo.dirstate.p1(), None
898 return repo.dirstate.p1(), None
796
899
797 l = revrange(repo, revs)
900 l = revrange(repo, revs)
798
901
799 if not l:
902 if not l:
800 first = second = None
903 first = second = None
801 elif l.isascending():
904 elif l.isascending():
802 first = l.min()
905 first = l.min()
803 second = l.max()
906 second = l.max()
804 elif l.isdescending():
907 elif l.isdescending():
805 first = l.max()
908 first = l.max()
806 second = l.min()
909 second = l.min()
807 else:
910 else:
808 first = l.first()
911 first = l.first()
809 second = l.last()
912 second = l.last()
810
913
811 if first is None:
914 if first is None:
812 raise error.Abort(_('empty revision range'))
915 raise error.Abort(_('empty revision range'))
813 if (first == second and len(revs) >= 2
916 if (first == second and len(revs) >= 2
814 and not all(revrange(repo, [r]) for r in revs)):
917 and not all(revrange(repo, [r]) for r in revs)):
815 raise error.Abort(_('empty revision on one side of range'))
918 raise error.Abort(_('empty revision on one side of range'))
816
919
817 # if top-level is range expression, the result must always be a pair
920 # if top-level is range expression, the result must always be a pair
818 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
921 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
819 return repo.lookup(first), None
922 return repo.lookup(first), None
820
923
821 return repo.lookup(first), repo.lookup(second)
924 return repo.lookup(first), repo.lookup(second)
822
925
823 def revrange(repo, specs):
926 def revrange(repo, specs):
824 """Execute 1 to many revsets and return the union.
927 """Execute 1 to many revsets and return the union.
825
928
826 This is the preferred mechanism for executing revsets using user-specified
929 This is the preferred mechanism for executing revsets using user-specified
827 config options, such as revset aliases.
930 config options, such as revset aliases.
828
931
829 The revsets specified by ``specs`` will be executed via a chained ``OR``
932 The revsets specified by ``specs`` will be executed via a chained ``OR``
830 expression. If ``specs`` is empty, an empty result is returned.
933 expression. If ``specs`` is empty, an empty result is returned.
831
934
832 ``specs`` can contain integers, in which case they are assumed to be
935 ``specs`` can contain integers, in which case they are assumed to be
833 revision numbers.
936 revision numbers.
834
937
835 It is assumed the revsets are already formatted. If you have arguments
938 It is assumed the revsets are already formatted. If you have arguments
836 that need to be expanded in the revset, call ``revset.formatspec()``
939 that need to be expanded in the revset, call ``revset.formatspec()``
837 and pass the result as an element of ``specs``.
940 and pass the result as an element of ``specs``.
838
941
839 Specifying a single revset is allowed.
942 Specifying a single revset is allowed.
840
943
841 Returns a ``revset.abstractsmartset`` which is a list-like interface over
944 Returns a ``revset.abstractsmartset`` which is a list-like interface over
842 integer revisions.
945 integer revisions.
843 """
946 """
844 allspecs = []
947 allspecs = []
845 for spec in specs:
948 for spec in specs:
846 if isinstance(spec, int):
949 if isinstance(spec, int):
847 spec = revset.formatspec('rev(%d)', spec)
950 spec = revset.formatspec('rev(%d)', spec)
848 allspecs.append(spec)
951 allspecs.append(spec)
849 m = revset.matchany(repo.ui, allspecs, repo)
952 m = revset.matchany(repo.ui, allspecs, repo)
850 return m(repo)
953 return m(repo)
851
954
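# Editor's note: a hedged sketch of the revset helpers above; `repo` is an
# assumed repository object and the revset strings are illustrative.
def _revsetexample(repo):
    revs = revrange(repo, ['draft()', 'heads(default)'])   # union of specs
    ctx = revsingle(repo, 'tip')                           # a changectx
    return list(revs), ctx.rev()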
852 def meaningfulparents(repo, ctx):
955 def meaningfulparents(repo, ctx):
853 """Return list of meaningful (or all if debug) parentrevs for rev.
956 """Return list of meaningful (or all if debug) parentrevs for rev.
854
957
855 For merges (two non-nullrev revisions) both parents are meaningful.
958 For merges (two non-nullrev revisions) both parents are meaningful.
856 Otherwise the first parent revision is considered meaningful if it
959 Otherwise the first parent revision is considered meaningful if it
857 is not the preceding revision.
960 is not the preceding revision.
858 """
961 """
859 parents = ctx.parents()
962 parents = ctx.parents()
860 if len(parents) > 1:
963 if len(parents) > 1:
861 return parents
964 return parents
862 if repo.ui.debugflag:
965 if repo.ui.debugflag:
863 return [parents[0], repo['null']]
966 return [parents[0], repo['null']]
864 if parents[0].rev() >= intrev(ctx.rev()) - 1:
967 if parents[0].rev() >= intrev(ctx.rev()) - 1:
865 return []
968 return []
866 return parents
969 return parents
867
970
868 def expandpats(pats):
971 def expandpats(pats):
869 '''Expand bare globs when running on windows.
972 '''Expand bare globs when running on windows.
870 On posix we assume it has already been done by sh.'''
973 On posix we assume it has already been done by sh.'''
871 if not util.expandglobs:
974 if not util.expandglobs:
872 return list(pats)
975 return list(pats)
873 ret = []
976 ret = []
874 for kindpat in pats:
977 for kindpat in pats:
875 kind, pat = matchmod._patsplit(kindpat, None)
978 kind, pat = matchmod._patsplit(kindpat, None)
876 if kind is None:
979 if kind is None:
877 try:
980 try:
878 globbed = glob.glob(pat)
981 globbed = glob.glob(pat)
879 except re.error:
982 except re.error:
880 globbed = [pat]
983 globbed = [pat]
881 if globbed:
984 if globbed:
882 ret.extend(globbed)
985 ret.extend(globbed)
883 continue
986 continue
884 ret.append(kindpat)
987 ret.append(kindpat)
885 return ret
988 return ret
886
989
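A small hedged example of the behaviour described in the docstring above; the file names in the comments are only illustrative.

    # sketch: bare patterns are globbed only when util.expandglobs is true
    # (i.e. on Windows); kind-prefixed patterns are left for the matcher
    from mercurial import scmutil

    pats = scmutil.expandpats(['*.py', 'glob:doc/*.txt'])
    # Windows: ['a.py', 'b.py', ..., 'glob:doc/*.txt']
    # POSIX:   ['*.py', 'glob:doc/*.txt'] unchanged; the shell already expanded
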
887 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
990 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
888 badfn=None):
991 badfn=None):
889 '''Return a matcher and the patterns that were used.
992 '''Return a matcher and the patterns that were used.
890 The matcher will warn about bad matches, unless an alternate badfn callback
993 The matcher will warn about bad matches, unless an alternate badfn callback
891 is provided.'''
994 is provided.'''
892 if pats == ("",):
995 if pats == ("",):
893 pats = []
996 pats = []
894 if opts is None:
997 if opts is None:
895 opts = {}
998 opts = {}
896 if not globbed and default == 'relpath':
999 if not globbed and default == 'relpath':
897 pats = expandpats(pats or [])
1000 pats = expandpats(pats or [])
898
1001
899 def bad(f, msg):
1002 def bad(f, msg):
900 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
1003 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
901
1004
902 if badfn is None:
1005 if badfn is None:
903 badfn = bad
1006 badfn = bad
904
1007
905 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
1008 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
906 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
1009 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
907
1010
908 if m.always():
1011 if m.always():
909 pats = []
1012 pats = []
910 return m, pats
1013 return m, pats
911
1014
912 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
1015 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
913 badfn=None):
1016 badfn=None):
914 '''Return a matcher that will warn about bad matches.'''
1017 '''Return a matcher that will warn about bad matches.'''
915 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
1018 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
916
1019
917 def matchall(repo):
1020 def matchall(repo):
918 '''Return a matcher that will efficiently match everything.'''
1021 '''Return a matcher that will efficiently match everything.'''
919 return matchmod.always(repo.root, repo.getcwd())
1022 return matchmod.always(repo.root, repo.getcwd())
920
1023
921 def matchfiles(repo, files, badfn=None):
1024 def matchfiles(repo, files, badfn=None):
922 '''Return a matcher that will efficiently match exactly these files.'''
1025 '''Return a matcher that will efficiently match exactly these files.'''
923 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
1026 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
924
1027
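The three helpers above are the usual entry points for building matchers. A hedged sketch, reusing the placeholder repo object from the earlier revrange sketch:

    wctx = repo[None]                      # working directory context
    m = scmutil.match(wctx, ['src'], {'include': ['glob:**.py']})
    m('src/foo.py')                        # matchers are callable -> bool

    everything = scmutil.matchall(repo)    # matches every file
    justtwo = scmutil.matchfiles(repo, ['README', 'setup.py'])
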
925 def origpath(ui, repo, filepath):
1028 def origpath(ui, repo, filepath):
926 '''customize where .orig files are created
1029 '''customize where .orig files are created
927
1030
928 Fetch user defined path from config file: [ui] origbackuppath = <path>
1031 Fetch user defined path from config file: [ui] origbackuppath = <path>
929 Fall back to default (filepath) if not specified
1032 Fall back to default (filepath) if not specified
930 '''
1033 '''
931 origbackuppath = ui.config('ui', 'origbackuppath', None)
1034 origbackuppath = ui.config('ui', 'origbackuppath', None)
932 if origbackuppath is None:
1035 if origbackuppath is None:
933 return filepath + ".orig"
1036 return filepath + ".orig"
934
1037
935 filepathfromroot = os.path.relpath(filepath, start=repo.root)
1038 filepathfromroot = os.path.relpath(filepath, start=repo.root)
936 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
1039 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
937
1040
938 origbackupdir = repo.vfs.dirname(fullorigpath)
1041 origbackupdir = repo.vfs.dirname(fullorigpath)
939 if not repo.vfs.exists(origbackupdir):
1042 if not repo.vfs.exists(origbackupdir):
940 ui.note(_('creating directory: %s\n') % origbackupdir)
1043 ui.note(_('creating directory: %s\n') % origbackupdir)
941 util.makedirs(origbackupdir)
1044 util.makedirs(origbackupdir)
942
1045
943 return fullorigpath + ".orig"
1046 return fullorigpath + ".orig"
944
1047
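For reference, a hedged sketch of the configuration origpath() honors; the directory name is a placeholder:

    [ui]
    origbackuppath = .hg/origbackups

With that setting a backup of src/foo.c is written to .hg/origbackups/src/foo.c.orig under the repository root (the directory is created on demand) instead of src/foo.c.orig next to the file.
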
945 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
1048 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
946 if opts is None:
1049 if opts is None:
947 opts = {}
1050 opts = {}
948 m = matcher
1051 m = matcher
949 if dry_run is None:
1052 if dry_run is None:
950 dry_run = opts.get('dry_run')
1053 dry_run = opts.get('dry_run')
951 if similarity is None:
1054 if similarity is None:
952 similarity = float(opts.get('similarity') or 0)
1055 similarity = float(opts.get('similarity') or 0)
953
1056
954 ret = 0
1057 ret = 0
955 join = lambda f: os.path.join(prefix, f)
1058 join = lambda f: os.path.join(prefix, f)
956
1059
957 wctx = repo[None]
1060 wctx = repo[None]
958 for subpath in sorted(wctx.substate):
1061 for subpath in sorted(wctx.substate):
959 submatch = matchmod.subdirmatcher(subpath, m)
1062 submatch = matchmod.subdirmatcher(subpath, m)
960 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1063 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
961 sub = wctx.sub(subpath)
1064 sub = wctx.sub(subpath)
962 try:
1065 try:
963 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
1066 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
964 ret = 1
1067 ret = 1
965 except error.LookupError:
1068 except error.LookupError:
966 repo.ui.status(_("skipping missing subrepository: %s\n")
1069 repo.ui.status(_("skipping missing subrepository: %s\n")
967 % join(subpath))
1070 % join(subpath))
968
1071
969 rejected = []
1072 rejected = []
970 def badfn(f, msg):
1073 def badfn(f, msg):
971 if f in m.files():
1074 if f in m.files():
972 m.bad(f, msg)
1075 m.bad(f, msg)
973 rejected.append(f)
1076 rejected.append(f)
974
1077
975 badmatch = matchmod.badmatch(m, badfn)
1078 badmatch = matchmod.badmatch(m, badfn)
976 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1079 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
977 badmatch)
1080 badmatch)
978
1081
979 unknownset = set(unknown + forgotten)
1082 unknownset = set(unknown + forgotten)
980 toprint = unknownset.copy()
1083 toprint = unknownset.copy()
981 toprint.update(deleted)
1084 toprint.update(deleted)
982 for abs in sorted(toprint):
1085 for abs in sorted(toprint):
983 if repo.ui.verbose or not m.exact(abs):
1086 if repo.ui.verbose or not m.exact(abs):
984 if abs in unknownset:
1087 if abs in unknownset:
985 status = _('adding %s\n') % m.uipath(abs)
1088 status = _('adding %s\n') % m.uipath(abs)
986 else:
1089 else:
987 status = _('removing %s\n') % m.uipath(abs)
1090 status = _('removing %s\n') % m.uipath(abs)
988 repo.ui.status(status)
1091 repo.ui.status(status)
989
1092
990 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1093 renames = _findrenames(repo, m, added + unknown, removed + deleted,
991 similarity)
1094 similarity)
992
1095
993 if not dry_run:
1096 if not dry_run:
994 _markchanges(repo, unknown + forgotten, deleted, renames)
1097 _markchanges(repo, unknown + forgotten, deleted, renames)
995
1098
996 for f in rejected:
1099 for f in rejected:
997 if f in m.files():
1100 if f in m.files():
998 return 1
1101 return 1
999 return ret
1102 return ret
1000
1103
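A hedged sketch of driving addremove() programmatically, roughly what 'hg addremove -s 75' ends up doing; the repo object is the placeholder from the earlier sketches.

    # add unknown files, forget deleted ones, and record renames for pairs
    # that are at least 75% similar (the CLI divides its percentage by 100)
    m = scmutil.matchall(repo)
    ret = scmutil.addremove(repo, m, '', similarity=0.75)
    # ret is 1 if an explicitly listed file was rejected, 0 otherwise
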
1001 def marktouched(repo, files, similarity=0.0):
1104 def marktouched(repo, files, similarity=0.0):
1002 '''Assert that files have somehow been operated upon. Files are relative to
1105 '''Assert that files have somehow been operated upon. Files are relative to
1003 the repo root.'''
1106 the repo root.'''
1004 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1107 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1005 rejected = []
1108 rejected = []
1006
1109
1007 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1110 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1008
1111
1009 if repo.ui.verbose:
1112 if repo.ui.verbose:
1010 unknownset = set(unknown + forgotten)
1113 unknownset = set(unknown + forgotten)
1011 toprint = unknownset.copy()
1114 toprint = unknownset.copy()
1012 toprint.update(deleted)
1115 toprint.update(deleted)
1013 for abs in sorted(toprint):
1116 for abs in sorted(toprint):
1014 if abs in unknownset:
1117 if abs in unknownset:
1015 status = _('adding %s\n') % abs
1118 status = _('adding %s\n') % abs
1016 else:
1119 else:
1017 status = _('removing %s\n') % abs
1120 status = _('removing %s\n') % abs
1018 repo.ui.status(status)
1121 repo.ui.status(status)
1019
1122
1020 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1123 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1021 similarity)
1124 similarity)
1022
1125
1023 _markchanges(repo, unknown + forgotten, deleted, renames)
1126 _markchanges(repo, unknown + forgotten, deleted, renames)
1024
1127
1025 for f in rejected:
1128 for f in rejected:
1026 if f in m.files():
1129 if f in m.files():
1027 return 1
1130 return 1
1028 return 0
1131 return 0
1029
1132
1030 def _interestingfiles(repo, matcher):
1133 def _interestingfiles(repo, matcher):
1031 '''Walk dirstate with matcher, looking for files that addremove would care
1134 '''Walk dirstate with matcher, looking for files that addremove would care
1032 about.
1135 about.
1033
1136
1034 This is different from dirstate.status because it doesn't care about
1137 This is different from dirstate.status because it doesn't care about
1035 whether files are modified or clean.'''
1138 whether files are modified or clean.'''
1036 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1139 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1037 audit_path = pathutil.pathauditor(repo.root)
1140 audit_path = pathutil.pathauditor(repo.root)
1038
1141
1039 ctx = repo[None]
1142 ctx = repo[None]
1040 dirstate = repo.dirstate
1143 dirstate = repo.dirstate
1041 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
1144 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
1042 full=False)
1145 full=False)
1043 for abs, st in walkresults.iteritems():
1146 for abs, st in walkresults.iteritems():
1044 dstate = dirstate[abs]
1147 dstate = dirstate[abs]
1045 if dstate == '?' and audit_path.check(abs):
1148 if dstate == '?' and audit_path.check(abs):
1046 unknown.append(abs)
1149 unknown.append(abs)
1047 elif dstate != 'r' and not st:
1150 elif dstate != 'r' and not st:
1048 deleted.append(abs)
1151 deleted.append(abs)
1049 elif dstate == 'r' and st:
1152 elif dstate == 'r' and st:
1050 forgotten.append(abs)
1153 forgotten.append(abs)
1051 # for finding renames
1154 # for finding renames
1052 elif dstate == 'r' and not st:
1155 elif dstate == 'r' and not st:
1053 removed.append(abs)
1156 removed.append(abs)
1054 elif dstate == 'a':
1157 elif dstate == 'a':
1055 added.append(abs)
1158 added.append(abs)
1056
1159
1057 return added, unknown, deleted, removed, forgotten
1160 return added, unknown, deleted, removed, forgotten
1058
1161
1059 def _findrenames(repo, matcher, added, removed, similarity):
1162 def _findrenames(repo, matcher, added, removed, similarity):
1060 '''Find renames from removed files to added ones.'''
1163 '''Find renames from removed files to added ones.'''
1061 renames = {}
1164 renames = {}
1062 if similarity > 0:
1165 if similarity > 0:
1063 for old, new, score in similar.findrenames(repo, added, removed,
1166 for old, new, score in similar.findrenames(repo, added, removed,
1064 similarity):
1167 similarity):
1065 if (repo.ui.verbose or not matcher.exact(old)
1168 if (repo.ui.verbose or not matcher.exact(old)
1066 or not matcher.exact(new)):
1169 or not matcher.exact(new)):
1067 repo.ui.status(_('recording removal of %s as rename to %s '
1170 repo.ui.status(_('recording removal of %s as rename to %s '
1068 '(%d%% similar)\n') %
1171 '(%d%% similar)\n') %
1069 (matcher.rel(old), matcher.rel(new),
1172 (matcher.rel(old), matcher.rel(new),
1070 score * 100))
1173 score * 100))
1071 renames[new] = old
1174 renames[new] = old
1072 return renames
1175 return renames
1073
1176
1074 def _markchanges(repo, unknown, deleted, renames):
1177 def _markchanges(repo, unknown, deleted, renames):
1075 '''Marks the files in unknown as added, the files in deleted as removed,
1178 '''Marks the files in unknown as added, the files in deleted as removed,
1076 and the files in renames as copied.'''
1179 and the files in renames as copied.'''
1077 wctx = repo[None]
1180 wctx = repo[None]
1078 with repo.wlock():
1181 with repo.wlock():
1079 wctx.forget(deleted)
1182 wctx.forget(deleted)
1080 wctx.add(unknown)
1183 wctx.add(unknown)
1081 for new, old in renames.iteritems():
1184 for new, old in renames.iteritems():
1082 wctx.copy(old, new)
1185 wctx.copy(old, new)
1083
1186
1084 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1187 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1085 """Update the dirstate to reflect the intent of copying src to dst. For
1188 """Update the dirstate to reflect the intent of copying src to dst. For
1086 different reasons it might not end with dst being marked as copied from src.
1189 different reasons it might not end with dst being marked as copied from src.
1087 """
1190 """
1088 origsrc = repo.dirstate.copied(src) or src
1191 origsrc = repo.dirstate.copied(src) or src
1089 if dst == origsrc: # copying back a copy?
1192 if dst == origsrc: # copying back a copy?
1090 if repo.dirstate[dst] not in 'mn' and not dryrun:
1193 if repo.dirstate[dst] not in 'mn' and not dryrun:
1091 repo.dirstate.normallookup(dst)
1194 repo.dirstate.normallookup(dst)
1092 else:
1195 else:
1093 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1196 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1094 if not ui.quiet:
1197 if not ui.quiet:
1095 ui.warn(_("%s has not been committed yet, so no copy "
1198 ui.warn(_("%s has not been committed yet, so no copy "
1096 "data will be stored for %s.\n")
1199 "data will be stored for %s.\n")
1097 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1200 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1098 if repo.dirstate[dst] in '?r' and not dryrun:
1201 if repo.dirstate[dst] in '?r' and not dryrun:
1099 wctx.add([dst])
1202 wctx.add([dst])
1100 elif not dryrun:
1203 elif not dryrun:
1101 wctx.copy(origsrc, dst)
1204 wctx.copy(origsrc, dst)
1102
1205
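A hedged sketch of recording a copy in the dirstate; the file names are placeholders and, as for any dirstate mutation, the wlock must be held.

    with repo.wlock():
        wctx = repo[None]
        scmutil.dirstatecopy(repo.ui, repo, wctx, 'a', 'b')
        # with dryrun=True no dirstate change is made, only the warnings run
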
1103 def readrequires(opener, supported):
1206 def readrequires(opener, supported):
1104 '''Reads and parses .hg/requires and checks if all entries found
1207 '''Reads and parses .hg/requires and checks if all entries found
1105 are in the list of supported features.'''
1208 are in the list of supported features.'''
1106 requirements = set(opener.read("requires").splitlines())
1209 requirements = set(opener.read("requires").splitlines())
1107 missings = []
1210 missings = []
1108 for r in requirements:
1211 for r in requirements:
1109 if r not in supported:
1212 if r not in supported:
1110 if not r or not r[0].isalnum():
1213 if not r or not r[0].isalnum():
1111 raise error.RequirementError(_(".hg/requires file is corrupt"))
1214 raise error.RequirementError(_(".hg/requires file is corrupt"))
1112 missings.append(r)
1215 missings.append(r)
1113 missings.sort()
1216 missings.sort()
1114 if missings:
1217 if missings:
1115 raise error.RequirementError(
1218 raise error.RequirementError(
1116 _("repository requires features unknown to this Mercurial: %s")
1219 _("repository requires features unknown to this Mercurial: %s")
1117 % " ".join(missings),
1220 % " ".join(missings),
1118 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1221 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1119 " for more information"))
1222 " for more information"))
1120 return requirements
1223 return requirements
1121
1224
1122 def writerequires(opener, requirements):
1225 def writerequires(opener, requirements):
1123 with opener('requires', 'w') as fp:
1226 with opener('requires', 'w') as fp:
1124 for r in sorted(requirements):
1227 for r in sorted(requirements):
1125 fp.write("%s\n" % r)
1228 fp.write("%s\n" % r)
1126
1229
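For context, the .hg/requires file read and written by the two helpers above is plain text with one feature name per line, for example (typical but repository-dependent entries):

    dotencode
    fncache
    generaldelta
    revlogv1
    store

readrequires(repo.vfs, supported) raises RequirementError for entries outside 'supported'; writerequires(repo.vfs, requirements) rewrites the file in sorted order.
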
1127 class filecachesubentry(object):
1230 class filecachesubentry(object):
1128 def __init__(self, path, stat):
1231 def __init__(self, path, stat):
1129 self.path = path
1232 self.path = path
1130 self.cachestat = None
1233 self.cachestat = None
1131 self._cacheable = None
1234 self._cacheable = None
1132
1235
1133 if stat:
1236 if stat:
1134 self.cachestat = filecachesubentry.stat(self.path)
1237 self.cachestat = filecachesubentry.stat(self.path)
1135
1238
1136 if self.cachestat:
1239 if self.cachestat:
1137 self._cacheable = self.cachestat.cacheable()
1240 self._cacheable = self.cachestat.cacheable()
1138 else:
1241 else:
1139 # None means we don't know yet
1242 # None means we don't know yet
1140 self._cacheable = None
1243 self._cacheable = None
1141
1244
1142 def refresh(self):
1245 def refresh(self):
1143 if self.cacheable():
1246 if self.cacheable():
1144 self.cachestat = filecachesubentry.stat(self.path)
1247 self.cachestat = filecachesubentry.stat(self.path)
1145
1248
1146 def cacheable(self):
1249 def cacheable(self):
1147 if self._cacheable is not None:
1250 if self._cacheable is not None:
1148 return self._cacheable
1251 return self._cacheable
1149
1252
1150 # we don't know yet, assume it is for now
1253 # we don't know yet, assume it is for now
1151 return True
1254 return True
1152
1255
1153 def changed(self):
1256 def changed(self):
1154 # no point in going further if we can't cache it
1257 # no point in going further if we can't cache it
1155 if not self.cacheable():
1258 if not self.cacheable():
1156 return True
1259 return True
1157
1260
1158 newstat = filecachesubentry.stat(self.path)
1261 newstat = filecachesubentry.stat(self.path)
1159
1262
1160 # we may not know if it's cacheable yet, check again now
1263 # we may not know if it's cacheable yet, check again now
1161 if newstat and self._cacheable is None:
1264 if newstat and self._cacheable is None:
1162 self._cacheable = newstat.cacheable()
1265 self._cacheable = newstat.cacheable()
1163
1266
1164 # check again
1267 # check again
1165 if not self._cacheable:
1268 if not self._cacheable:
1166 return True
1269 return True
1167
1270
1168 if self.cachestat != newstat:
1271 if self.cachestat != newstat:
1169 self.cachestat = newstat
1272 self.cachestat = newstat
1170 return True
1273 return True
1171 else:
1274 else:
1172 return False
1275 return False
1173
1276
1174 @staticmethod
1277 @staticmethod
1175 def stat(path):
1278 def stat(path):
1176 try:
1279 try:
1177 return util.cachestat(path)
1280 return util.cachestat(path)
1178 except OSError as e:
1281 except OSError as e:
1179 if e.errno != errno.ENOENT:
1282 if e.errno != errno.ENOENT:
1180 raise
1283 raise
1181
1284
1182 class filecacheentry(object):
1285 class filecacheentry(object):
1183 def __init__(self, paths, stat=True):
1286 def __init__(self, paths, stat=True):
1184 self._entries = []
1287 self._entries = []
1185 for path in paths:
1288 for path in paths:
1186 self._entries.append(filecachesubentry(path, stat))
1289 self._entries.append(filecachesubentry(path, stat))
1187
1290
1188 def changed(self):
1291 def changed(self):
1189 '''true if any entry has changed'''
1292 '''true if any entry has changed'''
1190 for entry in self._entries:
1293 for entry in self._entries:
1191 if entry.changed():
1294 if entry.changed():
1192 return True
1295 return True
1193 return False
1296 return False
1194
1297
1195 def refresh(self):
1298 def refresh(self):
1196 for entry in self._entries:
1299 for entry in self._entries:
1197 entry.refresh()
1300 entry.refresh()
1198
1301
1199 class filecache(object):
1302 class filecache(object):
1200 '''A property-like decorator that tracks files under .hg/ for updates.
1303 '''A property-like decorator that tracks files under .hg/ for updates.
1201
1304
1202 Records stat info when called in _filecache.
1305 Records stat info when called in _filecache.
1203
1306
1204 On subsequent calls, compares old stat info with new info, and recreates the
1307 On subsequent calls, compares old stat info with new info, and recreates the
1205 object when any of the files changes, updating the new stat info in
1308 object when any of the files changes, updating the new stat info in
1206 _filecache.
1309 _filecache.
1207
1310
1208 Mercurial either atomically renames or appends to files under .hg,
1311 Mercurial either atomically renames or appends to files under .hg,
1209 so to ensure the cache is reliable we need the filesystem to be able
1312 so to ensure the cache is reliable we need the filesystem to be able
1210 to tell us if a file has been replaced. If it can't, we fall back to
1313 to tell us if a file has been replaced. If it can't, we fall back to
1211 recreating the object on every call (essentially the same behavior as
1314 recreating the object on every call (essentially the same behavior as
1212 propertycache).
1315 propertycache).
1213
1316
1214 '''
1317 '''
1215 def __init__(self, *paths):
1318 def __init__(self, *paths):
1216 self.paths = paths
1319 self.paths = paths
1217
1320
1218 def join(self, obj, fname):
1321 def join(self, obj, fname):
1219 """Used to compute the runtime path of a cached file.
1322 """Used to compute the runtime path of a cached file.
1220
1323
1221 Users should subclass filecache and provide their own version of this
1324 Users should subclass filecache and provide their own version of this
1222 function to call the appropriate join function on 'obj' (an instance
1325 function to call the appropriate join function on 'obj' (an instance
1223 of the class that its member function was decorated).
1326 of the class that its member function was decorated).
1224 """
1327 """
1225 return obj.join(fname)
1328 return obj.join(fname)
1226
1329
1227 def __call__(self, func):
1330 def __call__(self, func):
1228 self.func = func
1331 self.func = func
1229 self.name = func.__name__
1332 self.name = func.__name__
1230 return self
1333 return self
1231
1334
1232 def __get__(self, obj, type=None):
1335 def __get__(self, obj, type=None):
1233 # if accessed on the class, return the descriptor itself.
1336 # if accessed on the class, return the descriptor itself.
1234 if obj is None:
1337 if obj is None:
1235 return self
1338 return self
1236 # do we need to check if the file changed?
1339 # do we need to check if the file changed?
1237 if self.name in obj.__dict__:
1340 if self.name in obj.__dict__:
1238 assert self.name in obj._filecache, self.name
1341 assert self.name in obj._filecache, self.name
1239 return obj.__dict__[self.name]
1342 return obj.__dict__[self.name]
1240
1343
1241 entry = obj._filecache.get(self.name)
1344 entry = obj._filecache.get(self.name)
1242
1345
1243 if entry:
1346 if entry:
1244 if entry.changed():
1347 if entry.changed():
1245 entry.obj = self.func(obj)
1348 entry.obj = self.func(obj)
1246 else:
1349 else:
1247 paths = [self.join(obj, path) for path in self.paths]
1350 paths = [self.join(obj, path) for path in self.paths]
1248
1351
1249 # We stat -before- creating the object so our cache doesn't lie if
1352 # We stat -before- creating the object so our cache doesn't lie if
1250 # a writer modified between the time we read and stat
1353 # a writer modified between the time we read and stat
1251 entry = filecacheentry(paths, True)
1354 entry = filecacheentry(paths, True)
1252 entry.obj = self.func(obj)
1355 entry.obj = self.func(obj)
1253
1356
1254 obj._filecache[self.name] = entry
1357 obj._filecache[self.name] = entry
1255
1358
1256 obj.__dict__[self.name] = entry.obj
1359 obj.__dict__[self.name] = entry.obj
1257 return entry.obj
1360 return entry.obj
1258
1361
1259 def __set__(self, obj, value):
1362 def __set__(self, obj, value):
1260 if self.name not in obj._filecache:
1363 if self.name not in obj._filecache:
1261 # we add an entry for the missing value because X in __dict__
1364 # we add an entry for the missing value because X in __dict__
1262 # implies X in _filecache
1365 # implies X in _filecache
1263 paths = [self.join(obj, path) for path in self.paths]
1366 paths = [self.join(obj, path) for path in self.paths]
1264 ce = filecacheentry(paths, False)
1367 ce = filecacheentry(paths, False)
1265 obj._filecache[self.name] = ce
1368 obj._filecache[self.name] = ce
1266 else:
1369 else:
1267 ce = obj._filecache[self.name]
1370 ce = obj._filecache[self.name]
1268
1371
1269 ce.obj = value # update cached copy
1372 ce.obj = value # update cached copy
1270 obj.__dict__[self.name] = value # update copy returned by obj.x
1373 obj.__dict__[self.name] = value # update copy returned by obj.x
1271
1374
1272 def __delete__(self, obj):
1375 def __delete__(self, obj):
1273 try:
1376 try:
1274 del obj.__dict__[self.name]
1377 del obj.__dict__[self.name]
1275 except KeyError:
1378 except KeyError:
1276 raise AttributeError(self.name)
1379 raise AttributeError(self.name)
1277
1380
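To make the descriptor protocol above concrete, a self-contained hedged sketch of using filecache outside of localrepo; the subclass, attribute, and file names are hypothetical.

    import os
    from mercurial import scmutil

    class rootfilecache(scmutil.filecache):
        # hypothetical subclass: resolve tracked paths against obj.root
        def join(self, obj, fname):
            return os.path.join(obj.root, fname)

    class settings(object):
        def __init__(self, root):
            self.root = root
            self._filecache = {}    # the descriptor keeps its stat info here

        @rootfilecache('settings.ini')
        def data(self):
            # re-run only when settings.ini is replaced or appended to
            with open(os.path.join(self.root, 'settings.ini')) as fp:
                return fp.read()
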
1278 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1381 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1279 if lock is None:
1382 if lock is None:
1280 raise error.LockInheritanceContractViolation(
1383 raise error.LockInheritanceContractViolation(
1281 'lock can only be inherited while held')
1384 'lock can only be inherited while held')
1282 if environ is None:
1385 if environ is None:
1283 environ = {}
1386 environ = {}
1284 with lock.inherit() as locker:
1387 with lock.inherit() as locker:
1285 environ[envvar] = locker
1388 environ[envvar] = locker
1286 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1389 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1287
1390
1288 def wlocksub(repo, cmd, *args, **kwargs):
1391 def wlocksub(repo, cmd, *args, **kwargs):
1289 """run cmd as a subprocess that allows inheriting repo's wlock
1392 """run cmd as a subprocess that allows inheriting repo's wlock
1290
1393
1291 This can only be called while the wlock is held. This takes all the
1394 This can only be called while the wlock is held. This takes all the
1292 arguments that ui.system does, and returns the exit code of the
1395 arguments that ui.system does, and returns the exit code of the
1293 subprocess."""
1396 subprocess."""
1294 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1397 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1295 **kwargs)
1398 **kwargs)
1296
1399
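A hedged sketch of the lock-inheritance helper above; the command is only a stand-in for a real cooperating subcommand, and the wlock must already be held.

    with repo.wlock():
        rc = scmutil.wlocksub(repo, 'hg -R . debuglocks')
        # the child process sees HG_WLOCK_LOCKER in its environment and rc
        # is the exit code, exactly as ui.system() would report it
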
1297 def gdinitconfig(ui):
1400 def gdinitconfig(ui):
1298 """helper function to know if a repo should be created as general delta
1401 """helper function to know if a repo should be created as general delta
1299 """
1402 """
1300 # experimental config: format.generaldelta
1403 # experimental config: format.generaldelta
1301 return (ui.configbool('format', 'generaldelta', False)
1404 return (ui.configbool('format', 'generaldelta', False)
1302 or ui.configbool('format', 'usegeneraldelta', True))
1405 or ui.configbool('format', 'usegeneraldelta', True))
1303
1406
1304 def gddeltaconfig(ui):
1407 def gddeltaconfig(ui):
1305 """helper function to know if incoming delta should be optimised
1408 """helper function to know if incoming delta should be optimised
1306 """
1409 """
1307 # experimental config: format.generaldelta
1410 # experimental config: format.generaldelta
1308 return ui.configbool('format', 'generaldelta', False)
1411 return ui.configbool('format', 'generaldelta', False)
1309
1412
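The two helpers above only read configuration; a hedged hgrc sketch of the knobs involved:

    [format]
    # gdinitconfig(): new repositories get the generaldelta requirement
    usegeneraldelta = True
    # gddeltaconfig(): also recompute deltas of incoming changegroups
    generaldelta = True
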
1310 class closewrapbase(object):
1413 class closewrapbase(object):
1311 """Base class of wrapper, which hooks closing
1414 """Base class of wrapper, which hooks closing
1312
1415
1313 Do not instantiate outside of the vfs layer.
1416 Do not instantiate outside of the vfs layer.
1314 """
1417 """
1315 def __init__(self, fh):
1418 def __init__(self, fh):
1316 object.__setattr__(self, '_origfh', fh)
1419 object.__setattr__(self, '_origfh', fh)
1317
1420
1318 def __getattr__(self, attr):
1421 def __getattr__(self, attr):
1319 return getattr(self._origfh, attr)
1422 return getattr(self._origfh, attr)
1320
1423
1321 def __setattr__(self, attr, value):
1424 def __setattr__(self, attr, value):
1322 return setattr(self._origfh, attr, value)
1425 return setattr(self._origfh, attr, value)
1323
1426
1324 def __delattr__(self, attr):
1427 def __delattr__(self, attr):
1325 return delattr(self._origfh, attr)
1428 return delattr(self._origfh, attr)
1326
1429
1327 def __enter__(self):
1430 def __enter__(self):
1328 return self._origfh.__enter__()
1431 return self._origfh.__enter__()
1329
1432
1330 def __exit__(self, exc_type, exc_value, exc_tb):
1433 def __exit__(self, exc_type, exc_value, exc_tb):
1331 raise NotImplementedError('attempted instantiating ' + str(type(self)))
1434 raise NotImplementedError('attempted instantiating ' + str(type(self)))
1332
1435
1333 def close(self):
1436 def close(self):
1334 raise NotImplementedError('attempted instantiating ' + str(type(self)))
1437 raise NotImplementedError('attempted instantiating ' + str(type(self)))
1335
1438
1336 class delayclosedfile(closewrapbase):
1439 class delayclosedfile(closewrapbase):
1337 """Proxy for a file object whose close is delayed.
1440 """Proxy for a file object whose close is delayed.
1338
1441
1339 Do not instantiate outside of the vfs layer.
1442 Do not instantiate outside of the vfs layer.
1340 """
1443 """
1341 def __init__(self, fh, closer):
1444 def __init__(self, fh, closer):
1342 super(delayclosedfile, self).__init__(fh)
1445 super(delayclosedfile, self).__init__(fh)
1343 object.__setattr__(self, '_closer', closer)
1446 object.__setattr__(self, '_closer', closer)
1344
1447
1345 def __exit__(self, exc_type, exc_value, exc_tb):
1448 def __exit__(self, exc_type, exc_value, exc_tb):
1346 self._closer.close(self._origfh)
1449 self._closer.close(self._origfh)
1347
1450
1348 def close(self):
1451 def close(self):
1349 self._closer.close(self._origfh)
1452 self._closer.close(self._origfh)
1350
1453
1351 class backgroundfilecloser(object):
1454 class backgroundfilecloser(object):
1352 """Coordinates background closing of file handles on multiple threads."""
1455 """Coordinates background closing of file handles on multiple threads."""
1353 def __init__(self, ui, expectedcount=-1):
1456 def __init__(self, ui, expectedcount=-1):
1354 self._running = False
1457 self._running = False
1355 self._entered = False
1458 self._entered = False
1356 self._threads = []
1459 self._threads = []
1357 self._threadexception = None
1460 self._threadexception = None
1358
1461
1359 # Only Windows/NTFS has slow file closing. So only enable by default
1462 # Only Windows/NTFS has slow file closing. So only enable by default
1360 # on that platform. But allow to be enabled elsewhere for testing.
1463 # on that platform. But allow to be enabled elsewhere for testing.
1361 defaultenabled = os.name == 'nt'
1464 defaultenabled = os.name == 'nt'
1362 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1465 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1363
1466
1364 if not enabled:
1467 if not enabled:
1365 return
1468 return
1366
1469
1367 # There is overhead to starting and stopping the background threads.
1470 # There is overhead to starting and stopping the background threads.
1368 # Don't do background processing unless the file count is large enough
1471 # Don't do background processing unless the file count is large enough
1369 # to justify it.
1472 # to justify it.
1370 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1473 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1371 2048)
1474 2048)
1372 # FUTURE dynamically start background threads after minfilecount closes.
1475 # FUTURE dynamically start background threads after minfilecount closes.
1373 # (We don't currently have any callers that don't know their file count)
1476 # (We don't currently have any callers that don't know their file count)
1374 if expectedcount > 0 and expectedcount < minfilecount:
1477 if expectedcount > 0 and expectedcount < minfilecount:
1375 return
1478 return
1376
1479
1377 # Windows defaults to a limit of 512 open files. A buffer of 128
1480 # Windows defaults to a limit of 512 open files. A buffer of 128
1378 # should give us enough headway.
1481 # should give us enough headway.
1379 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1482 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1380 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1483 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1381
1484
1382 ui.debug('starting %d threads for background file closing\n' %
1485 ui.debug('starting %d threads for background file closing\n' %
1383 threadcount)
1486 threadcount)
1384
1487
1385 self._queue = util.queue(maxsize=maxqueue)
1488 self._queue = util.queue(maxsize=maxqueue)
1386 self._running = True
1489 self._running = True
1387
1490
1388 for i in range(threadcount):
1491 for i in range(threadcount):
1389 t = threading.Thread(target=self._worker, name='backgroundcloser')
1492 t = threading.Thread(target=self._worker, name='backgroundcloser')
1390 self._threads.append(t)
1493 self._threads.append(t)
1391 t.start()
1494 t.start()
1392
1495
1393 def __enter__(self):
1496 def __enter__(self):
1394 self._entered = True
1497 self._entered = True
1395 return self
1498 return self
1396
1499
1397 def __exit__(self, exc_type, exc_value, exc_tb):
1500 def __exit__(self, exc_type, exc_value, exc_tb):
1398 self._running = False
1501 self._running = False
1399
1502
1400 # Wait for threads to finish closing so open files don't linger for
1503 # Wait for threads to finish closing so open files don't linger for
1401 # longer than lifetime of context manager.
1504 # longer than lifetime of context manager.
1402 for t in self._threads:
1505 for t in self._threads:
1403 t.join()
1506 t.join()
1404
1507
1405 def _worker(self):
1508 def _worker(self):
1406 """Main routine for worker thread."""
1509 """Main routine for worker thread."""
1407 while True:
1510 while True:
1408 try:
1511 try:
1409 fh = self._queue.get(block=True, timeout=0.100)
1512 fh = self._queue.get(block=True, timeout=0.100)
1410 # Need to catch or the thread will terminate and
1513 # Need to catch or the thread will terminate and
1411 # we could orphan file descriptors.
1514 # we could orphan file descriptors.
1412 try:
1515 try:
1413 fh.close()
1516 fh.close()
1414 except Exception as e:
1517 except Exception as e:
1415 # Stash so can re-raise from main thread later.
1518 # Stash so can re-raise from main thread later.
1416 self._threadexception = e
1519 self._threadexception = e
1417 except util.empty:
1520 except util.empty:
1418 if not self._running:
1521 if not self._running:
1419 break
1522 break
1420
1523
1421 def close(self, fh):
1524 def close(self, fh):
1422 """Schedule a file for closing."""
1525 """Schedule a file for closing."""
1423 if not self._entered:
1526 if not self._entered:
1424 raise error.Abort(_('can only call close() when context manager '
1527 raise error.Abort(_('can only call close() when context manager '
1425 'active'))
1528 'active'))
1426
1529
1427 # If a background thread encountered an exception, raise now so we fail
1530 # If a background thread encountered an exception, raise now so we fail
1428 # fast. Otherwise we may potentially go on for minutes until the error
1531 # fast. Otherwise we may potentially go on for minutes until the error
1429 # is acted on.
1532 # is acted on.
1430 if self._threadexception:
1533 if self._threadexception:
1431 e = self._threadexception
1534 e = self._threadexception
1432 self._threadexception = None
1535 self._threadexception = None
1433 raise e
1536 raise e
1434
1537
1435 # If we're not actively running, close synchronously.
1538 # If we're not actively running, close synchronously.
1436 if not self._running:
1539 if not self._running:
1437 fh.close()
1540 fh.close()
1438 return
1541 return
1439
1542
1440 self._queue.put(fh, block=True, timeout=None)
1543 self._queue.put(fh, block=True, timeout=None)
1441
1544
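A hedged sketch of the intended usage pattern (normally the vfs layer drives this); the file names and the configuration override are placeholders.

    from mercurial import scmutil, ui as uimod

    myui = uimod.ui()
    myui.setconfig('worker', 'backgroundclose', 'True')  # force-enable off Windows

    names = ['file%d' % i for i in range(5000)]          # placeholder paths
    with scmutil.backgroundfilecloser(myui, expectedcount=len(names)) as closer:
        for name in names:
            fh = open(name, 'wb')
            fh.write(b'...')
            closer.close(fh)     # queued; a worker thread calls fh.close()
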
1442 class checkambigatclosing(closewrapbase):
1545 class checkambigatclosing(closewrapbase):
1443 """Proxy for a file object, to avoid ambiguity of file stat
1546 """Proxy for a file object, to avoid ambiguity of file stat
1444
1547
1445 See also util.filestat for detail about "ambiguity of file stat".
1548 See also util.filestat for detail about "ambiguity of file stat".
1446
1549
1447 This proxy is useful only if the target file is guarded by any
1550 This proxy is useful only if the target file is guarded by any
1448 lock (e.g. repo.lock or repo.wlock)
1551 lock (e.g. repo.lock or repo.wlock)
1449
1552
1450 Do not instantiate outside of the vfs layer.
1553 Do not instantiate outside of the vfs layer.
1451 """
1554 """
1452 def __init__(self, fh):
1555 def __init__(self, fh):
1453 super(checkambigatclosing, self).__init__(fh)
1556 super(checkambigatclosing, self).__init__(fh)
1454 object.__setattr__(self, '_oldstat', util.filestat(fh.name))
1557 object.__setattr__(self, '_oldstat', util.filestat(fh.name))
1455
1558
1456 def _checkambig(self):
1559 def _checkambig(self):
1457 oldstat = self._oldstat
1560 oldstat = self._oldstat
1458 if oldstat.stat:
1561 if oldstat.stat:
1459 newstat = util.filestat(self._origfh.name)
1562 newstat = util.filestat(self._origfh.name)
1460 if newstat.isambig(oldstat):
1563 if newstat.isambig(oldstat):
1461 # stat of changed file is ambiguous to original one
1564 # stat of changed file is ambiguous to original one
1462 newstat.avoidambig(self._origfh.name, oldstat)
1565 newstat.avoidambig(self._origfh.name, oldstat)
1463
1566
1464 def __exit__(self, exc_type, exc_value, exc_tb):
1567 def __exit__(self, exc_type, exc_value, exc_tb):
1465 self._origfh.__exit__(exc_type, exc_value, exc_tb)
1568 self._origfh.__exit__(exc_type, exc_value, exc_tb)
1466 self._checkambig()
1569 self._checkambig()
1467
1570
1468 def close(self):
1571 def close(self):
1469 self._origfh.close()
1572 self._origfh.close()
1470 self._checkambig()
1573 self._checkambig()
@@ -1,176 +1,179 b''
1
1
2 $ cat << EOF > buggylocking.py
2 $ cat << EOF > buggylocking.py
3 > """A small extension that tests our developer warnings
3 > """A small extension that tests our developer warnings
4 > """
4 > """
5 >
5 >
6 > from mercurial import cmdutil, repair, revset
6 > from mercurial import cmdutil, repair, revset
7 >
7 >
8 > cmdtable = {}
8 > cmdtable = {}
9 > command = cmdutil.command(cmdtable)
9 > command = cmdutil.command(cmdtable)
10 >
10 >
11 > @command('buggylocking', [], '')
11 > @command('buggylocking', [], '')
12 > def buggylocking(ui, repo):
12 > def buggylocking(ui, repo):
13 > lo = repo.lock()
13 > lo = repo.lock()
14 > wl = repo.wlock()
14 > wl = repo.wlock()
15 > wl.release()
15 > wl.release()
16 > lo.release()
16 > lo.release()
17 >
17 >
18 > @command('buggytransaction', [], '')
18 > @command('buggytransaction', [], '')
19 > def buggylocking(ui, repo):
19 > def buggylocking(ui, repo):
20 > tr = repo.transaction('buggy')
20 > tr = repo.transaction('buggy')
21 > # make sure we roll back the transaction as we don't want to rely on the __del__
21 > # make sure we roll back the transaction as we don't want to rely on the __del__
22 > tr.release()
22 > tr.release()
23 >
23 >
24 > @command('properlocking', [], '')
24 > @command('properlocking', [], '')
25 > def properlocking(ui, repo):
25 > def properlocking(ui, repo):
26 > """check that reentrance is fine"""
26 > """check that reentrance is fine"""
27 > wl = repo.wlock()
27 > wl = repo.wlock()
28 > lo = repo.lock()
28 > lo = repo.lock()
29 > tr = repo.transaction('proper')
29 > tr = repo.transaction('proper')
30 > tr2 = repo.transaction('proper')
30 > tr2 = repo.transaction('proper')
31 > lo2 = repo.lock()
31 > lo2 = repo.lock()
32 > wl2 = repo.wlock()
32 > wl2 = repo.wlock()
33 > wl2.release()
33 > wl2.release()
34 > lo2.release()
34 > lo2.release()
35 > tr2.close()
35 > tr2.close()
36 > tr.close()
36 > tr.close()
37 > lo.release()
37 > lo.release()
38 > wl.release()
38 > wl.release()
39 >
39 >
40 > @command('nowaitlocking', [], '')
40 > @command('nowaitlocking', [], '')
41 > def nowaitlocking(ui, repo):
41 > def nowaitlocking(ui, repo):
42 > lo = repo.lock()
42 > lo = repo.lock()
43 > wl = repo.wlock(wait=False)
43 > wl = repo.wlock(wait=False)
44 > wl.release()
44 > wl.release()
45 > lo.release()
45 > lo.release()
46 >
46 >
47 > @command('stripintr', [], '')
47 > @command('stripintr', [], '')
48 > def stripintr(ui, repo):
48 > def stripintr(ui, repo):
49 > lo = repo.lock()
49 > lo = repo.lock()
50 > tr = repo.transaction('foobar')
50 > tr = repo.transaction('foobar')
51 > try:
51 > try:
52 > repair.strip(repo.ui, repo, [repo['.'].node()])
52 > repair.strip(repo.ui, repo, [repo['.'].node()])
53 > finally:
53 > finally:
54 > lo.release()
54 > lo.release()
55 > @command('oldanddeprecated', [], '')
55 > @command('oldanddeprecated', [], '')
56 > def oldanddeprecated(ui, repo):
56 > def oldanddeprecated(ui, repo):
57 > """test deprecation warning API"""
57 > """test deprecation warning API"""
58 > def foobar(ui):
58 > def foobar(ui):
59 > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
59 > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
60 > foobar(ui)
60 > foobar(ui)
61 >
61 >
62 > def oldstylerevset(repo, subset, x):
62 > def oldstylerevset(repo, subset, x):
63 > return list(subset)
63 > return list(subset)
64 >
64 >
65 > revset.symbols['oldstyle'] = oldstylerevset
65 > revset.symbols['oldstyle'] = oldstylerevset
66 > EOF
66 > EOF
67
67
68 $ cat << EOF >> $HGRCPATH
68 $ cat << EOF >> $HGRCPATH
69 > [extensions]
69 > [extensions]
70 > buggylocking=$TESTTMP/buggylocking.py
70 > buggylocking=$TESTTMP/buggylocking.py
71 > mock=$TESTDIR/mockblackbox.py
71 > mock=$TESTDIR/mockblackbox.py
72 > blackbox=
72 > blackbox=
73 > [devel]
73 > [devel]
74 > all-warnings=1
74 > all-warnings=1
75 > EOF
75 > EOF
76
76
77 $ hg init lock-checker
77 $ hg init lock-checker
78 $ cd lock-checker
78 $ cd lock-checker
79 $ hg buggylocking
79 $ hg buggylocking
80 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
80 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
81 $ cat << EOF >> $HGRCPATH
81 $ cat << EOF >> $HGRCPATH
82 > [devel]
82 > [devel]
83 > all=0
83 > all=0
84 > check-locks=1
84 > check-locks=1
85 > EOF
85 > EOF
86 $ hg buggylocking
86 $ hg buggylocking
87 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
87 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
88 $ hg buggylocking --traceback
88 $ hg buggylocking --traceback
89 devel-warn: "wlock" acquired after "lock" at:
89 devel-warn: "wlock" acquired after "lock" at:
90 */hg:* in * (glob)
90 */hg:* in * (glob)
91 */mercurial/dispatch.py:* in run (glob)
91 */mercurial/dispatch.py:* in run (glob)
92 */mercurial/dispatch.py:* in dispatch (glob)
92 */mercurial/dispatch.py:* in dispatch (glob)
93 */mercurial/dispatch.py:* in _runcatch (glob)
93 */mercurial/dispatch.py:* in _runcatch (glob)
94 */mercurial/dispatch.py:* in callcatch (glob)
94 */mercurial/dispatch.py:* in callcatch (glob)
95 */mercurial/scmutil.py* in callcatch (glob)
95 */mercurial/dispatch.py:* in _runcatchfunc (glob)
96 */mercurial/dispatch.py:* in _runcatchfunc (glob)
96 */mercurial/dispatch.py:* in _dispatch (glob)
97 */mercurial/dispatch.py:* in _dispatch (glob)
97 */mercurial/dispatch.py:* in runcommand (glob)
98 */mercurial/dispatch.py:* in runcommand (glob)
98 */mercurial/dispatch.py:* in _runcommand (glob)
99 */mercurial/dispatch.py:* in _runcommand (glob)
99 */mercurial/dispatch.py:* in <lambda> (glob)
100 */mercurial/dispatch.py:* in <lambda> (glob)
100 */mercurial/util.py:* in check (glob)
101 */mercurial/util.py:* in check (glob)
101 $TESTTMP/buggylocking.py:* in buggylocking (glob)
102 $TESTTMP/buggylocking.py:* in buggylocking (glob)
102 $ hg properlocking
103 $ hg properlocking
103 $ hg nowaitlocking
104 $ hg nowaitlocking
104
105
105 $ echo a > a
106 $ echo a > a
106 $ hg add a
107 $ hg add a
107 $ hg commit -m a
108 $ hg commit -m a
108 $ hg stripintr
109 $ hg stripintr
109 saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
110 saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
110 abort: programming error: cannot strip from inside a transaction
111 abort: programming error: cannot strip from inside a transaction
111 (contact your extension maintainer)
112 (contact your extension maintainer)
112 [255]
113 [255]
113
114
114 $ hg log -r "oldstyle()" -T '{rev}\n'
115 $ hg log -r "oldstyle()" -T '{rev}\n'
115 devel-warn: revset "oldstyle" uses list instead of smartset
116 devel-warn: revset "oldstyle" uses list instead of smartset
116 (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
117 (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
117 0
118 0
118 $ hg oldanddeprecated
119 $ hg oldanddeprecated
119 devel-warn: foorbar is deprecated, go shopping
120 devel-warn: foorbar is deprecated, go shopping
120 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
121 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
121
122
122 $ hg oldanddeprecated --traceback
123 $ hg oldanddeprecated --traceback
123 devel-warn: foorbar is deprecated, go shopping
124 devel-warn: foorbar is deprecated, go shopping
124 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
125 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
125 */hg:* in <module> (glob)
126 */hg:* in <module> (glob)
126 */mercurial/dispatch.py:* in run (glob)
127 */mercurial/dispatch.py:* in run (glob)
127 */mercurial/dispatch.py:* in dispatch (glob)
128 */mercurial/dispatch.py:* in dispatch (glob)
128 */mercurial/dispatch.py:* in _runcatch (glob)
129 */mercurial/dispatch.py:* in _runcatch (glob)
129 */mercurial/dispatch.py:* in callcatch (glob)
130 */mercurial/dispatch.py:* in callcatch (glob)
131 */mercurial/scmutil.py* in callcatch (glob)
130 */mercurial/dispatch.py:* in _runcatchfunc (glob)
132 */mercurial/dispatch.py:* in _runcatchfunc (glob)
131 */mercurial/dispatch.py:* in _dispatch (glob)
133 */mercurial/dispatch.py:* in _dispatch (glob)
132 */mercurial/dispatch.py:* in runcommand (glob)
134 */mercurial/dispatch.py:* in runcommand (glob)
133 */mercurial/dispatch.py:* in _runcommand (glob)
135 */mercurial/dispatch.py:* in _runcommand (glob)
134 */mercurial/dispatch.py:* in <lambda> (glob)
136 */mercurial/dispatch.py:* in <lambda> (glob)
135 */mercurial/util.py:* in check (glob)
137 */mercurial/util.py:* in check (glob)
136 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
138 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
137 $ hg blackbox -l 9
139 $ hg blackbox -l 9
138 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" uses list instead of smartset
140 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" uses list instead of smartset
139 (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
141 (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
140 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> log -r oldstyle() -T {rev}\n exited 0 after * seconds (glob)
142 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> log -r oldstyle() -T {rev}\n exited 0 after * seconds (glob)
141 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
143 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
142 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
144 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
143 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
145 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
144 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob)
146 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob)
145 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
147 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
146 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
148 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
147 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
149 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
148 */hg:* in <module> (glob)
150 */hg:* in <module> (glob)
149 */mercurial/dispatch.py:* in run (glob)
151 */mercurial/dispatch.py:* in run (glob)
150 */mercurial/dispatch.py:* in dispatch (glob)
152 */mercurial/dispatch.py:* in dispatch (glob)
151 */mercurial/dispatch.py:* in _runcatch (glob)
153 */mercurial/dispatch.py:* in _runcatch (glob)
152 */mercurial/dispatch.py:* in callcatch (glob)
154 */mercurial/dispatch.py:* in callcatch (glob)
155 */mercurial/scmutil.py* in callcatch (glob)
153 */mercurial/dispatch.py:* in _runcatchfunc (glob)
156 */mercurial/dispatch.py:* in _runcatchfunc (glob)
154 */mercurial/dispatch.py:* in _dispatch (glob)
157 */mercurial/dispatch.py:* in _dispatch (glob)
155 */mercurial/dispatch.py:* in runcommand (glob)
158 */mercurial/dispatch.py:* in runcommand (glob)
156 */mercurial/dispatch.py:* in _runcommand (glob)
159 */mercurial/dispatch.py:* in _runcommand (glob)
157 */mercurial/dispatch.py:* in <lambda> (glob)
160 */mercurial/dispatch.py:* in <lambda> (glob)
158 */mercurial/util.py:* in check (glob)
161 */mercurial/util.py:* in check (glob)
159 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
162 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
160 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
163 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
161 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 9
164 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 9
162
165
163 Test programming error failure:
166 Test programming error failure:
164
167
165 $ hg buggytransaction 2>&1 | egrep -v '^ '
168 $ hg buggytransaction 2>&1 | egrep -v '^ '
166 ** Unknown exception encountered with possibly-broken third-party extension buggylocking
169 ** Unknown exception encountered with possibly-broken third-party extension buggylocking
167 ** which supports versions unknown of Mercurial.
170 ** which supports versions unknown of Mercurial.
168 ** Please disable buggylocking and try your action again.
171 ** Please disable buggylocking and try your action again.
169 ** If that fixes the bug please report it to the extension author.
172 ** If that fixes the bug please report it to the extension author.
170 ** Python * (glob)
173 ** Python * (glob)
171 ** Mercurial Distributed SCM (*) (glob)
174 ** Mercurial Distributed SCM (*) (glob)
172 ** Extensions loaded: * (glob)
175 ** Extensions loaded: * (glob)
173 Traceback (most recent call last):
176 Traceback (most recent call last):
174 RuntimeError: programming error: transaction requires locking
177 RuntimeError: programming error: transaction requires locking
175
178
176 $ cd ..
179 $ cd ..