dispatch: move part of callcatch to scmutil...
Jun Wu
r30520:4338f87d default
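This changeset splits the previous all-in-one exception handler into two layers: the generic handlers (locks, repo/revlog errors, remote errors, IO/OS errors, memory, SystemExit, socket errors) move into a new scmutil.callcatch, while dispatch.callcatch keeps only the command- and config-level handlers (ambiguous/unknown commands, command and parse errors, handlecommandexception) and delegates the rest. The sketch below is a minimal, self-contained illustration of that layering; the names generic_callcatch, command_callcatch and UnknownCommandError are placeholders for illustration, not the real Mercurial API shown in the diff.

# Hypothetical sketch of the layering introduced by this changeset.
# generic_callcatch/command_callcatch stand in for scmutil.callcatch/
# dispatch.callcatch in the diff below.

import sys

class UnknownCommandError(Exception):
    """Stands in for mercurial.error.UnknownCommand (illustration only)."""

def generic_callcatch(warn, func):
    # Low-level layer (scmutil.callcatch in the diff): call func() and turn
    # generic failures into a warning message plus an exit code.
    try:
        return func()
    except MemoryError:
        warn("abort: out of memory\n")
    except OSError as inst:
        warn("abort: %s\n" % inst.strerror)
    return -1

def command_callcatch(warn, func):
    # High-level layer (dispatch.callcatch in the diff): delegate everything
    # generic and only add command-level handling on top.
    try:
        return generic_callcatch(warn, func)
    except UnknownCommandError as inst:
        warn("hg: unknown command '%s'\n" % inst.args[0])
    return -1

if __name__ == '__main__':
    def badcmd():
        raise UnknownCommandError('frobnicate')
    # Mirrors dispatch.run(): print the warning, exit with (return code & 255).
    sys.exit(command_callcatch(sys.stderr.write, badcmd) & 255)

The handler bodies in the diff are moved verbatim; only their home module changes, presumably so callers other than the command dispatcher can reuse the generic layer.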
mercurial/dispatch.py
@@ -1,975 +1,885 @@
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import, print_function
9 9
10 10 import atexit
11 11 import difflib
12 12 import errno
13 13 import os
14 14 import pdb
15 15 import re
16 16 import shlex
17 17 import signal
18 import socket
19 18 import sys
20 19 import time
21 20 import traceback
22 21
23 22
24 23 from .i18n import _
25 24
26 25 from . import (
27 26 cmdutil,
28 27 commands,
29 28 debugcommands,
30 29 demandimport,
31 30 encoding,
32 31 error,
33 32 extensions,
34 33 fancyopts,
35 34 fileset,
36 35 hg,
37 36 hook,
38 37 profiling,
39 38 pycompat,
40 39 revset,
40 scmutil,
41 41 templatefilters,
42 42 templatekw,
43 43 templater,
44 44 ui as uimod,
45 45 util,
46 46 )
47 47
48 48 class request(object):
49 49 def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
50 50 ferr=None):
51 51 self.args = args
52 52 self.ui = ui
53 53 self.repo = repo
54 54
55 55 # input/output/error streams
56 56 self.fin = fin
57 57 self.fout = fout
58 58 self.ferr = ferr
59 59
60 60 def run():
61 61 "run the command in sys.argv"
62 62 sys.exit((dispatch(request(pycompat.sysargv[1:])) or 0) & 255)
63 63
64 64 def _getsimilar(symbols, value):
65 65 sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
66 66 # The cutoff for similarity here is pretty arbitrary. It should
67 67 # probably be investigated and tweaked.
68 68 return [s for s in symbols if sim(s) > 0.6]
69 69
70 70 def _reportsimilar(write, similar):
71 71 if len(similar) == 1:
72 72 write(_("(did you mean %s?)\n") % similar[0])
73 73 elif similar:
74 74 ss = ", ".join(sorted(similar))
75 75 write(_("(did you mean one of %s?)\n") % ss)
76 76
77 77 def _formatparse(write, inst):
78 78 similar = []
79 79 if isinstance(inst, error.UnknownIdentifier):
80 80 # make sure to check fileset first, as revset can invoke fileset
81 81 similar = _getsimilar(inst.symbols, inst.function)
82 82 if len(inst.args) > 1:
83 83 write(_("hg: parse error at %s: %s\n") %
84 84 (inst.args[1], inst.args[0]))
85 85 if (inst.args[0][0] == ' '):
86 86 write(_("unexpected leading whitespace\n"))
87 87 else:
88 88 write(_("hg: parse error: %s\n") % inst.args[0])
89 89 _reportsimilar(write, similar)
90 90 if inst.hint:
91 91 write(_("(%s)\n") % inst.hint)
92 92
93 93 def dispatch(req):
94 94 "run the command specified in req.args"
95 95 if req.ferr:
96 96 ferr = req.ferr
97 97 elif req.ui:
98 98 ferr = req.ui.ferr
99 99 else:
100 100 ferr = util.stderr
101 101
102 102 try:
103 103 if not req.ui:
104 104 req.ui = uimod.ui()
105 105 if '--traceback' in req.args:
106 106 req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
107 107
108 108 # set ui streams from the request
109 109 if req.fin:
110 110 req.ui.fin = req.fin
111 111 if req.fout:
112 112 req.ui.fout = req.fout
113 113 if req.ferr:
114 114 req.ui.ferr = req.ferr
115 115 except error.Abort as inst:
116 116 ferr.write(_("abort: %s\n") % inst)
117 117 if inst.hint:
118 118 ferr.write(_("(%s)\n") % inst.hint)
119 119 return -1
120 120 except error.ParseError as inst:
121 121 _formatparse(ferr.write, inst)
122 122 return -1
123 123
124 124 msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
125 125 starttime = time.time()
126 126 ret = None
127 127 try:
128 128 ret = _runcatch(req)
129 129 except KeyboardInterrupt:
130 130 try:
131 131 req.ui.warn(_("interrupted!\n"))
132 132 except IOError as inst:
133 133 if inst.errno != errno.EPIPE:
134 134 raise
135 135 ret = -1
136 136 finally:
137 137 duration = time.time() - starttime
138 138 req.ui.flush()
139 139 req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
140 140 msg, ret or 0, duration)
141 141 return ret
142 142
143 143 def _runcatch(req):
144 144 def catchterm(*args):
145 145 raise error.SignalInterrupt
146 146
147 147 ui = req.ui
148 148 try:
149 149 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
150 150 num = getattr(signal, name, None)
151 151 if num:
152 152 signal.signal(num, catchterm)
153 153 except ValueError:
154 154 pass # happens if called in a thread
155 155
156 156 def _runcatchfunc():
157 157 try:
158 158 debugger = 'pdb'
159 159 debugtrace = {
160 160 'pdb' : pdb.set_trace
161 161 }
162 162 debugmortem = {
163 163 'pdb' : pdb.post_mortem
164 164 }
165 165
166 166 # read --config before doing anything else
167 167 # (e.g. to change trust settings for reading .hg/hgrc)
168 168 cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))
169 169
170 170 if req.repo:
171 171 # copy configs that were passed on the cmdline (--config) to
172 172 # the repo ui
173 173 for sec, name, val in cfgs:
174 174 req.repo.ui.setconfig(sec, name, val, source='--config')
175 175
176 176 # developer config: ui.debugger
177 177 debugger = ui.config("ui", "debugger")
178 178 debugmod = pdb
179 179 if not debugger or ui.plain():
180 180 # if we are in HGPLAIN mode, then disable custom debugging
181 181 debugger = 'pdb'
182 182 elif '--debugger' in req.args:
183 183 # This import can be slow for fancy debuggers, so only
184 184 # do it when absolutely necessary, i.e. when actual
185 185 # debugging has been requested
186 186 with demandimport.deactivated():
187 187 try:
188 188 debugmod = __import__(debugger)
189 189 except ImportError:
190 190 pass # Leave debugmod = pdb
191 191
192 192 debugtrace[debugger] = debugmod.set_trace
193 193 debugmortem[debugger] = debugmod.post_mortem
194 194
195 195 # enter the debugger before command execution
196 196 if '--debugger' in req.args:
197 197 ui.warn(_("entering debugger - "
198 198 "type c to continue starting hg or h for help\n"))
199 199
200 200 if (debugger != 'pdb' and
201 201 debugtrace[debugger] == debugtrace['pdb']):
202 202 ui.warn(_("%s debugger specified "
203 203 "but its module was not found\n") % debugger)
204 204 with demandimport.deactivated():
205 205 debugtrace[debugger]()
206 206 try:
207 207 return _dispatch(req)
208 208 finally:
209 209 ui.flush()
210 210 except: # re-raises
211 211 # enter the debugger when we hit an exception
212 212 if '--debugger' in req.args:
213 213 traceback.print_exc()
214 214 debugmortem[debugger](sys.exc_info()[2])
215 215 ui.traceback()
216 216 raise
217 217
218 218 return callcatch(ui, _runcatchfunc)
219 219
220 220 def callcatch(ui, func):
221 """call func() with global exception handling
222
223 return func() if no exception happens. otherwise do some error handling
224 and return an exit code accordingly.
221 """like scmutil.callcatch but handles more high-level exceptions about
222 config parsing and commands. besides, use handlecommandexception to handle
223 uncaught exceptions.
225 224 """
226 225 try:
227 return func()
228 # Global exception handling, alphabetically
229 # Mercurial-specific first, followed by built-in and library exceptions
226 return scmutil.callcatch(ui, func)
230 227 except error.AmbiguousCommand as inst:
231 228 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
232 229 (inst.args[0], " ".join(inst.args[1])))
233 except error.ParseError as inst:
234 _formatparse(ui.warn, inst)
235 return -1
236 except error.LockHeld as inst:
237 if inst.errno == errno.ETIMEDOUT:
238 reason = _('timed out waiting for lock held by %s') % inst.locker
239 else:
240 reason = _('lock held by %s') % inst.locker
241 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
242 except error.LockUnavailable as inst:
243 ui.warn(_("abort: could not lock %s: %s\n") %
244 (inst.desc or inst.filename, inst.strerror))
245 230 except error.CommandError as inst:
246 231 if inst.args[0]:
247 232 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
248 233 commands.help_(ui, inst.args[0], full=False, command=True)
249 234 else:
250 235 ui.warn(_("hg: %s\n") % inst.args[1])
251 236 commands.help_(ui, 'shortlist')
252 except error.OutOfBandError as inst:
253 if inst.args:
254 msg = _("abort: remote error:\n")
255 else:
256 msg = _("abort: remote error\n")
257 ui.warn(msg)
258 if inst.args:
259 ui.warn(''.join(inst.args))
260 if inst.hint:
261 ui.warn('(%s)\n' % inst.hint)
262 except error.RepoError as inst:
263 ui.warn(_("abort: %s!\n") % inst)
264 if inst.hint:
265 ui.warn(_("(%s)\n") % inst.hint)
266 except error.ResponseError as inst:
267 ui.warn(_("abort: %s") % inst.args[0])
268 if not isinstance(inst.args[1], basestring):
269 ui.warn(" %r\n" % (inst.args[1],))
270 elif not inst.args[1]:
271 ui.warn(_(" empty string\n"))
272 else:
273 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
274 except error.CensoredNodeError as inst:
275 ui.warn(_("abort: file censored %s!\n") % inst)
276 except error.RevlogError as inst:
277 ui.warn(_("abort: %s!\n") % inst)
278 except error.SignalInterrupt:
279 ui.warn(_("killed!\n"))
237 except error.ParseError as inst:
238 _formatparse(ui.warn, inst)
239 return -1
280 240 except error.UnknownCommand as inst:
281 241 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
282 242 try:
283 243 # check if the command is in a disabled extension
284 244 # (but don't check for extensions themselves)
285 245 commands.help_(ui, inst.args[0], unknowncmd=True)
286 246 except (error.UnknownCommand, error.Abort):
287 247 suggested = False
288 248 if len(inst.args) == 2:
289 249 sim = _getsimilar(inst.args[1], inst.args[0])
290 250 if sim:
291 251 _reportsimilar(ui.warn, sim)
292 252 suggested = True
293 253 if not suggested:
294 254 commands.help_(ui, 'shortlist')
295 except error.InterventionRequired as inst:
296 ui.warn("%s\n" % inst)
297 if inst.hint:
298 ui.warn(_("(%s)\n") % inst.hint)
299 return 1
300 except error.Abort as inst:
301 ui.warn(_("abort: %s\n") % inst)
302 if inst.hint:
303 ui.warn(_("(%s)\n") % inst.hint)
304 except ImportError as inst:
305 ui.warn(_("abort: %s!\n") % inst)
306 m = str(inst).split()[-1]
307 if m in "mpatch bdiff".split():
308 ui.warn(_("(did you forget to compile extensions?)\n"))
309 elif m in "zlib".split():
310 ui.warn(_("(is your Python install correct?)\n"))
311 except IOError as inst:
312 if util.safehasattr(inst, "code"):
313 ui.warn(_("abort: %s\n") % inst)
314 elif util.safehasattr(inst, "reason"):
315 try: # usually it is in the form (errno, strerror)
316 reason = inst.reason.args[1]
317 except (AttributeError, IndexError):
318 # it might be anything, for example a string
319 reason = inst.reason
320 if isinstance(reason, unicode):
321 # SSLError of Python 2.7.9 contains a unicode
322 reason = reason.encode(encoding.encoding, 'replace')
323 ui.warn(_("abort: error: %s\n") % reason)
324 elif (util.safehasattr(inst, "args")
325 and inst.args and inst.args[0] == errno.EPIPE):
326 pass
327 elif getattr(inst, "strerror", None):
328 if getattr(inst, "filename", None):
329 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
330 else:
331 ui.warn(_("abort: %s\n") % inst.strerror)
332 else:
333 raise
334 except OSError as inst:
335 if getattr(inst, "filename", None) is not None:
336 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
337 else:
338 ui.warn(_("abort: %s\n") % inst.strerror)
255 except IOError:
256 raise
339 257 except KeyboardInterrupt:
340 258 raise
341 except MemoryError:
342 ui.warn(_("abort: out of memory\n"))
343 except SystemExit as inst:
344 # Commands shouldn't sys.exit directly, but give a return code.
345 # Just in case catch this and and pass exit code to caller.
346 return inst.code
347 except socket.error as inst:
348 ui.warn(_("abort: %s\n") % inst.args[-1])
349 except: # perhaps re-raises
259 except: # probably re-raises
350 260 if not handlecommandexception(ui):
351 261 raise
352 262
353 263 return -1
354 264
355 265 def aliasargs(fn, givenargs):
356 266 args = getattr(fn, 'args', [])
357 267 if args:
358 268 cmd = ' '.join(map(util.shellquote, args))
359 269
360 270 nums = []
361 271 def replacer(m):
362 272 num = int(m.group(1)) - 1
363 273 nums.append(num)
364 274 if num < len(givenargs):
365 275 return givenargs[num]
366 276 raise error.Abort(_('too few arguments for command alias'))
367 277 cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
368 278 givenargs = [x for i, x in enumerate(givenargs)
369 279 if i not in nums]
370 280 args = shlex.split(cmd)
371 281 return args + givenargs
372 282
373 283 def aliasinterpolate(name, args, cmd):
374 284 '''interpolate args into cmd for shell aliases
375 285
376 286 This also handles $0, $@ and "$@".
377 287 '''
378 288 # util.interpolate can't deal with "$@" (with quotes) because it's only
379 289 # built to match prefix + patterns.
380 290 replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
381 291 replacemap['$0'] = name
382 292 replacemap['$$'] = '$'
383 293 replacemap['$@'] = ' '.join(args)
384 294 # Typical Unix shells interpolate "$@" (with quotes) as all the positional
385 295 # parameters, separated out into words. Emulate the same behavior here by
386 296 # quoting the arguments individually. POSIX shells will then typically
387 297 # tokenize each argument into exactly one word.
388 298 replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
389 299 # escape '\$' for regex
390 300 regex = '|'.join(replacemap.keys()).replace('$', r'\$')
391 301 r = re.compile(regex)
392 302 return r.sub(lambda x: replacemap[x.group()], cmd)
393 303
394 304 class cmdalias(object):
395 305 def __init__(self, name, definition, cmdtable, source):
396 306 self.name = self.cmd = name
397 307 self.cmdname = ''
398 308 self.definition = definition
399 309 self.fn = None
400 310 self.givenargs = []
401 311 self.opts = []
402 312 self.help = ''
403 313 self.badalias = None
404 314 self.unknowncmd = False
405 315 self.source = source
406 316
407 317 try:
408 318 aliases, entry = cmdutil.findcmd(self.name, cmdtable)
409 319 for alias, e in cmdtable.iteritems():
410 320 if e is entry:
411 321 self.cmd = alias
412 322 break
413 323 self.shadows = True
414 324 except error.UnknownCommand:
415 325 self.shadows = False
416 326
417 327 if not self.definition:
418 328 self.badalias = _("no definition for alias '%s'") % self.name
419 329 return
420 330
421 331 if self.definition.startswith('!'):
422 332 self.shell = True
423 333 def fn(ui, *args):
424 334 env = {'HG_ARGS': ' '.join((self.name,) + args)}
425 335 def _checkvar(m):
426 336 if m.groups()[0] == '$':
427 337 return m.group()
428 338 elif int(m.groups()[0]) <= len(args):
429 339 return m.group()
430 340 else:
431 341 ui.debug("No argument found for substitution "
432 342 "of %i variable in alias '%s' definition."
433 343 % (int(m.groups()[0]), self.name))
434 344 return ''
435 345 cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
436 346 cmd = aliasinterpolate(self.name, args, cmd)
437 347 return ui.system(cmd, environ=env)
438 348 self.fn = fn
439 349 return
440 350
441 351 try:
442 352 args = shlex.split(self.definition)
443 353 except ValueError as inst:
444 354 self.badalias = (_("error in definition for alias '%s': %s")
445 355 % (self.name, inst))
446 356 return
447 357 self.cmdname = cmd = args.pop(0)
448 358 self.givenargs = args
449 359
450 360 for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
451 361 if _earlygetopt([invalidarg], args):
452 362 self.badalias = (_("error in definition for alias '%s': %s may "
453 363 "only be given on the command line")
454 364 % (self.name, invalidarg))
455 365 return
456 366
457 367 try:
458 368 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
459 369 if len(tableentry) > 2:
460 370 self.fn, self.opts, self.help = tableentry
461 371 else:
462 372 self.fn, self.opts = tableentry
463 373
464 374 if self.help.startswith("hg " + cmd):
465 375 # drop prefix in old-style help lines so hg shows the alias
466 376 self.help = self.help[4 + len(cmd):]
467 377 self.__doc__ = self.fn.__doc__
468 378
469 379 except error.UnknownCommand:
470 380 self.badalias = (_("alias '%s' resolves to unknown command '%s'")
471 381 % (self.name, cmd))
472 382 self.unknowncmd = True
473 383 except error.AmbiguousCommand:
474 384 self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
475 385 % (self.name, cmd))
476 386
477 387 @property
478 388 def args(self):
479 389 args = map(util.expandpath, self.givenargs)
480 390 return aliasargs(self.fn, args)
481 391
482 392 def __getattr__(self, name):
483 393 adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
484 394 if name not in adefaults:
485 395 raise AttributeError(name)
486 396 if self.badalias or util.safehasattr(self, 'shell'):
487 397 return adefaults[name]
488 398 return getattr(self.fn, name)
489 399
490 400 def __call__(self, ui, *args, **opts):
491 401 if self.badalias:
492 402 hint = None
493 403 if self.unknowncmd:
494 404 try:
495 405 # check if the command is in a disabled extension
496 406 cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
497 407 hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
498 408 except error.UnknownCommand:
499 409 pass
500 410 raise error.Abort(self.badalias, hint=hint)
501 411 if self.shadows:
502 412 ui.debug("alias '%s' shadows command '%s'\n" %
503 413 (self.name, self.cmdname))
504 414
505 415 ui.log('commandalias', "alias '%s' expands to '%s'\n",
506 416 self.name, self.definition)
507 417 if util.safehasattr(self, 'shell'):
508 418 return self.fn(ui, *args, **opts)
509 419 else:
510 420 try:
511 421 return util.checksignature(self.fn)(ui, *args, **opts)
512 422 except error.SignatureError:
513 423 args = ' '.join([self.cmdname] + self.args)
514 424 ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
515 425 raise
516 426
517 427 def addaliases(ui, cmdtable):
518 428 # aliases are processed after extensions have been loaded, so they
519 429 # may use extension commands. Aliases can also use other alias definitions,
520 430 # but only if they have been defined prior to the current definition.
521 431 for alias, definition in ui.configitems('alias'):
522 432 source = ui.configsource('alias', alias)
523 433 aliasdef = cmdalias(alias, definition, cmdtable, source)
524 434
525 435 try:
526 436 olddef = cmdtable[aliasdef.cmd][0]
527 437 if olddef.definition == aliasdef.definition:
528 438 continue
529 439 except (KeyError, AttributeError):
530 440 # definition might not exist or it might not be a cmdalias
531 441 pass
532 442
533 443 cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
534 444
535 445 def _parse(ui, args):
536 446 options = {}
537 447 cmdoptions = {}
538 448
539 449 try:
540 450 args = fancyopts.fancyopts(args, commands.globalopts, options)
541 451 except fancyopts.getopt.GetoptError as inst:
542 452 raise error.CommandError(None, inst)
543 453
544 454 if args:
545 455 cmd, args = args[0], args[1:]
546 456 aliases, entry = cmdutil.findcmd(cmd, commands.table,
547 457 ui.configbool("ui", "strict"))
548 458 cmd = aliases[0]
549 459 args = aliasargs(entry[0], args)
550 460 defaults = ui.config("defaults", cmd)
551 461 if defaults:
552 462 args = map(util.expandpath, shlex.split(defaults)) + args
553 463 c = list(entry[1])
554 464 else:
555 465 cmd = None
556 466 c = []
557 467
558 468 # combine global options into local
559 469 for o in commands.globalopts:
560 470 c.append((o[0], o[1], options[o[1]], o[3]))
561 471
562 472 try:
563 473 args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True)
564 474 except fancyopts.getopt.GetoptError as inst:
565 475 raise error.CommandError(cmd, inst)
566 476
567 477 # separate global options back out
568 478 for o in commands.globalopts:
569 479 n = o[1]
570 480 options[n] = cmdoptions[n]
571 481 del cmdoptions[n]
572 482
573 483 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
574 484
575 485 def _parseconfig(ui, config):
576 486 """parse the --config options from the command line"""
577 487 configs = []
578 488
579 489 for cfg in config:
580 490 try:
581 491 name, value = [cfgelem.strip()
582 492 for cfgelem in cfg.split('=', 1)]
583 493 section, name = name.split('.', 1)
584 494 if not section or not name:
585 495 raise IndexError
586 496 ui.setconfig(section, name, value, '--config')
587 497 configs.append((section, name, value))
588 498 except (IndexError, ValueError):
589 499 raise error.Abort(_('malformed --config option: %r '
590 500 '(use --config section.name=value)') % cfg)
591 501
592 502 return configs
593 503
594 504 def _earlygetopt(aliases, args):
595 505 """Return list of values for an option (or aliases).
596 506
597 507 The values are listed in the order they appear in args.
598 508 The options and values are removed from args.
599 509
600 510 >>> args = ['x', '--cwd', 'foo', 'y']
601 511 >>> _earlygetopt(['--cwd'], args), args
602 512 (['foo'], ['x', 'y'])
603 513
604 514 >>> args = ['x', '--cwd=bar', 'y']
605 515 >>> _earlygetopt(['--cwd'], args), args
606 516 (['bar'], ['x', 'y'])
607 517
608 518 >>> args = ['x', '-R', 'foo', 'y']
609 519 >>> _earlygetopt(['-R'], args), args
610 520 (['foo'], ['x', 'y'])
611 521
612 522 >>> args = ['x', '-Rbar', 'y']
613 523 >>> _earlygetopt(['-R'], args), args
614 524 (['bar'], ['x', 'y'])
615 525 """
616 526 try:
617 527 argcount = args.index("--")
618 528 except ValueError:
619 529 argcount = len(args)
620 530 shortopts = [opt for opt in aliases if len(opt) == 2]
621 531 values = []
622 532 pos = 0
623 533 while pos < argcount:
624 534 fullarg = arg = args[pos]
625 535 equals = arg.find('=')
626 536 if equals > -1:
627 537 arg = arg[:equals]
628 538 if arg in aliases:
629 539 del args[pos]
630 540 if equals > -1:
631 541 values.append(fullarg[equals + 1:])
632 542 argcount -= 1
633 543 else:
634 544 if pos + 1 >= argcount:
635 545 # ignore and let getopt report an error if there is no value
636 546 break
637 547 values.append(args.pop(pos))
638 548 argcount -= 2
639 549 elif arg[:2] in shortopts:
640 550 # short option can have no following space, e.g. hg log -Rfoo
641 551 values.append(args.pop(pos)[2:])
642 552 argcount -= 1
643 553 else:
644 554 pos += 1
645 555 return values
646 556
647 557 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
648 558 # run pre-hook, and abort if it fails
649 559 hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
650 560 pats=cmdpats, opts=cmdoptions)
651 561 try:
652 562 ret = _runcommand(ui, options, cmd, d)
653 563 # run post-hook, passing command result
654 564 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
655 565 result=ret, pats=cmdpats, opts=cmdoptions)
656 566 except Exception:
657 567 # run failure hook and re-raise
658 568 hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
659 569 pats=cmdpats, opts=cmdoptions)
660 570 raise
661 571 return ret
662 572
663 573 def _getlocal(ui, rpath, wd=None):
664 574 """Return (path, local ui object) for the given target path.
665 575
666 576 Takes paths in [cwd]/.hg/hgrc into account."
667 577 """
668 578 if wd is None:
669 579 try:
670 580 wd = pycompat.getcwd()
671 581 except OSError as e:
672 582 raise error.Abort(_("error getting current working directory: %s") %
673 583 e.strerror)
674 584 path = cmdutil.findrepo(wd) or ""
675 585 if not path:
676 586 lui = ui
677 587 else:
678 588 lui = ui.copy()
679 589 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
680 590
681 591 if rpath and rpath[-1]:
682 592 path = lui.expandpath(rpath[-1])
683 593 lui = ui.copy()
684 594 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
685 595
686 596 return path, lui
687 597
688 598 def _checkshellalias(lui, ui, args):
689 599 """Return the function to run the shell alias, if it is required"""
690 600 options = {}
691 601
692 602 try:
693 603 args = fancyopts.fancyopts(args, commands.globalopts, options)
694 604 except fancyopts.getopt.GetoptError:
695 605 return
696 606
697 607 if not args:
698 608 return
699 609
700 610 cmdtable = commands.table
701 611
702 612 cmd = args[0]
703 613 try:
704 614 strict = ui.configbool("ui", "strict")
705 615 aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
706 616 except (error.AmbiguousCommand, error.UnknownCommand):
707 617 return
708 618
709 619 cmd = aliases[0]
710 620 fn = entry[0]
711 621
712 622 if cmd and util.safehasattr(fn, 'shell'):
713 623 d = lambda: fn(ui, *args[1:])
714 624 return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
715 625 [], {})
716 626
717 627 _loaded = set()
718 628
719 629 # list of (objname, loadermod, loadername) tuple:
720 630 # - objname is the name of an object in extension module, from which
721 631 # extra information is loaded
722 632 # - loadermod is the module where loader is placed
723 633 # - loadername is the name of the function, which takes (ui, extensionname,
724 634 # extraobj) arguments
725 635 extraloaders = [
726 636 ('cmdtable', commands, 'loadcmdtable'),
727 637 ('filesetpredicate', fileset, 'loadpredicate'),
728 638 ('revsetpredicate', revset, 'loadpredicate'),
729 639 ('templatefilter', templatefilters, 'loadfilter'),
730 640 ('templatefunc', templater, 'loadfunction'),
731 641 ('templatekeyword', templatekw, 'loadkeyword'),
732 642 ]
733 643
734 644 def _dispatch(req):
735 645 args = req.args
736 646 ui = req.ui
737 647
738 648 # check for cwd
739 649 cwd = _earlygetopt(['--cwd'], args)
740 650 if cwd:
741 651 os.chdir(cwd[-1])
742 652
743 653 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
744 654 path, lui = _getlocal(ui, rpath)
745 655
746 656 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
747 657 # reposetup. Programs like TortoiseHg will call _dispatch several
748 658 # times so we keep track of configured extensions in _loaded.
749 659 extensions.loadall(lui)
750 660 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
751 661 # Propagate any changes to lui.__class__ by extensions
752 662 ui.__class__ = lui.__class__
753 663
754 664 # (uisetup and extsetup are handled in extensions.loadall)
755 665
756 666 for name, module in exts:
757 667 for objname, loadermod, loadername in extraloaders:
758 668 extraobj = getattr(module, objname, None)
759 669 if extraobj is not None:
760 670 getattr(loadermod, loadername)(ui, name, extraobj)
761 671 _loaded.add(name)
762 672
763 673 # (reposetup is handled in hg.repository)
764 674
765 675 # Side-effect of accessing is debugcommands module is guaranteed to be
766 676 # imported and commands.table is populated.
767 677 debugcommands.command
768 678
769 679 addaliases(lui, commands.table)
770 680
771 681 # All aliases and commands are completely defined, now.
772 682 # Check abbreviation/ambiguity of shell alias.
773 683 shellaliasfn = _checkshellalias(lui, ui, args)
774 684 if shellaliasfn:
775 685 with profiling.maybeprofile(lui):
776 686 return shellaliasfn()
777 687
778 688 # check for fallback encoding
779 689 fallback = lui.config('ui', 'fallbackencoding')
780 690 if fallback:
781 691 encoding.fallbackencoding = fallback
782 692
783 693 fullargs = args
784 694 cmd, func, args, options, cmdoptions = _parse(lui, args)
785 695
786 696 if options["config"]:
787 697 raise error.Abort(_("option --config may not be abbreviated!"))
788 698 if options["cwd"]:
789 699 raise error.Abort(_("option --cwd may not be abbreviated!"))
790 700 if options["repository"]:
791 701 raise error.Abort(_(
792 702 "option -R has to be separated from other options (e.g. not -qR) "
793 703 "and --repository may only be abbreviated as --repo!"))
794 704
795 705 if options["encoding"]:
796 706 encoding.encoding = options["encoding"]
797 707 if options["encodingmode"]:
798 708 encoding.encodingmode = options["encodingmode"]
799 709 if options["time"]:
800 710 def get_times():
801 711 t = os.times()
802 712 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
803 713 t = (t[0], t[1], t[2], t[3], time.clock())
804 714 return t
805 715 s = get_times()
806 716 def print_time():
807 717 t = get_times()
808 718 ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
809 719 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
810 720 atexit.register(print_time)
811 721
812 722 uis = set([ui, lui])
813 723
814 724 if req.repo:
815 725 uis.add(req.repo.ui)
816 726
817 727 if options['verbose'] or options['debug'] or options['quiet']:
818 728 for opt in ('verbose', 'debug', 'quiet'):
819 729 val = str(bool(options[opt]))
820 730 for ui_ in uis:
821 731 ui_.setconfig('ui', opt, val, '--' + opt)
822 732
823 733 if options['profile']:
824 734 for ui_ in uis:
825 735 ui_.setconfig('profiling', 'enabled', 'true', '--profile')
826 736
827 737 if options['traceback']:
828 738 for ui_ in uis:
829 739 ui_.setconfig('ui', 'traceback', 'on', '--traceback')
830 740
831 741 if options['noninteractive']:
832 742 for ui_ in uis:
833 743 ui_.setconfig('ui', 'interactive', 'off', '-y')
834 744
835 745 if cmdoptions.get('insecure', False):
836 746 for ui_ in uis:
837 747 ui_.insecureconnections = True
838 748
839 749 if options['version']:
840 750 return commands.version_(ui)
841 751 if options['help']:
842 752 return commands.help_(ui, cmd, command=cmd is not None)
843 753 elif not cmd:
844 754 return commands.help_(ui, 'shortlist')
845 755
846 756 with profiling.maybeprofile(lui):
847 757 repo = None
848 758 cmdpats = args[:]
849 759 if not func.norepo:
850 760 # use the repo from the request only if we don't have -R
851 761 if not rpath and not cwd:
852 762 repo = req.repo
853 763
854 764 if repo:
855 765 # set the descriptors of the repo ui to those of ui
856 766 repo.ui.fin = ui.fin
857 767 repo.ui.fout = ui.fout
858 768 repo.ui.ferr = ui.ferr
859 769 else:
860 770 try:
861 771 repo = hg.repository(ui, path=path)
862 772 if not repo.local():
863 773 raise error.Abort(_("repository '%s' is not local")
864 774 % path)
865 775 repo.ui.setconfig("bundle", "mainreporoot", repo.root,
866 776 'repo')
867 777 except error.RequirementError:
868 778 raise
869 779 except error.RepoError:
870 780 if rpath and rpath[-1]: # invalid -R path
871 781 raise
872 782 if not func.optionalrepo:
873 783 if func.inferrepo and args and not path:
874 784 # try to infer -R from command args
875 785 repos = map(cmdutil.findrepo, args)
876 786 guess = repos[0]
877 787 if guess and repos.count(guess) == len(repos):
878 788 req.args = ['--repository', guess] + fullargs
879 789 return _dispatch(req)
880 790 if not path:
881 791 raise error.RepoError(_("no repository found in"
882 792 " '%s' (.hg not found)")
883 793 % pycompat.getcwd())
884 794 raise
885 795 if repo:
886 796 ui = repo.ui
887 797 if options['hidden']:
888 798 repo = repo.unfiltered()
889 799 args.insert(0, repo)
890 800 elif rpath:
891 801 ui.warn(_("warning: --repository ignored\n"))
892 802
893 803 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
894 804 ui.log("command", '%s\n', msg)
895 805 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
896 806 try:
897 807 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
898 808 cmdpats, cmdoptions)
899 809 finally:
900 810 if repo and repo != req.repo:
901 811 repo.close()
902 812
903 813 def _runcommand(ui, options, cmd, cmdfunc):
904 814 """Run a command function, possibly with profiling enabled."""
905 815 try:
906 816 return cmdfunc()
907 817 except error.SignatureError:
908 818 raise error.CommandError(cmd, _('invalid arguments'))
909 819
910 820 def _exceptionwarning(ui):
911 821 """Produce a warning message for the current active exception"""
912 822
913 823 # For compatibility checking, we discard the portion of the hg
914 824 # version after the + on the assumption that if a "normal
915 825 # user" is running a build with a + in it the packager
916 826 # probably built from fairly close to a tag and anyone with a
917 827 # 'make local' copy of hg (where the version number can be out
918 828 # of date) will be clueful enough to notice the implausible
919 829 # version number and try updating.
920 830 ct = util.versiontuple(n=2)
921 831 worst = None, ct, ''
922 832 if ui.config('ui', 'supportcontact', None) is None:
923 833 for name, mod in extensions.extensions():
924 834 testedwith = getattr(mod, 'testedwith', '')
925 835 report = getattr(mod, 'buglink', _('the extension author.'))
926 836 if not testedwith.strip():
927 837 # We found an untested extension. It's likely the culprit.
928 838 worst = name, 'unknown', report
929 839 break
930 840
931 841 # Never blame on extensions bundled with Mercurial.
932 842 if extensions.ismoduleinternal(mod):
933 843 continue
934 844
935 845 tested = [util.versiontuple(t, 2) for t in testedwith.split()]
936 846 if ct in tested:
937 847 continue
938 848
939 849 lower = [t for t in tested if t < ct]
940 850 nearest = max(lower or tested)
941 851 if worst[0] is None or nearest < worst[1]:
942 852 worst = name, nearest, report
943 853 if worst[0] is not None:
944 854 name, testedwith, report = worst
945 855 if not isinstance(testedwith, str):
946 856 testedwith = '.'.join([str(c) for c in testedwith])
947 857 warning = (_('** Unknown exception encountered with '
948 858 'possibly-broken third-party extension %s\n'
949 859 '** which supports versions %s of Mercurial.\n'
950 860 '** Please disable %s and try your action again.\n'
951 861 '** If that fixes the bug please report it to %s\n')
952 862 % (name, testedwith, name, report))
953 863 else:
954 864 bugtracker = ui.config('ui', 'supportcontact', None)
955 865 if bugtracker is None:
956 866 bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
957 867 warning = (_("** unknown exception encountered, "
958 868 "please report by visiting\n** ") + bugtracker + '\n')
959 869 warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
960 870 (_("** Mercurial Distributed SCM (version %s)\n") %
961 871 util.version()) +
962 872 (_("** Extensions loaded: %s\n") %
963 873 ", ".join([x[0] for x in extensions.extensions()])))
964 874 return warning
965 875
966 876 def handlecommandexception(ui):
967 877 """Produce a warning message for broken commands
968 878
969 879 Called when handling an exception; the exception is reraised if
970 880 this function returns False, ignored otherwise.
971 881 """
972 882 warning = _exceptionwarning(ui)
973 883 ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
974 884 ui.warn(warning)
975 885 return False # re-raise the exception
mercurial/scmutil.py
@@ -1,1470 +1,1573 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import contextlib
11 11 import errno
12 12 import glob
13 13 import hashlib
14 14 import os
15 15 import re
16 16 import shutil
17 import socket
17 18 import stat
18 19 import tempfile
19 20 import threading
20 21
21 22 from .i18n import _
22 23 from .node import wdirrev
23 24 from . import (
24 25 encoding,
25 26 error,
26 27 match as matchmod,
27 28 osutil,
28 29 pathutil,
29 30 phases,
30 31 pycompat,
31 32 revset,
32 33 similar,
33 34 util,
34 35 )
35 36
36 37 if os.name == 'nt':
37 38 from . import scmwindows as scmplatform
38 39 else:
39 40 from . import scmposix as scmplatform
40 41
41 42 systemrcpath = scmplatform.systemrcpath
42 43 userrcpath = scmplatform.userrcpath
43 44 termsize = scmplatform.termsize
44 45
45 46 class status(tuple):
46 47 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
47 48 and 'ignored' properties are only relevant to the working copy.
48 49 '''
49 50
50 51 __slots__ = ()
51 52
52 53 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
53 54 clean):
54 55 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
55 56 ignored, clean))
56 57
57 58 @property
58 59 def modified(self):
59 60 '''files that have been modified'''
60 61 return self[0]
61 62
62 63 @property
63 64 def added(self):
64 65 '''files that have been added'''
65 66 return self[1]
66 67
67 68 @property
68 69 def removed(self):
69 70 '''files that have been removed'''
70 71 return self[2]
71 72
72 73 @property
73 74 def deleted(self):
74 75 '''files that are in the dirstate, but have been deleted from the
75 76 working copy (aka "missing")
76 77 '''
77 78 return self[3]
78 79
79 80 @property
80 81 def unknown(self):
81 82 '''files not in the dirstate that are not ignored'''
82 83 return self[4]
83 84
84 85 @property
85 86 def ignored(self):
86 87 '''files not in the dirstate that are ignored (by _dirignore())'''
87 88 return self[5]
88 89
89 90 @property
90 91 def clean(self):
91 92 '''files that have not been modified'''
92 93 return self[6]
93 94
94 95 def __repr__(self, *args, **kwargs):
95 96 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
96 97 'unknown=%r, ignored=%r, clean=%r>') % self)
97 98
98 99 def itersubrepos(ctx1, ctx2):
99 100 """find subrepos in ctx1 or ctx2"""
100 101 # Create a (subpath, ctx) mapping where we prefer subpaths from
101 102 # ctx1. The subpaths from ctx2 are important when the .hgsub file
102 103 # has been modified (in ctx2) but not yet committed (in ctx1).
103 104 subpaths = dict.fromkeys(ctx2.substate, ctx2)
104 105 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
105 106
106 107 missing = set()
107 108
108 109 for subpath in ctx2.substate:
109 110 if subpath not in ctx1.substate:
110 111 del subpaths[subpath]
111 112 missing.add(subpath)
112 113
113 114 for subpath, ctx in sorted(subpaths.iteritems()):
114 115 yield subpath, ctx.sub(subpath)
115 116
116 117 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
117 118 # status and diff will have an accurate result when it does
118 119 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
119 120 # against itself.
120 121 for subpath in missing:
121 122 yield subpath, ctx2.nullsub(subpath, ctx1)
122 123
123 124 def nochangesfound(ui, repo, excluded=None):
124 125 '''Report no changes for push/pull, excluded is None or a list of
125 126 nodes excluded from the push/pull.
126 127 '''
127 128 secretlist = []
128 129 if excluded:
129 130 for n in excluded:
130 131 if n not in repo:
131 132 # discovery should not have included the filtered revision,
132 133 # we have to explicitly exclude it until discovery is cleanup.
133 134 continue
134 135 ctx = repo[n]
135 136 if ctx.phase() >= phases.secret and not ctx.extinct():
136 137 secretlist.append(n)
137 138
138 139 if secretlist:
139 140 ui.status(_("no changes found (ignored %d secret changesets)\n")
140 141 % len(secretlist))
141 142 else:
142 143 ui.status(_("no changes found\n"))
143 144
145 def callcatch(ui, func):
146 """call func() with global exception handling
147
148 return func() if no exception happens. otherwise do some error handling
149 and return an exit code accordingly. does not handle all exceptions.
150 """
151 try:
152 return func()
153 # Global exception handling, alphabetically
154 # Mercurial-specific first, followed by built-in and library exceptions
155 except error.LockHeld as inst:
156 if inst.errno == errno.ETIMEDOUT:
157 reason = _('timed out waiting for lock held by %s') % inst.locker
158 else:
159 reason = _('lock held by %s') % inst.locker
160 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
161 except error.LockUnavailable as inst:
162 ui.warn(_("abort: could not lock %s: %s\n") %
163 (inst.desc or inst.filename, inst.strerror))
164 except error.OutOfBandError as inst:
165 if inst.args:
166 msg = _("abort: remote error:\n")
167 else:
168 msg = _("abort: remote error\n")
169 ui.warn(msg)
170 if inst.args:
171 ui.warn(''.join(inst.args))
172 if inst.hint:
173 ui.warn('(%s)\n' % inst.hint)
174 except error.RepoError as inst:
175 ui.warn(_("abort: %s!\n") % inst)
176 if inst.hint:
177 ui.warn(_("(%s)\n") % inst.hint)
178 except error.ResponseError as inst:
179 ui.warn(_("abort: %s") % inst.args[0])
180 if not isinstance(inst.args[1], basestring):
181 ui.warn(" %r\n" % (inst.args[1],))
182 elif not inst.args[1]:
183 ui.warn(_(" empty string\n"))
184 else:
185 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
186 except error.CensoredNodeError as inst:
187 ui.warn(_("abort: file censored %s!\n") % inst)
188 except error.RevlogError as inst:
189 ui.warn(_("abort: %s!\n") % inst)
190 except error.SignalInterrupt:
191 ui.warn(_("killed!\n"))
192 except error.InterventionRequired as inst:
193 ui.warn("%s\n" % inst)
194 if inst.hint:
195 ui.warn(_("(%s)\n") % inst.hint)
196 return 1
197 except error.Abort as inst:
198 ui.warn(_("abort: %s\n") % inst)
199 if inst.hint:
200 ui.warn(_("(%s)\n") % inst.hint)
201 except ImportError as inst:
202 ui.warn(_("abort: %s!\n") % inst)
203 m = str(inst).split()[-1]
204 if m in "mpatch bdiff".split():
205 ui.warn(_("(did you forget to compile extensions?)\n"))
206 elif m in "zlib".split():
207 ui.warn(_("(is your Python install correct?)\n"))
208 except IOError as inst:
209 if util.safehasattr(inst, "code"):
210 ui.warn(_("abort: %s\n") % inst)
211 elif util.safehasattr(inst, "reason"):
212 try: # usually it is in the form (errno, strerror)
213 reason = inst.reason.args[1]
214 except (AttributeError, IndexError):
215 # it might be anything, for example a string
216 reason = inst.reason
217 if isinstance(reason, unicode):
218 # SSLError of Python 2.7.9 contains a unicode
219 reason = reason.encode(encoding.encoding, 'replace')
220 ui.warn(_("abort: error: %s\n") % reason)
221 elif (util.safehasattr(inst, "args")
222 and inst.args and inst.args[0] == errno.EPIPE):
223 pass
224 elif getattr(inst, "strerror", None):
225 if getattr(inst, "filename", None):
226 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
227 else:
228 ui.warn(_("abort: %s\n") % inst.strerror)
229 else:
230 raise
231 except OSError as inst:
232 if getattr(inst, "filename", None) is not None:
233 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
234 else:
235 ui.warn(_("abort: %s\n") % inst.strerror)
236 except MemoryError:
237 ui.warn(_("abort: out of memory\n"))
238 except SystemExit as inst:
239 # Commands shouldn't sys.exit directly, but give a return code.
240 # Just in case catch this and and pass exit code to caller.
241 return inst.code
242 except socket.error as inst:
243 ui.warn(_("abort: %s\n") % inst.args[-1])
244
245 return -1
246
144 247 def checknewlabel(repo, lbl, kind):
145 248 # Do not use the "kind" parameter in ui output.
146 249 # It makes strings difficult to translate.
147 250 if lbl in ['tip', '.', 'null']:
148 251 raise error.Abort(_("the name '%s' is reserved") % lbl)
149 252 for c in (':', '\0', '\n', '\r'):
150 253 if c in lbl:
151 254 raise error.Abort(_("%r cannot be used in a name") % c)
152 255 try:
153 256 int(lbl)
154 257 raise error.Abort(_("cannot use an integer as a name"))
155 258 except ValueError:
156 259 pass
157 260
158 261 def checkfilename(f):
159 262 '''Check that the filename f is an acceptable filename for a tracked file'''
160 263 if '\r' in f or '\n' in f:
161 264 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
162 265
163 266 def checkportable(ui, f):
164 267 '''Check if filename f is portable and warn or abort depending on config'''
165 268 checkfilename(f)
166 269 abort, warn = checkportabilityalert(ui)
167 270 if abort or warn:
168 271 msg = util.checkwinfilename(f)
169 272 if msg:
170 273 msg = "%s: %r" % (msg, f)
171 274 if abort:
172 275 raise error.Abort(msg)
173 276 ui.warn(_("warning: %s\n") % msg)
174 277
175 278 def checkportabilityalert(ui):
176 279 '''check if the user's config requests nothing, a warning, or abort for
177 280 non-portable filenames'''
178 281 val = ui.config('ui', 'portablefilenames', 'warn')
179 282 lval = val.lower()
180 283 bval = util.parsebool(val)
181 284 abort = os.name == 'nt' or lval == 'abort'
182 285 warn = bval or lval == 'warn'
183 286 if bval is None and not (warn or abort or lval == 'ignore'):
184 287 raise error.ConfigError(
185 288 _("ui.portablefilenames value is invalid ('%s')") % val)
186 289 return abort, warn
187 290
188 291 class casecollisionauditor(object):
189 292 def __init__(self, ui, abort, dirstate):
190 293 self._ui = ui
191 294 self._abort = abort
192 295 allfiles = '\0'.join(dirstate._map)
193 296 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
194 297 self._dirstate = dirstate
195 298 # The purpose of _newfiles is so that we don't complain about
196 299 # case collisions if someone were to call this object with the
197 300 # same filename twice.
198 301 self._newfiles = set()
199 302
200 303 def __call__(self, f):
201 304 if f in self._newfiles:
202 305 return
203 306 fl = encoding.lower(f)
204 307 if fl in self._loweredfiles and f not in self._dirstate:
205 308 msg = _('possible case-folding collision for %s') % f
206 309 if self._abort:
207 310 raise error.Abort(msg)
208 311 self._ui.warn(_("warning: %s\n") % msg)
209 312 self._loweredfiles.add(fl)
210 313 self._newfiles.add(f)
211 314
212 315 def filteredhash(repo, maxrev):
213 316 """build hash of filtered revisions in the current repoview.
214 317
215 318 Multiple caches perform up-to-date validation by checking that the
216 319 tiprev and tipnode stored in the cache file match the current repository.
217 320 However, this is not sufficient for validating repoviews because the set
218 321 of revisions in the view may change without the repository tiprev and
219 322 tipnode changing.
220 323
221 324 This function hashes all the revs filtered from the view and returns
222 325 that SHA-1 digest.
223 326 """
224 327 cl = repo.changelog
225 328 if not cl.filteredrevs:
226 329 return None
227 330 key = None
228 331 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
229 332 if revs:
230 333 s = hashlib.sha1()
231 334 for rev in revs:
232 335 s.update('%s;' % rev)
233 336 key = s.digest()
234 337 return key
235 338
236 339 class abstractvfs(object):
237 340 """Abstract base class; cannot be instantiated"""
238 341
239 342 def __init__(self, *args, **kwargs):
240 343 '''Prevent instantiation; don't call this from subclasses.'''
241 344 raise NotImplementedError('attempted instantiating ' + str(type(self)))
242 345
243 346 def tryread(self, path):
244 347 '''gracefully return an empty string for missing files'''
245 348 try:
246 349 return self.read(path)
247 350 except IOError as inst:
248 351 if inst.errno != errno.ENOENT:
249 352 raise
250 353 return ""
251 354
252 355 def tryreadlines(self, path, mode='rb'):
253 356 '''gracefully return an empty array for missing files'''
254 357 try:
255 358 return self.readlines(path, mode=mode)
256 359 except IOError as inst:
257 360 if inst.errno != errno.ENOENT:
258 361 raise
259 362 return []
260 363
261 364 @util.propertycache
262 365 def open(self):
263 366 '''Open ``path`` file, which is relative to vfs root.
264 367
265 368 Newly created directories are marked as "not to be indexed by
266 369 the content indexing service", if ``notindexed`` is specified
267 370 for "write" mode access.
268 371 '''
269 372 return self.__call__
270 373
271 374 def read(self, path):
272 375 with self(path, 'rb') as fp:
273 376 return fp.read()
274 377
275 378 def readlines(self, path, mode='rb'):
276 379 with self(path, mode=mode) as fp:
277 380 return fp.readlines()
278 381
279 382 def write(self, path, data, backgroundclose=False):
280 383 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
281 384 return fp.write(data)
282 385
283 386 def writelines(self, path, data, mode='wb', notindexed=False):
284 387 with self(path, mode=mode, notindexed=notindexed) as fp:
285 388 return fp.writelines(data)
286 389
287 390 def append(self, path, data):
288 391 with self(path, 'ab') as fp:
289 392 return fp.write(data)
290 393
291 394 def basename(self, path):
292 395 """return base element of a path (as os.path.basename would do)
293 396
294 397 This exists to allow handling of strange encoding if needed."""
295 398 return os.path.basename(path)
296 399
297 400 def chmod(self, path, mode):
298 401 return os.chmod(self.join(path), mode)
299 402
300 403 def dirname(self, path):
301 404 """return dirname element of a path (as os.path.dirname would do)
302 405
303 406 This exists to allow handling of strange encoding if needed."""
304 407 return os.path.dirname(path)
305 408
306 409 def exists(self, path=None):
307 410 return os.path.exists(self.join(path))
308 411
309 412 def fstat(self, fp):
310 413 return util.fstat(fp)
311 414
312 415 def isdir(self, path=None):
313 416 return os.path.isdir(self.join(path))
314 417
315 418 def isfile(self, path=None):
316 419 return os.path.isfile(self.join(path))
317 420
318 421 def islink(self, path=None):
319 422 return os.path.islink(self.join(path))
320 423
321 424 def isfileorlink(self, path=None):
322 425 '''return whether path is a regular file or a symlink
323 426
324 427 Unlike isfile, this doesn't follow symlinks.'''
325 428 try:
326 429 st = self.lstat(path)
327 430 except OSError:
328 431 return False
329 432 mode = st.st_mode
330 433 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
331 434
332 435 def reljoin(self, *paths):
333 436 """join various elements of a path together (as os.path.join would do)
334 437
335 438 The vfs base is not injected so that path stay relative. This exists
336 439 to allow handling of strange encoding if needed."""
337 440 return os.path.join(*paths)
338 441
339 442 def split(self, path):
340 443 """split top-most element of a path (as os.path.split would do)
341 444
342 445 This exists to allow handling of strange encoding if needed."""
343 446 return os.path.split(path)
344 447
345 448 def lexists(self, path=None):
346 449 return os.path.lexists(self.join(path))
347 450
348 451 def lstat(self, path=None):
349 452 return os.lstat(self.join(path))
350 453
351 454 def listdir(self, path=None):
352 455 return os.listdir(self.join(path))
353 456
354 457 def makedir(self, path=None, notindexed=True):
355 458 return util.makedir(self.join(path), notindexed)
356 459
357 460 def makedirs(self, path=None, mode=None):
358 461 return util.makedirs(self.join(path), mode)
359 462
360 463 def makelock(self, info, path):
361 464 return util.makelock(info, self.join(path))
362 465
363 466 def mkdir(self, path=None):
364 467 return os.mkdir(self.join(path))
365 468
366 469 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
367 470 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
368 471 dir=self.join(dir), text=text)
369 472 dname, fname = util.split(name)
370 473 if dir:
371 474 return fd, os.path.join(dir, fname)
372 475 else:
373 476 return fd, fname
374 477
375 478 def readdir(self, path=None, stat=None, skip=None):
376 479 return osutil.listdir(self.join(path), stat, skip)
377 480
378 481 def readlock(self, path):
379 482 return util.readlock(self.join(path))
380 483
381 484 def rename(self, src, dst, checkambig=False):
382 485 """Rename from src to dst
383 486
384 487 checkambig argument is used with util.filestat, and is useful
385 488 only if destination file is guarded by any lock
386 489 (e.g. repo.lock or repo.wlock).
387 490 """
388 491 dstpath = self.join(dst)
389 492 oldstat = checkambig and util.filestat(dstpath)
390 493 if oldstat and oldstat.stat:
391 494 ret = util.rename(self.join(src), dstpath)
392 495 newstat = util.filestat(dstpath)
393 496 if newstat.isambig(oldstat):
394 497 # stat of renamed file is ambiguous to original one
395 498 newstat.avoidambig(dstpath, oldstat)
396 499 return ret
397 500 return util.rename(self.join(src), dstpath)
398 501
399 502 def readlink(self, path):
400 503 return os.readlink(self.join(path))
401 504
402 505 def removedirs(self, path=None):
403 506 """Remove a leaf directory and all empty intermediate ones
404 507 """
405 508 return util.removedirs(self.join(path))
406 509
407 510 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
408 511 """Remove a directory tree recursively
409 512
410 513 If ``forcibly``, this tries to remove READ-ONLY files, too.
411 514 """
412 515 if forcibly:
413 516 def onerror(function, path, excinfo):
414 517 if function is not os.remove:
415 518 raise
416 519 # read-only files cannot be unlinked under Windows
417 520 s = os.stat(path)
418 521 if (s.st_mode & stat.S_IWRITE) != 0:
419 522 raise
420 523 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
421 524 os.remove(path)
422 525 else:
423 526 onerror = None
424 527 return shutil.rmtree(self.join(path),
425 528 ignore_errors=ignore_errors, onerror=onerror)
426 529
427 530 def setflags(self, path, l, x):
428 531 return util.setflags(self.join(path), l, x)
429 532
430 533 def stat(self, path=None):
431 534 return os.stat(self.join(path))
432 535
433 536 def unlink(self, path=None):
434 537 return util.unlink(self.join(path))
435 538
436 539 def unlinkpath(self, path=None, ignoremissing=False):
437 540 return util.unlinkpath(self.join(path), ignoremissing)
438 541
439 542 def utime(self, path=None, t=None):
440 543 return os.utime(self.join(path), t)
441 544
442 545 def walk(self, path=None, onerror=None):
443 546 """Yield (dirpath, dirs, files) tuple for each directories under path
444 547
445 548 ``dirpath`` is relative one from the root of this vfs. This
446 549 uses ``os.sep`` as path separator, even you specify POSIX
447 550 style ``path``.
448 551
449 552 "The root of this vfs" is represented as empty ``dirpath``.
450 553 """
451 554 root = os.path.normpath(self.join(None))
452 555 # when dirpath == root, dirpath[prefixlen:] becomes empty
453 556 # because len(dirpath) < prefixlen.
454 557 prefixlen = len(pathutil.normasprefix(root))
455 558 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
456 559 yield (dirpath[prefixlen:], dirs, files)
457 560
458 561 @contextlib.contextmanager
459 562 def backgroundclosing(self, ui, expectedcount=-1):
460 563 """Allow files to be closed asynchronously.
461 564
462 565 When this context manager is active, ``backgroundclose`` can be passed
463 566 to ``__call__``/``open`` to result in the file possibly being closed
464 567 asynchronously, on a background thread.
465 568 """
466 569 # This is an arbitrary restriction and could be changed if we ever
467 570 # have a use case.
468 571 vfs = getattr(self, 'vfs', self)
469 572 if getattr(vfs, '_backgroundfilecloser', None):
470 573 raise error.Abort(
471 574 _('can only have 1 active background file closer'))
472 575
473 576 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
474 577 try:
475 578 vfs._backgroundfilecloser = bfc
476 579 yield bfc
477 580 finally:
478 581 vfs._backgroundfilecloser = None
479 582
480 583 class vfs(abstractvfs):
481 584 '''Operate files relative to a base directory
482 585
483 586 This class is used to hide the details of COW semantics and
484 587 remote file access from higher level code.
485 588 '''
486 589 def __init__(self, base, audit=True, expandpath=False, realpath=False):
487 590 if expandpath:
488 591 base = util.expandpath(base)
489 592 if realpath:
490 593 base = os.path.realpath(base)
491 594 self.base = base
492 595 self.mustaudit = audit
493 596 self.createmode = None
494 597 self._trustnlink = None
495 598
496 599 @property
497 600 def mustaudit(self):
498 601 return self._audit
499 602
500 603 @mustaudit.setter
501 604 def mustaudit(self, onoff):
502 605 self._audit = onoff
503 606 if onoff:
504 607 self.audit = pathutil.pathauditor(self.base)
505 608 else:
506 609 self.audit = util.always
507 610
508 611 @util.propertycache
509 612 def _cansymlink(self):
510 613 return util.checklink(self.base)
511 614
512 615 @util.propertycache
513 616 def _chmod(self):
514 617 return util.checkexec(self.base)
515 618
516 619 def _fixfilemode(self, name):
517 620 if self.createmode is None or not self._chmod:
518 621 return
519 622 os.chmod(name, self.createmode & 0o666)
520 623
521 624 def __call__(self, path, mode="r", text=False, atomictemp=False,
522 625 notindexed=False, backgroundclose=False, checkambig=False):
523 626 '''Open ``path`` file, which is relative to vfs root.
524 627
525 628 Newly created directories are marked as "not to be indexed by
526 629 the content indexing service", if ``notindexed`` is specified
527 630 for "write" mode access.
528 631
529 632 If ``backgroundclose`` is passed, the file may be closed asynchronously.
530 633 It can only be used if the ``self.backgroundclosing()`` context manager
531 634 is active. This should only be specified if the following criteria hold:
532 635
533 636 1. There is a potential for writing thousands of files. Unless you
534 637 are writing thousands of files, the performance benefits of
535 638 asynchronously closing files is not realized.
536 639 2. Files are opened exactly once for the ``backgroundclosing``
537 640 active duration and are therefore free of race conditions between
538 641 closing a file on a background thread and reopening it. (If the
539 642 file were opened multiple times, there could be unflushed data
540 643 because the original file handle hasn't been flushed/closed yet.)
541 644
542 645 ``checkambig`` argument is passed to atomictempfile (valid
543 646 only for writing), and is useful only if target file is
544 647 guarded by a lock (e.g. repo.lock or repo.wlock).
545 648 '''
546 649 if self._audit:
547 650 r = util.checkosfilename(path)
548 651 if r:
549 652 raise error.Abort("%s: %r" % (r, path))
550 653 self.audit(path)
551 654 f = self.join(path)
552 655
553 656 if not text and "b" not in mode:
554 657 mode += "b" # for that other OS
555 658
556 659 nlink = -1
557 660 if mode not in ('r', 'rb'):
558 661 dirname, basename = util.split(f)
559 662 # If basename is empty, then the path is malformed because it points
560 663 # to a directory. Let the posixfile() call below raise IOError.
561 664 if basename:
562 665 if atomictemp:
563 666 util.makedirs(dirname, self.createmode, notindexed)
564 667 return util.atomictempfile(f, mode, self.createmode,
565 668 checkambig=checkambig)
566 669 try:
567 670 if 'w' in mode:
568 671 util.unlink(f)
569 672 nlink = 0
570 673 else:
571 674 # nlinks() may behave differently for files on Windows
572 675 # shares if the file is open.
573 676 with util.posixfile(f):
574 677 nlink = util.nlinks(f)
575 678 if nlink < 1:
576 679 nlink = 2 # force mktempcopy (issue1922)
577 680 except (OSError, IOError) as e:
578 681 if e.errno != errno.ENOENT:
579 682 raise
580 683 nlink = 0
581 684 util.makedirs(dirname, self.createmode, notindexed)
582 685 if nlink > 0:
583 686 if self._trustnlink is None:
584 687 self._trustnlink = nlink > 1 or util.checknlink(f)
585 688 if nlink > 1 or not self._trustnlink:
586 689 util.rename(util.mktempcopy(f), f)
587 690 fp = util.posixfile(f, mode)
588 691 if nlink == 0:
589 692 self._fixfilemode(f)
590 693
591 694 if checkambig:
592 695 if mode in ('r', 'rb'):
593 696 raise error.Abort(_('implementation error: mode %s is not'
594 697 ' valid for checkambig=True') % mode)
595 698 fp = checkambigatclosing(fp)
596 699
597 700 if backgroundclose:
598 701 if not self._backgroundfilecloser:
599 702 raise error.Abort(_('backgroundclose can only be used when a '
600 703 'backgroundclosing context manager is active')
601 704 )
602 705
603 706 fp = delayclosedfile(fp, self._backgroundfilecloser)
604 707
605 708 return fp
606 709
607 710 def symlink(self, src, dst):
608 711 self.audit(dst)
609 712 linkname = self.join(dst)
610 713 try:
611 714 os.unlink(linkname)
612 715 except OSError:
613 716 pass
614 717
615 718 util.makedirs(os.path.dirname(linkname), self.createmode)
616 719
617 720 if self._cansymlink:
618 721 try:
619 722 os.symlink(src, linkname)
620 723 except OSError as err:
621 724 raise OSError(err.errno, _('could not symlink to %r: %s') %
622 725 (src, err.strerror), linkname)
623 726 else:
624 727 self.write(dst, src)
625 728
626 729 def join(self, path, *insidef):
627 730 if path:
628 731 return os.path.join(self.base, path, *insidef)
629 732 else:
630 733 return self.base
631 734
632 735 opener = vfs
633 736
634 737 class auditvfs(object):
635 738 def __init__(self, vfs):
636 739 self.vfs = vfs
637 740
638 741 @property
639 742 def mustaudit(self):
640 743 return self.vfs.mustaudit
641 744
642 745 @mustaudit.setter
643 746 def mustaudit(self, onoff):
644 747 self.vfs.mustaudit = onoff
645 748
646 749 @property
647 750 def options(self):
648 751 return self.vfs.options
649 752
650 753 @options.setter
651 754 def options(self, value):
652 755 self.vfs.options = value
653 756
654 757 class filtervfs(abstractvfs, auditvfs):
655 758 '''Wrapper vfs for filtering filenames with a function.'''
656 759
657 760 def __init__(self, vfs, filter):
658 761 auditvfs.__init__(self, vfs)
659 762 self._filter = filter
660 763
661 764 def __call__(self, path, *args, **kwargs):
662 765 return self.vfs(self._filter(path), *args, **kwargs)
663 766
664 767 def join(self, path, *insidef):
665 768 if path:
666 769 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
667 770 else:
668 771 return self.vfs.join(path)
669 772
670 773 filteropener = filtervfs
671 774
672 775 class readonlyvfs(abstractvfs, auditvfs):
673 776 '''Wrapper vfs preventing any writing.'''
674 777
675 778 def __init__(self, vfs):
676 779 auditvfs.__init__(self, vfs)
677 780
678 781 def __call__(self, path, mode='r', *args, **kw):
679 782 if mode not in ('r', 'rb'):
680 783 raise error.Abort(_('this vfs is read only'))
681 784 return self.vfs(path, mode, *args, **kw)
682 785
683 786 def join(self, path, *insidef):
684 787 return self.vfs.join(path, *insidef)
685 788
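A short sketch of how these wrappers compose; the store path is only an assumption for illustration:

    store = vfs('/srv/repos/project/.hg/store')
    lowered = filtervfs(store, lambda p: p.lower())   # rewrite names before opening
    frozen = readonlyvfs(store)
    data = frozen('00changelog.i', 'rb').read()       # reads pass through to the wrapped vfs
    # frozen('00changelog.i', 'wb')                   # would raise error.Abort('this vfs is read only')
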
686 789 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
687 790 '''yield every hg repository under path, always recursively.
688 791 The recurse flag will only control recursion into repo working dirs'''
689 792 def errhandler(err):
690 793 if err.filename == path:
691 794 raise err
692 795 samestat = getattr(os.path, 'samestat', None)
693 796 if followsym and samestat is not None:
694 797 def adddir(dirlst, dirname):
695 798 match = False
696 799 dirstat = os.stat(dirname)
697 800 for lstdirstat in dirlst:
698 801 if samestat(dirstat, lstdirstat):
699 802 match = True
700 803 break
701 804 if not match:
702 805 dirlst.append(dirstat)
703 806 return not match
704 807 else:
705 808 followsym = False
706 809
707 810 if (seen_dirs is None) and followsym:
708 811 seen_dirs = []
709 812 adddir(seen_dirs, path)
710 813 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
711 814 dirs.sort()
712 815 if '.hg' in dirs:
713 816 yield root # found a repository
714 817 qroot = os.path.join(root, '.hg', 'patches')
715 818 if os.path.isdir(os.path.join(qroot, '.hg')):
716 819 yield qroot # we have a patch queue repo here
717 820 if recurse:
718 821 # avoid recursing inside the .hg directory
719 822 dirs.remove('.hg')
720 823 else:
721 824 dirs[:] = [] # don't descend further
722 825 elif followsym:
723 826 newdirs = []
724 827 for d in dirs:
725 828 fname = os.path.join(root, d)
726 829 if adddir(seen_dirs, fname):
727 830 if os.path.islink(fname):
728 831 for hgname in walkrepos(fname, True, seen_dirs):
729 832 yield hgname
730 833 else:
731 834 newdirs.append(d)
732 835 dirs[:] = newdirs
733 836
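A sketch of scanning a directory tree for repositories with ``walkrepos``; the root path is an assumption:

    for repopath in walkrepos('/srv/repos', followsym=True, recurse=True):
        print(repopath)   # every directory containing .hg, plus any mq patch queue repos
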
734 837 def osrcpath():
735 838 '''return default os-specific hgrc search path'''
736 839 path = []
737 840 defaultpath = os.path.join(util.datapath, 'default.d')
738 841 if os.path.isdir(defaultpath):
739 842 for f, kind in osutil.listdir(defaultpath):
740 843 if f.endswith('.rc'):
741 844 path.append(os.path.join(defaultpath, f))
742 845 path.extend(systemrcpath())
743 846 path.extend(userrcpath())
744 847 path = [os.path.normpath(f) for f in path]
745 848 return path
746 849
747 850 _rcpath = None
748 851
749 852 def rcpath():
750 853 '''return hgrc search path. if env var HGRCPATH is set, use it.
751 854 for each item in path, if directory, use files ending in .rc,
752 855 else use item.
753 856 make HGRCPATH empty to only look in .hg/hgrc of current repo.
754 857 if no HGRCPATH, use default os-specific path.'''
755 858 global _rcpath
756 859 if _rcpath is None:
757 860 if 'HGRCPATH' in encoding.environ:
758 861 _rcpath = []
759 862 for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
760 863 if not p:
761 864 continue
762 865 p = util.expandpath(p)
763 866 if os.path.isdir(p):
764 867 for f, kind in osutil.listdir(p):
765 868 if f.endswith('.rc'):
766 869 _rcpath.append(os.path.join(p, f))
767 870 else:
768 871 _rcpath.append(p)
769 872 else:
770 873 _rcpath = osrcpath()
771 874 return _rcpath
772 875
773 876 def intrev(rev):
774 877 """Return integer for a given revision that can be used in comparison or
775 878 arithmetic operation"""
776 879 if rev is None:
777 880 return wdirrev
778 881 return rev
779 882
780 883 def revsingle(repo, revspec, default='.'):
781 884 if not revspec and revspec != 0:
782 885 return repo[default]
783 886
784 887 l = revrange(repo, [revspec])
785 888 if not l:
786 889 raise error.Abort(_('empty revision set'))
787 890 return repo[l.last()]
788 891
789 892 def _pairspec(revspec):
790 893 tree = revset.parse(revspec)
791 894 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
792 895
793 896 def revpair(repo, revs):
794 897 if not revs:
795 898 return repo.dirstate.p1(), None
796 899
797 900 l = revrange(repo, revs)
798 901
799 902 if not l:
800 903 first = second = None
801 904 elif l.isascending():
802 905 first = l.min()
803 906 second = l.max()
804 907 elif l.isdescending():
805 908 first = l.max()
806 909 second = l.min()
807 910 else:
808 911 first = l.first()
809 912 second = l.last()
810 913
811 914 if first is None:
812 915 raise error.Abort(_('empty revision range'))
813 916 if (first == second and len(revs) >= 2
814 917 and not all(revrange(repo, [r]) for r in revs)):
815 918 raise error.Abort(_('empty revision on one side of range'))
816 919
817 920 # if top-level is range expression, the result must always be a pair
818 921 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
819 922 return repo.lookup(first), None
820 923
821 924 return repo.lookup(first), repo.lookup(second)
822 925
823 926 def revrange(repo, specs):
824 927 """Execute 1 to many revsets and return the union.
825 928
826 929 This is the preferred mechanism for executing revsets using user-specified
827 930 config options, such as revset aliases.
828 931
829 932 The revsets specified by ``specs`` will be executed via a chained ``OR``
830 933 expression. If ``specs`` is empty, an empty result is returned.
831 934
832 935 ``specs`` can contain integers, in which case they are assumed to be
833 936 revision numbers.
834 937
835 938 It is assumed the revsets are already formatted. If you have arguments
836 939 that need to be expanded in the revset, call ``revset.formatspec()``
837 940 and pass the result as an element of ``specs``.
838 941
839 942 Specifying a single revset is allowed.
840 943
841 944 Returns a ``revset.abstractsmartset`` which is a list-like interface over
842 945 integer revisions.
843 946 """
844 947 allspecs = []
845 948 for spec in specs:
846 949 if isinstance(spec, int):
847 950 spec = revset.formatspec('rev(%d)', spec)
848 951 allspecs.append(spec)
849 952 m = revset.matchany(repo.ui, allspecs, repo)
850 953 return m(repo)
851 954
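A sketch of how the three revision helpers above are typically used; ``repo`` is assumed to be an open repository and the revision specs are purely illustrative:

    ctx = revsingle(repo, 'tip')                 # a single changectx
    node1, node2 = revpair(repo, ['1.0::2.0'])   # endpoints of a range (node2 may be None)
    revs = revrange(repo, ['draft()', 42])       # union of a revset and a plain revision number
    for rev in revs:
        print(repo[rev])
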
852 955 def meaningfulparents(repo, ctx):
853 956 """Return list of meaningful (or all if debug) parentrevs for rev.
854 957
855 958 For merges (two non-nullrev revisions) both parents are meaningful.
856 959 Otherwise the first parent revision is considered meaningful if it
857 960 is not the preceding revision.
858 961 """
859 962 parents = ctx.parents()
860 963 if len(parents) > 1:
861 964 return parents
862 965 if repo.ui.debugflag:
863 966 return [parents[0], repo['null']]
864 967 if parents[0].rev() >= intrev(ctx.rev()) - 1:
865 968 return []
866 969 return parents
867 970
868 971 def expandpats(pats):
869 972 '''Expand bare globs when running on windows.
870 973 On posix we assume it has already been done by sh.'''
871 974 if not util.expandglobs:
872 975 return list(pats)
873 976 ret = []
874 977 for kindpat in pats:
875 978 kind, pat = matchmod._patsplit(kindpat, None)
876 979 if kind is None:
877 980 try:
878 981 globbed = glob.glob(pat)
879 982 except re.error:
880 983 globbed = [pat]
881 984 if globbed:
882 985 ret.extend(globbed)
883 986 continue
884 987 ret.append(kindpat)
885 988 return ret
886 989
887 990 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
888 991 badfn=None):
889 992 '''Return a matcher and the patterns that were used.
890 993 The matcher will warn about bad matches, unless an alternate badfn callback
891 994 is provided.'''
892 995 if pats == ("",):
893 996 pats = []
894 997 if opts is None:
895 998 opts = {}
896 999 if not globbed and default == 'relpath':
897 1000 pats = expandpats(pats or [])
898 1001
899 1002 def bad(f, msg):
900 1003 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
901 1004
902 1005 if badfn is None:
903 1006 badfn = bad
904 1007
905 1008 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
906 1009 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
907 1010
908 1011 if m.always():
909 1012 pats = []
910 1013 return m, pats
911 1014
912 1015 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
913 1016 badfn=None):
914 1017 '''Return a matcher that will warn about bad matches.'''
915 1018 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
916 1019
917 1020 def matchall(repo):
918 1021 '''Return a matcher that will efficiently match everything.'''
919 1022 return matchmod.always(repo.root, repo.getcwd())
920 1023
921 1024 def matchfiles(repo, files, badfn=None):
922 1025 '''Return a matcher that will efficiently match exactly these files.'''
923 1026 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
924 1027
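A sketch of the matcher helpers; ``repo`` is assumed to be open and the file names are illustrative. Matchers are callable on repo-root-relative paths and return a boolean:

    m = matchfiles(repo, ['README', 'setup.py'])            # exact matcher
    m('README')                                             # True
    m('docs/index.txt')                                     # False
    m2 = match(repo[None], pats=['glob:*.py'], opts={})     # pattern matcher against the working copy
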
925 1028 def origpath(ui, repo, filepath):
926 1029 '''customize where .orig files are created
927 1030
928 1031 Fetch user defined path from config file: [ui] origbackuppath = <path>
929 1032 Fall back to default (filepath) if not specified
930 1033 '''
931 1034 origbackuppath = ui.config('ui', 'origbackuppath', None)
932 1035 if origbackuppath is None:
933 1036 return filepath + ".orig"
934 1037
935 1038 filepathfromroot = os.path.relpath(filepath, start=repo.root)
936 1039 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
937 1040
938 1041 origbackupdir = repo.vfs.dirname(fullorigpath)
939 1042 if not repo.vfs.exists(origbackupdir):
940 1043 ui.note(_('creating directory: %s\n') % origbackupdir)
941 1044 util.makedirs(origbackupdir)
942 1045
943 1046 return fullorigpath + ".orig"
944 1047
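``origpath`` only changes behaviour when ``ui.origbackuppath`` is set; a sketch with an assumed configuration and path:

    # hgrc:
    #   [ui]
    #   origbackuppath = .hg/origbackups
    backup = origpath(ui, repo, repo.wjoin('src/module.py'))
    # -> <reporoot>/.hg/origbackups/src/module.py.orig rather than src/module.py.orig
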
945 1048 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
946 1049 if opts is None:
947 1050 opts = {}
948 1051 m = matcher
949 1052 if dry_run is None:
950 1053 dry_run = opts.get('dry_run')
951 1054 if similarity is None:
952 1055 similarity = float(opts.get('similarity') or 0)
953 1056
954 1057 ret = 0
955 1058 join = lambda f: os.path.join(prefix, f)
956 1059
957 1060 wctx = repo[None]
958 1061 for subpath in sorted(wctx.substate):
959 1062 submatch = matchmod.subdirmatcher(subpath, m)
960 1063 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
961 1064 sub = wctx.sub(subpath)
962 1065 try:
963 1066 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
964 1067 ret = 1
965 1068 except error.LookupError:
966 1069 repo.ui.status(_("skipping missing subrepository: %s\n")
967 1070 % join(subpath))
968 1071
969 1072 rejected = []
970 1073 def badfn(f, msg):
971 1074 if f in m.files():
972 1075 m.bad(f, msg)
973 1076 rejected.append(f)
974 1077
975 1078 badmatch = matchmod.badmatch(m, badfn)
976 1079 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
977 1080 badmatch)
978 1081
979 1082 unknownset = set(unknown + forgotten)
980 1083 toprint = unknownset.copy()
981 1084 toprint.update(deleted)
982 1085 for abs in sorted(toprint):
983 1086 if repo.ui.verbose or not m.exact(abs):
984 1087 if abs in unknownset:
985 1088 status = _('adding %s\n') % m.uipath(abs)
986 1089 else:
987 1090 status = _('removing %s\n') % m.uipath(abs)
988 1091 repo.ui.status(status)
989 1092
990 1093 renames = _findrenames(repo, m, added + unknown, removed + deleted,
991 1094 similarity)
992 1095
993 1096 if not dry_run:
994 1097 _markchanges(repo, unknown + forgotten, deleted, renames)
995 1098
996 1099 for f in rejected:
997 1100 if f in m.files():
998 1101 return 1
999 1102 return ret
1000 1103
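A sketch of driving ``addremove`` programmatically; ``repo`` is assumed to be open, and ``dry_run`` keeps the dirstate untouched while still reporting what would change:

    m = matchall(repo)
    ret = addremove(repo, m, prefix='', opts={'dry_run': True, 'similarity': 50})
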
1001 1104 def marktouched(repo, files, similarity=0.0):
1002 1105 '''Assert that files have somehow been operated upon. Files are relative to
1003 1106 the repo root.'''
1004 1107 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1005 1108 rejected = []
1006 1109
1007 1110 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1008 1111
1009 1112 if repo.ui.verbose:
1010 1113 unknownset = set(unknown + forgotten)
1011 1114 toprint = unknownset.copy()
1012 1115 toprint.update(deleted)
1013 1116 for abs in sorted(toprint):
1014 1117 if abs in unknownset:
1015 1118 status = _('adding %s\n') % abs
1016 1119 else:
1017 1120 status = _('removing %s\n') % abs
1018 1121 repo.ui.status(status)
1019 1122
1020 1123 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1021 1124 similarity)
1022 1125
1023 1126 _markchanges(repo, unknown + forgotten, deleted, renames)
1024 1127
1025 1128 for f in rejected:
1026 1129 if f in m.files():
1027 1130 return 1
1028 1131 return 0
1029 1132
1030 1133 def _interestingfiles(repo, matcher):
1031 1134 '''Walk dirstate with matcher, looking for files that addremove would care
1032 1135 about.
1033 1136
1034 1137 This is different from dirstate.status because it doesn't care about
1035 1138 whether files are modified or clean.'''
1036 1139 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1037 1140 audit_path = pathutil.pathauditor(repo.root)
1038 1141
1039 1142 ctx = repo[None]
1040 1143 dirstate = repo.dirstate
1041 1144 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
1042 1145 full=False)
1043 1146 for abs, st in walkresults.iteritems():
1044 1147 dstate = dirstate[abs]
1045 1148 if dstate == '?' and audit_path.check(abs):
1046 1149 unknown.append(abs)
1047 1150 elif dstate != 'r' and not st:
1048 1151 deleted.append(abs)
1049 1152 elif dstate == 'r' and st:
1050 1153 forgotten.append(abs)
1051 1154 # for finding renames
1052 1155 elif dstate == 'r' and not st:
1053 1156 removed.append(abs)
1054 1157 elif dstate == 'a':
1055 1158 added.append(abs)
1056 1159
1057 1160 return added, unknown, deleted, removed, forgotten
1058 1161
1059 1162 def _findrenames(repo, matcher, added, removed, similarity):
1060 1163 '''Find renames from removed files to added ones.'''
1061 1164 renames = {}
1062 1165 if similarity > 0:
1063 1166 for old, new, score in similar.findrenames(repo, added, removed,
1064 1167 similarity):
1065 1168 if (repo.ui.verbose or not matcher.exact(old)
1066 1169 or not matcher.exact(new)):
1067 1170 repo.ui.status(_('recording removal of %s as rename to %s '
1068 1171 '(%d%% similar)\n') %
1069 1172 (matcher.rel(old), matcher.rel(new),
1070 1173 score * 100))
1071 1174 renames[new] = old
1072 1175 return renames
1073 1176
1074 1177 def _markchanges(repo, unknown, deleted, renames):
1075 1178 '''Marks the files in unknown as added, the files in deleted as removed,
1076 1179 and the files in renames as copied.'''
1077 1180 wctx = repo[None]
1078 1181 with repo.wlock():
1079 1182 wctx.forget(deleted)
1080 1183 wctx.add(unknown)
1081 1184 for new, old in renames.iteritems():
1082 1185 wctx.copy(old, new)
1083 1186
1084 1187 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1085 1188 """Update the dirstate to reflect the intent of copying src to dst. For
1086 1189 different reasons it might not end up with dst being marked as copied from src.
1087 1190 """
1088 1191 origsrc = repo.dirstate.copied(src) or src
1089 1192 if dst == origsrc: # copying back a copy?
1090 1193 if repo.dirstate[dst] not in 'mn' and not dryrun:
1091 1194 repo.dirstate.normallookup(dst)
1092 1195 else:
1093 1196 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1094 1197 if not ui.quiet:
1095 1198 ui.warn(_("%s has not been committed yet, so no copy "
1096 1199 "data will be stored for %s.\n")
1097 1200 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1098 1201 if repo.dirstate[dst] in '?r' and not dryrun:
1099 1202 wctx.add([dst])
1100 1203 elif not dryrun:
1101 1204 wctx.copy(origsrc, dst)
1102 1205
1103 1206 def readrequires(opener, supported):
1104 1207 '''Reads and parses .hg/requires and checks if all entries found
1105 1208 are in the list of supported features.'''
1106 1209 requirements = set(opener.read("requires").splitlines())
1107 1210 missings = []
1108 1211 for r in requirements:
1109 1212 if r not in supported:
1110 1213 if not r or not r[0].isalnum():
1111 1214 raise error.RequirementError(_(".hg/requires file is corrupt"))
1112 1215 missings.append(r)
1113 1216 missings.sort()
1114 1217 if missings:
1115 1218 raise error.RequirementError(
1116 1219 _("repository requires features unknown to this Mercurial: %s")
1117 1220 % " ".join(missings),
1118 1221 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1119 1222 " for more information"))
1120 1223 return requirements
1121 1224
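A sketch of the requires round trip; the set of supported feature names below is illustrative, not authoritative:

    supported = set(['revlogv1', 'store', 'fncache', 'dotencode', 'generaldelta'])
    requirements = readrequires(repo.vfs, supported)   # raises RequirementError on unknown entries
    writerequires(repo.vfs, requirements | set(['generaldelta']))
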
1122 1225 def writerequires(opener, requirements):
1123 1226 with opener('requires', 'w') as fp:
1124 1227 for r in sorted(requirements):
1125 1228 fp.write("%s\n" % r)
1126 1229
1127 1230 class filecachesubentry(object):
1128 1231 def __init__(self, path, stat):
1129 1232 self.path = path
1130 1233 self.cachestat = None
1131 1234 self._cacheable = None
1132 1235
1133 1236 if stat:
1134 1237 self.cachestat = filecachesubentry.stat(self.path)
1135 1238
1136 1239 if self.cachestat:
1137 1240 self._cacheable = self.cachestat.cacheable()
1138 1241 else:
1139 1242 # None means we don't know yet
1140 1243 self._cacheable = None
1141 1244
1142 1245 def refresh(self):
1143 1246 if self.cacheable():
1144 1247 self.cachestat = filecachesubentry.stat(self.path)
1145 1248
1146 1249 def cacheable(self):
1147 1250 if self._cacheable is not None:
1148 1251 return self._cacheable
1149 1252
1150 1253 # we don't know yet, assume it is for now
1151 1254 return True
1152 1255
1153 1256 def changed(self):
1154 1257 # no point in going further if we can't cache it
1155 1258 if not self.cacheable():
1156 1259 return True
1157 1260
1158 1261 newstat = filecachesubentry.stat(self.path)
1159 1262
1160 1263 # we may not know if it's cacheable yet, check again now
1161 1264 if newstat and self._cacheable is None:
1162 1265 self._cacheable = newstat.cacheable()
1163 1266
1164 1267 # check again
1165 1268 if not self._cacheable:
1166 1269 return True
1167 1270
1168 1271 if self.cachestat != newstat:
1169 1272 self.cachestat = newstat
1170 1273 return True
1171 1274 else:
1172 1275 return False
1173 1276
1174 1277 @staticmethod
1175 1278 def stat(path):
1176 1279 try:
1177 1280 return util.cachestat(path)
1178 1281 except OSError as e:
1179 1282 if e.errno != errno.ENOENT:
1180 1283 raise
1181 1284
1182 1285 class filecacheentry(object):
1183 1286 def __init__(self, paths, stat=True):
1184 1287 self._entries = []
1185 1288 for path in paths:
1186 1289 self._entries.append(filecachesubentry(path, stat))
1187 1290
1188 1291 def changed(self):
1189 1292 '''true if any entry has changed'''
1190 1293 for entry in self._entries:
1191 1294 if entry.changed():
1192 1295 return True
1193 1296 return False
1194 1297
1195 1298 def refresh(self):
1196 1299 for entry in self._entries:
1197 1300 entry.refresh()
1198 1301
1199 1302 class filecache(object):
1200 1303 '''A property-like decorator that tracks files under .hg/ for updates.
1201 1304
1202 1305 Records stat info when called in _filecache.
1203 1306
1204 1307 On subsequent calls, compares old stat info with new info, and recreates the
1205 1308 object when any of the files changes, updating the new stat info in
1206 1309 _filecache.
1207 1310
1208 1311 Mercurial either atomically renames or appends to files under .hg,
1209 1312 so to ensure the cache is reliable we need the filesystem to be able
1210 1313 to tell us if a file has been replaced. If it can't, we fall back to
1211 1314 recreating the object on every call (essentially the same behavior as
1212 1315 propertycache).
1213 1316
1214 1317 '''
1215 1318 def __init__(self, *paths):
1216 1319 self.paths = paths
1217 1320
1218 1321 def join(self, obj, fname):
1219 1322 """Used to compute the runtime path of a cached file.
1220 1323
1221 1324 Users should subclass filecache and provide their own version of this
1222 1325 function to call the appropriate join function on 'obj' (an instance
1223 1326 of the class whose member function was decorated).
1224 1327 """
1225 1328 return obj.join(fname)
1226 1329
1227 1330 def __call__(self, func):
1228 1331 self.func = func
1229 1332 self.name = func.__name__
1230 1333 return self
1231 1334
1232 1335 def __get__(self, obj, type=None):
1233 1336 # if accessed on the class, return the descriptor itself.
1234 1337 if obj is None:
1235 1338 return self
1236 1339 # do we need to check if the file changed?
1237 1340 if self.name in obj.__dict__:
1238 1341 assert self.name in obj._filecache, self.name
1239 1342 return obj.__dict__[self.name]
1240 1343
1241 1344 entry = obj._filecache.get(self.name)
1242 1345
1243 1346 if entry:
1244 1347 if entry.changed():
1245 1348 entry.obj = self.func(obj)
1246 1349 else:
1247 1350 paths = [self.join(obj, path) for path in self.paths]
1248 1351
1249 1352 # We stat -before- creating the object so our cache doesn't lie if
1250 1353 # a writer modified the file between the time we read and stat it
1251 1354 entry = filecacheentry(paths, True)
1252 1355 entry.obj = self.func(obj)
1253 1356
1254 1357 obj._filecache[self.name] = entry
1255 1358
1256 1359 obj.__dict__[self.name] = entry.obj
1257 1360 return entry.obj
1258 1361
1259 1362 def __set__(self, obj, value):
1260 1363 if self.name not in obj._filecache:
1261 1364 # we add an entry for the missing value because X in __dict__
1262 1365 # implies X in _filecache
1263 1366 paths = [self.join(obj, path) for path in self.paths]
1264 1367 ce = filecacheentry(paths, False)
1265 1368 obj._filecache[self.name] = ce
1266 1369 else:
1267 1370 ce = obj._filecache[self.name]
1268 1371
1269 1372 ce.obj = value # update cached copy
1270 1373 obj.__dict__[self.name] = value # update copy returned by obj.x
1271 1374
1272 1375 def __delete__(self, obj):
1273 1376 try:
1274 1377 del obj.__dict__[self.name]
1275 1378 except KeyError:
1276 1379 raise AttributeError(self.name)
1277 1380
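A sketch of how a consumer wires up ``filecache``. ``repofilecache``, ``somerepo`` and ``parsebookmarks`` are hypothetical names, but the ``_filecache`` dict and the ``join`` override mirror the pattern localrepo uses:

    class repofilecache(filecache):
        """filecache for entries living directly under .hg (hypothetical)"""
        def join(self, obj, fname):
            return obj.vfs.join(fname)

    class somerepo(object):
        def __init__(self, repovfs):
            self.vfs = repovfs
            self._filecache = {}   # required by filecache.__get__/__set__

        @repofilecache('bookmarks')
        def bookmarks(self):
            # recomputed only when .hg/bookmarks changes on disk
            return parsebookmarks(self.vfs.read('bookmarks'))   # hypothetical parser
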
1278 1381 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1279 1382 if lock is None:
1280 1383 raise error.LockInheritanceContractViolation(
1281 1384 'lock can only be inherited while held')
1282 1385 if environ is None:
1283 1386 environ = {}
1284 1387 with lock.inherit() as locker:
1285 1388 environ[envvar] = locker
1286 1389 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1287 1390
1288 1391 def wlocksub(repo, cmd, *args, **kwargs):
1289 1392 """run cmd as a subprocess that allows inheriting repo's wlock
1290 1393
1291 1394 This can only be called while the wlock is held. This takes all the
1292 1395 arguments that ui.system does, and returns the exit code of the
1293 1396 subprocess."""
1294 1397 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1295 1398 **kwargs)
1296 1399
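A minimal sketch of ``wlocksub``; the command string is arbitrary and ``repo`` is assumed to be open with the wlock held, as the docstring requires:

    with repo.wlock():
        # the child process sees HG_WLOCK_LOCKER and may reacquire the inherited wlock
        rc = wlocksub(repo, 'hg debuglocks')
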
1297 1400 def gdinitconfig(ui):
1298 1401 """helper function to know if a repo should be created as general delta
1299 1402 """
1300 1403 # experimental config: format.generaldelta
1301 1404 return (ui.configbool('format', 'generaldelta', False)
1302 1405 or ui.configbool('format', 'usegeneraldelta', True))
1303 1406
1304 1407 def gddeltaconfig(ui):
1305 1408 """helper function to know if incoming delta should be optimised
1306 1409 """
1307 1410 # experimental config: format.generaldelta
1308 1411 return ui.configbool('format', 'generaldelta', False)
1309 1412
1310 1413 class closewrapbase(object):
1311 1414 """Base class of wrapper, which hooks closing
1312 1415
1313 1416 Do not instantiate outside of the vfs layer.
1314 1417 """
1315 1418 def __init__(self, fh):
1316 1419 object.__setattr__(self, '_origfh', fh)
1317 1420
1318 1421 def __getattr__(self, attr):
1319 1422 return getattr(self._origfh, attr)
1320 1423
1321 1424 def __setattr__(self, attr, value):
1322 1425 return setattr(self._origfh, attr, value)
1323 1426
1324 1427 def __delattr__(self, attr):
1325 1428 return delattr(self._origfh, attr)
1326 1429
1327 1430 def __enter__(self):
1328 1431 return self._origfh.__enter__()
1329 1432
1330 1433 def __exit__(self, exc_type, exc_value, exc_tb):
1331 1434 raise NotImplementedError('attempted instantiating ' + str(type(self)))
1332 1435
1333 1436 def close(self):
1334 1437 raise NotImplementedError('attempted instantiating ' + str(type(self)))
1335 1438
1336 1439 class delayclosedfile(closewrapbase):
1337 1440 """Proxy for a file object whose close is delayed.
1338 1441
1339 1442 Do not instantiate outside of the vfs layer.
1340 1443 """
1341 1444 def __init__(self, fh, closer):
1342 1445 super(delayclosedfile, self).__init__(fh)
1343 1446 object.__setattr__(self, '_closer', closer)
1344 1447
1345 1448 def __exit__(self, exc_type, exc_value, exc_tb):
1346 1449 self._closer.close(self._origfh)
1347 1450
1348 1451 def close(self):
1349 1452 self._closer.close(self._origfh)
1350 1453
1351 1454 class backgroundfilecloser(object):
1352 1455 """Coordinates background closing of file handles on multiple threads."""
1353 1456 def __init__(self, ui, expectedcount=-1):
1354 1457 self._running = False
1355 1458 self._entered = False
1356 1459 self._threads = []
1357 1460 self._threadexception = None
1358 1461
1359 1462 # Only Windows/NTFS has slow file closing. So only enable by default
1360 1463 # on that platform. But allow it to be enabled elsewhere for testing.
1361 1464 defaultenabled = os.name == 'nt'
1362 1465 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1363 1466
1364 1467 if not enabled:
1365 1468 return
1366 1469
1367 1470 # There is overhead to starting and stopping the background threads.
1368 1471 # Don't do background processing unless the file count is large enough
1369 1472 # to justify it.
1370 1473 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1371 1474 2048)
1372 1475 # FUTURE dynamically start background threads after minfilecount closes.
1373 1476 # (We don't currently have any callers that don't know their file count)
1374 1477 if expectedcount > 0 and expectedcount < minfilecount:
1375 1478 return
1376 1479
1377 1480 # Windows defaults to a limit of 512 open files. A buffer of 128
1378 1481 # should give us enough headway.
1379 1482 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1380 1483 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1381 1484
1382 1485 ui.debug('starting %d threads for background file closing\n' %
1383 1486 threadcount)
1384 1487
1385 1488 self._queue = util.queue(maxsize=maxqueue)
1386 1489 self._running = True
1387 1490
1388 1491 for i in range(threadcount):
1389 1492 t = threading.Thread(target=self._worker, name='backgroundcloser')
1390 1493 self._threads.append(t)
1391 1494 t.start()
1392 1495
1393 1496 def __enter__(self):
1394 1497 self._entered = True
1395 1498 return self
1396 1499
1397 1500 def __exit__(self, exc_type, exc_value, exc_tb):
1398 1501 self._running = False
1399 1502
1400 1503 # Wait for threads to finish closing so open files don't linger for
1401 1504 # longer than lifetime of context manager.
1402 1505 for t in self._threads:
1403 1506 t.join()
1404 1507
1405 1508 def _worker(self):
1406 1509 """Main routine for worker thread."""
1407 1510 while True:
1408 1511 try:
1409 1512 fh = self._queue.get(block=True, timeout=0.100)
1410 1513 # Need to catch exceptions or the thread will terminate and
1411 1514 # we could orphan file descriptors.
1412 1515 try:
1413 1516 fh.close()
1414 1517 except Exception as e:
1415 1518 # Stash so can re-raise from main thread later.
1416 1519 self._threadexception = e
1417 1520 except util.empty:
1418 1521 if not self._running:
1419 1522 break
1420 1523
1421 1524 def close(self, fh):
1422 1525 """Schedule a file for closing."""
1423 1526 if not self._entered:
1424 1527 raise error.Abort(_('can only call close() when context manager '
1425 1528 'active'))
1426 1529
1427 1530 # If a background thread encountered an exception, raise now so we fail
1428 1531 # fast. Otherwise we may potentially go on for minutes until the error
1429 1532 # is acted on.
1430 1533 if self._threadexception:
1431 1534 e = self._threadexception
1432 1535 self._threadexception = None
1433 1536 raise e
1434 1537
1435 1538 # If we're not actively running, close synchronously.
1436 1539 if not self._running:
1437 1540 fh.close()
1438 1541 return
1439 1542
1440 1543 self._queue.put(fh, block=True, timeout=None)
1441 1544
1442 1545 class checkambigatclosing(closewrapbase):
1443 1546 """Proxy for a file object, to avoid ambiguity of file stat
1444 1547
1445 1548 See also util.filestat for details about "ambiguity of file stat".
1446 1549
1447 1550 This proxy is useful only if the target file is guarded by a
1448 1551 lock (e.g. repo.lock or repo.wlock)
1449 1552
1450 1553 Do not instantiate outside of the vfs layer.
1451 1554 """
1452 1555 def __init__(self, fh):
1453 1556 super(checkambigatclosing, self).__init__(fh)
1454 1557 object.__setattr__(self, '_oldstat', util.filestat(fh.name))
1455 1558
1456 1559 def _checkambig(self):
1457 1560 oldstat = self._oldstat
1458 1561 if oldstat.stat:
1459 1562 newstat = util.filestat(self._origfh.name)
1460 1563 if newstat.isambig(oldstat):
1461 1564 # stat of changed file is ambiguous to original one
1462 1565 newstat.avoidambig(self._origfh.name, oldstat)
1463 1566
1464 1567 def __exit__(self, exc_type, exc_value, exc_tb):
1465 1568 self._origfh.__exit__(exc_type, exc_value, exc_tb)
1466 1569 self._checkambig()
1467 1570
1468 1571 def close(self):
1469 1572 self._origfh.close()
1470 1573 self._checkambig()
@@ -1,176 +1,179 b''
1 1
2 2 $ cat << EOF > buggylocking.py
3 3 > """A small extension that tests our developer warnings
4 4 > """
5 5 >
6 6 > from mercurial import cmdutil, repair, revset
7 7 >
8 8 > cmdtable = {}
9 9 > command = cmdutil.command(cmdtable)
10 10 >
11 11 > @command('buggylocking', [], '')
12 12 > def buggylocking(ui, repo):
13 13 > lo = repo.lock()
14 14 > wl = repo.wlock()
15 15 > wl.release()
16 16 > lo.release()
17 17 >
18 18 > @command('buggytransaction', [], '')
19 19 > def buggylocking(ui, repo):
20 20 > tr = repo.transaction('buggy')
21 21 > # make sure we roll back the transaction as we don't want to rely on the __del__
22 22 > tr.release()
23 23 >
24 24 > @command('properlocking', [], '')
25 25 > def properlocking(ui, repo):
26 26 > """check that reentrance is fine"""
27 27 > wl = repo.wlock()
28 28 > lo = repo.lock()
29 29 > tr = repo.transaction('proper')
30 30 > tr2 = repo.transaction('proper')
31 31 > lo2 = repo.lock()
32 32 > wl2 = repo.wlock()
33 33 > wl2.release()
34 34 > lo2.release()
35 35 > tr2.close()
36 36 > tr.close()
37 37 > lo.release()
38 38 > wl.release()
39 39 >
40 40 > @command('nowaitlocking', [], '')
41 41 > def nowaitlocking(ui, repo):
42 42 > lo = repo.lock()
43 43 > wl = repo.wlock(wait=False)
44 44 > wl.release()
45 45 > lo.release()
46 46 >
47 47 > @command('stripintr', [], '')
48 48 > def stripintr(ui, repo):
49 49 > lo = repo.lock()
50 50 > tr = repo.transaction('foobar')
51 51 > try:
52 52 > repair.strip(repo.ui, repo, [repo['.'].node()])
53 53 > finally:
54 54 > lo.release()
55 55 > @command('oldanddeprecated', [], '')
56 56 > def oldanddeprecated(ui, repo):
57 57 > """test deprecation warning API"""
58 58 > def foobar(ui):
59 59 > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
60 60 > foobar(ui)
61 61 >
62 62 > def oldstylerevset(repo, subset, x):
63 63 > return list(subset)
64 64 >
65 65 > revset.symbols['oldstyle'] = oldstylerevset
66 66 > EOF
67 67
68 68 $ cat << EOF >> $HGRCPATH
69 69 > [extensions]
70 70 > buggylocking=$TESTTMP/buggylocking.py
71 71 > mock=$TESTDIR/mockblackbox.py
72 72 > blackbox=
73 73 > [devel]
74 74 > all-warnings=1
75 75 > EOF
76 76
77 77 $ hg init lock-checker
78 78 $ cd lock-checker
79 79 $ hg buggylocking
80 80 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
81 81 $ cat << EOF >> $HGRCPATH
82 82 > [devel]
83 83 > all=0
84 84 > check-locks=1
85 85 > EOF
86 86 $ hg buggylocking
87 87 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
88 88 $ hg buggylocking --traceback
89 89 devel-warn: "wlock" acquired after "lock" at:
90 90 */hg:* in * (glob)
91 91 */mercurial/dispatch.py:* in run (glob)
92 92 */mercurial/dispatch.py:* in dispatch (glob)
93 93 */mercurial/dispatch.py:* in _runcatch (glob)
94 94 */mercurial/dispatch.py:* in callcatch (glob)
95 */mercurial/scmutil.py* in callcatch (glob)
95 96 */mercurial/dispatch.py:* in _runcatchfunc (glob)
96 97 */mercurial/dispatch.py:* in _dispatch (glob)
97 98 */mercurial/dispatch.py:* in runcommand (glob)
98 99 */mercurial/dispatch.py:* in _runcommand (glob)
99 100 */mercurial/dispatch.py:* in <lambda> (glob)
100 101 */mercurial/util.py:* in check (glob)
101 102 $TESTTMP/buggylocking.py:* in buggylocking (glob)
102 103 $ hg properlocking
103 104 $ hg nowaitlocking
104 105
105 106 $ echo a > a
106 107 $ hg add a
107 108 $ hg commit -m a
108 109 $ hg stripintr
109 110 saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
110 111 abort: programming error: cannot strip from inside a transaction
111 112 (contact your extension maintainer)
112 113 [255]
113 114
114 115 $ hg log -r "oldstyle()" -T '{rev}\n'
115 116 devel-warn: revset "oldstyle" uses list instead of smartset
116 117 (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
117 118 0
118 119 $ hg oldanddeprecated
119 120 devel-warn: foorbar is deprecated, go shopping
120 121 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
121 122
122 123 $ hg oldanddeprecated --traceback
123 124 devel-warn: foorbar is deprecated, go shopping
124 125 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
125 126 */hg:* in <module> (glob)
126 127 */mercurial/dispatch.py:* in run (glob)
127 128 */mercurial/dispatch.py:* in dispatch (glob)
128 129 */mercurial/dispatch.py:* in _runcatch (glob)
129 130 */mercurial/dispatch.py:* in callcatch (glob)
131 */mercurial/scmutil.py* in callcatch (glob)
130 132 */mercurial/dispatch.py:* in _runcatchfunc (glob)
131 133 */mercurial/dispatch.py:* in _dispatch (glob)
132 134 */mercurial/dispatch.py:* in runcommand (glob)
133 135 */mercurial/dispatch.py:* in _runcommand (glob)
134 136 */mercurial/dispatch.py:* in <lambda> (glob)
135 137 */mercurial/util.py:* in check (glob)
136 138 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
137 139 $ hg blackbox -l 9
138 140 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" uses list instead of smartset
139 141 (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
140 142 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> log -r oldstyle() -T {rev}\n exited 0 after * seconds (glob)
141 143 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
142 144 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
143 145 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
144 146 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob)
145 147 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
146 148 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
147 149 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
148 150 */hg:* in <module> (glob)
149 151 */mercurial/dispatch.py:* in run (glob)
150 152 */mercurial/dispatch.py:* in dispatch (glob)
151 153 */mercurial/dispatch.py:* in _runcatch (glob)
152 154 */mercurial/dispatch.py:* in callcatch (glob)
155 */mercurial/scmutil.py* in callcatch (glob)
153 156 */mercurial/dispatch.py:* in _runcatchfunc (glob)
154 157 */mercurial/dispatch.py:* in _dispatch (glob)
155 158 */mercurial/dispatch.py:* in runcommand (glob)
156 159 */mercurial/dispatch.py:* in _runcommand (glob)
157 160 */mercurial/dispatch.py:* in <lambda> (glob)
158 161 */mercurial/util.py:* in check (glob)
159 162 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
160 163 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
161 164 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 9
162 165
163 166 Test programming error failure:
164 167
165 168 $ hg buggytransaction 2>&1 | egrep -v '^ '
166 169 ** Unknown exception encountered with possibly-broken third-party extension buggylocking
167 170 ** which supports versions unknown of Mercurial.
168 171 ** Please disable buggylocking and try your action again.
169 172 ** If that fixes the bug please report it to the extension author.
170 173 ** Python * (glob)
171 174 ** Mercurial Distributed SCM (*) (glob)
172 175 ** Extensions loaded: * (glob)
173 176 Traceback (most recent call last):
174 177 RuntimeError: programming error: transaction requires locking
175 178
176 179 $ cd ..