##// END OF EJS Templates
errors: format "abort: " text in a new Abort.format() method...
Martin von Zweigbergk -
r46497:600aec73 default
parent child Browse files
Show More
@@ -1,751 +1,749 b''
1 1 # chgserver.py - command server extension for cHg
2 2 #
3 3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """command server extension for cHg
9 9
10 10 'S' channel (read/write)
11 11 propagate ui.system() request to client
12 12
13 13 'attachio' command
14 14 attach client's stdio passed by sendmsg()
15 15
16 16 'chdir' command
17 17 change current directory
18 18
19 19 'setenv' command
20 20 replace os.environ completely
21 21
22 22 'setumask' command (DEPRECATED)
23 23 'setumask2' command
24 24 set umask
25 25
26 26 'validate' command
27 27 reload the config and check if the server is up to date
28 28
29 29 Config
30 30 ------
31 31
32 32 ::
33 33
34 34 [chgserver]
35 35 # how long (in seconds) should an idle chg server exit
36 36 idletimeout = 3600
37 37
38 38 # whether to skip config or env change checks
39 39 skiphash = False
40 40 """
41 41
42 42 from __future__ import absolute_import
43 43
44 44 import inspect
45 45 import os
46 46 import re
47 47 import socket
48 48 import stat
49 49 import struct
50 50 import time
51 51
52 52 from .i18n import _
53 53 from .pycompat import (
54 54 getattr,
55 55 setattr,
56 56 )
57 57
58 58 from . import (
59 59 commandserver,
60 60 encoding,
61 61 error,
62 62 extensions,
63 63 node,
64 64 pycompat,
65 65 util,
66 66 )
67 67
68 68 from .utils import (
69 69 hashutil,
70 70 procutil,
71 71 stringutil,
72 72 )
73 73
74 74
def _hashlist(items):
    """Return the sha1 hexdigest of the pretty-printed *items* list."""
    serialized = stringutil.pprint(items)
    return node.hex(hashutil.sha1(serialized).digest())
78 78
79 79
# sensitive config sections affecting confighash
_configsections = [
    b'alias',  # affects global state commands.table
    b'diff-tools',  # affects whether gui or not in extdiff's uisetup
    b'eol',  # uses setconfig('eol', ...)
    b'extdiff',  # uisetup will register new commands
    b'extensions',
    b'fastannotate',  # affects annotate command and adds fastannotate cmd
    b'merge-tools',  # affects whether gui or not in extdiff's uisetup
    b'schemes',  # extsetup will update global hg.schemes
]

# sensitive (section, item) config pairs affecting confighash
_configsectionitems = [
    (b'commands', b'show.aliasprefix'),  # show.py reads it in extsetup
]
95 95
# sensitive environment variables affecting confighash
# (anchored with \A...\Z, so each variable name must match in full:
# hg/chg-specific knobs, locale settings, dynamic loader and interpreter
# paths -- a change in any of these hashes to a different server address)
_envre = re.compile(
    br'''\A(?:
                CHGHG
                |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
                |HG(?:ENCODING|PLAIN).*
                |LANG(?:UAGE)?
                |LC_.*
                |LD_.*
                |PATH
                |PYTHON.*
                |TERM(?:INFO)?
                |TZ
                )\Z''',
    re.X,
)
112 112
113 113
def _confighash(ui):
    """Return a quick 12-char hash for detecting config/env changes.

    confighash combines the values of the sensitive config sections/items
    with the sensitive environment variables.

    for chgserver, it is designed that once confighash changes, the server
    is not qualified to serve its client and should redirect the client to
    a new server. different from mtimehash, confighash change will not mark
    the server outdated and exit since the user can have different configs
    at the same time.
    """
    items = [ui.configitems(section) for section in _configsections]
    items.extend(
        ui.config(section, name) for section, name in _configsectionitems
    )
    confhash = _hashlist(items)
    # If $CHGHG is set, the change to $HG should not trigger a new chg server
    ignored = {b'HG'} if b'CHGHG' in encoding.environ else set()
    envpairs = sorted(
        (name, value)
        for name, value in pycompat.iteritems(encoding.environ)
        if _envre.match(name) and name not in ignored
    )
    envhash = _hashlist(envpairs)
    return confhash[:6] + envhash[:6]
143 143
144 144
def _getmtimepaths(ui):
    """Return a sorted list of paths whose change should invalidate the server.

    The list covers:
    - loaded extension modules (will not cover all files for complex ones)
    - mercurial/__version__.py
    - the python binary itself
    """
    mods = [mod for _name, mod in extensions.extensions(ui)]
    try:
        from . import __version__

        mods.append(__version__)
    except ImportError:
        pass
    paths = set()
    if pycompat.sysexecutable:
        paths.add(pycompat.sysexecutable)
    for mod in mods:
        try:
            paths.add(pycompat.fsencode(inspect.getabsfile(mod)))
        except TypeError:
            # builtin or dynamically created modules have no source file
            pass
    return sorted(paths)
169 169
170 170
def _mtimehash(paths):
    """Return a 12-char hash of (mtime, size) for every path in *paths*.

    Only stat() is used -- file content is never read because reading is
    expensive. The hash is therefore not 100% reliable: identical content
    can hash differently, and carefully crafted different content can
    collide.

    for chgserver, it is designed that once mtimehash changes, the server is
    considered outdated immediately and should no longer provide service.

    mtimehash is not included in confighash because extension paths are only
    known after importing them (imp.find_module has race conditions), while
    confighash must be computable without imports.
    """

    def statordefault(path):
        # ENOENT, EPERM etc. are not fatal; unstattable paths fold in as None
        try:
            st = os.stat(path)
        except OSError:
            return None
        return (st[stat.ST_MTIME], st.st_size)

    return _hashlist(pycompat.maplist(statordefault, paths))[:12]
198 198
199 199
class hashstate(object):
    """Bundle of confighash, mtimehash, and the paths used for mtimehash."""

    def __init__(self, confighash, mtimehash, mtimepaths):
        self.confighash = confighash
        self.mtimehash = mtimehash
        self.mtimepaths = mtimepaths

    @staticmethod
    def fromui(ui, mtimepaths=None):
        """Build a hashstate from *ui*, computing mtimepaths when omitted."""
        if mtimepaths is None:
            mtimepaths = _getmtimepaths(ui)
        state = hashstate(_confighash(ui), _mtimehash(mtimepaths), mtimepaths)
        ui.log(
            b'cmdserver',
            b'confighash = %s mtimehash = %s\n',
            state.confighash,
            state.mtimehash,
        )
        return state
221 221
222 222
def _newchgui(srcui, csystem, attachio):
    """Return a copy of *srcui* whose class routes system()/pager requests
    through the chg client.

    *csystem* is the callable (a channeledsystem) used to run shell commands
    on the client side; *attachio* re-attaches client stdio when a pager is
    spawned.
    """

    class chgui(srcui.__class__):
        def __init__(self, src=None):
            super(chgui, self).__init__(src)
            if src:
                # propagate the client channel when the ui is copied
                self._csystem = getattr(src, '_csystem', csystem)
            else:
                self._csystem = csystem

        def _runsystem(self, cmd, environ, cwd, out):
            # fallback to the original system method if
            #  a. the output stream is not stdout (e.g. stderr, cStringIO),
            #  b. or stdout is redirected by protectfinout(),
            # because the chg client is not aware of these situations and
            # will behave differently (i.e. write to stdout).
            if (
                out is not self.fout
                or not util.safehasattr(self.fout, b'fileno')
                or self.fout.fileno() != procutil.stdout.fileno()
                or self._finoutredirected
            ):
                return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
            self.flush()
            return self._csystem(cmd, procutil.shellenviron(environ), cwd)

        def _runpager(self, cmd, env=None):
            # ask the client to spawn the pager; the client calls back the
            # 'attachio' entry in cmdtable so our stdio gets reattached
            self._csystem(
                cmd,
                procutil.shellenviron(env),
                type=b'pager',
                cmdtable={b'attachio': attachio},
            )
            return True

    return chgui(srcui)
258 258
259 259
def _loadnewui(srcui, args, cdebug):
    """Create a fresh (newui, newlui) pair for one validation round.

    The new ui is loaded from scratch, inherits srcui's stdio streams and
    client channel, applies early command-line options from *args*, and
    copies over dynamically-set config values (anything not sourced from a
    file, --config, or the environment). Extensions and logging are set up
    on both uis before returning.
    """
    from . import dispatch  # avoid cycle

    newui = srcui.__class__.load()
    for stream in (b'fin', b'fout', b'ferr', b'environ'):
        setattr(newui, stream, getattr(srcui, stream))
    if util.safehasattr(srcui, b'_csystem'):
        newui._csystem = srcui._csystem

    # command line args
    options = dispatch._earlyparseopts(newui, args)
    dispatch._parseconfig(newui, options[b'config'])

    # stolen from tortoisehg.util.copydynamicconfig()
    for section, name, value in srcui.walkconfig():
        source = srcui.configsource(section, name)
        fromfile = b':' in source  # "path:line" style source
        if fromfile or source == b'--config' or source.startswith(b'$'):
            # path:line or command line, or environ
            continue
        newui.setconfig(section, name, value, source)

    # load wd and repo config, copied from dispatch.py
    cwd = options[b'cwd']
    cwd = cwd and os.path.realpath(cwd) or None
    rpath = options[b'repository']
    path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)

    extensions.populateui(newui)
    commandserver.setuplogging(newui, fp=cdebug)
    if newlui is not newui:
        extensions.populateui(newlui)
        commandserver.setuplogging(newlui, fp=cdebug)

    return (newui, newlui)
294 294
295 295
class channeledsystem(object):
    """Propagate ui.system() request in the following format:

    payload length (unsigned int),
    type, '\0',
    cmd, '\0',
    cwd, '\0',
    envkey, '=', val, '\0',
    ...
    envkey, '=', val

    if type == 'system', waits for:

    exitcode length (unsigned int),
    exitcode (int)

    if type == 'pager', repetitively waits for a command name ending with '\n'
    and executes it defined by cmdtable, or exits the loop if the command name
    is empty.
    """

    def __init__(self, in_, out, channel):
        self.in_ = in_
        self.out = out
        self.channel = channel

    def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None):
        # serialize the request: type, command, cwd, then env pairs, all
        # NUL-separated, prefixed by the channel/length header
        fields = [type, cmd, os.path.abspath(cwd or b'.')]
        fields.extend(
            b'%s=%s' % pair for pair in pycompat.iteritems(environ)
        )
        payload = b'\0'.join(fields)
        self.out.write(struct.pack(b'>cI', self.channel, len(payload)))
        self.out.write(payload)
        self.out.flush()

        if type == b'system':
            # expect a 4-byte length followed by a signed 32-bit exit code
            (size,) = struct.unpack(b'>I', self.in_.read(4))
            if size != 4:
                raise error.Abort(_(b'invalid response'))
            (rc,) = struct.unpack(b'>i', self.in_.read(4))
            return rc
        elif type == b'pager':
            # serve cmdtable callbacks until an empty command name arrives
            while True:
                request = self.in_.readline()[:-1]
                if not request:
                    break
                if cmdtable and request in cmdtable:
                    cmdtable[request]()
                else:
                    raise error.Abort(_(b'unexpected command: %s') % request)
        else:
            raise error.ProgrammingError(b'invalid S channel type: %s' % type)
348 348
349 349
# mapping between the server's channel attribute, the corresponding ui file
# attribute, and the mode used to reopen that stream on attachio
_iochannels = [
    # server.ch, ui.fp, mode
    (b'cin', b'fin', 'rb'),
    (b'cout', b'fout', 'wb'),
    (b'cerr', b'ferr', 'wb'),
]
356 356
357 357
class chgcmdserver(commandserver.server):
    """Command server serving a single chg client over a unix socket.

    Extends the generic commandserver with chg specifics: attaching the
    client's stdio via fd passing ('attachio'), validating config/mtime
    hashes ('validate'), and propagating cwd/env/umask from the client.
    """

    def __init__(
        self, ui, repo, fin, fout, sock, prereposetups, hashstate, baseaddress
    ):
        super(chgcmdserver, self).__init__(
            _newchgui(ui, channeledsystem(fin, fout, b'S'), self.attachio),
            repo,
            fin,
            fout,
            prereposetups,
        )
        self.clientsock = sock
        self._ioattached = False
        self._oldios = []  # original (self.ch, ui.fp, fd) before "attachio"
        self.hashstate = hashstate
        self.baseaddress = baseaddress
        if hashstate is not None:
            # advertise 'validate' only when hash checking is enabled
            self.capabilities = self.capabilities.copy()
            self.capabilities[b'validate'] = chgcmdserver.validate

    def cleanup(self):
        super(chgcmdserver, self).cleanup()
        # dispatch._runcatch() does not flush outputs if exception is not
        # handled by dispatch._dispatch()
        self.ui.flush()
        self._restoreio()
        self._ioattached = False

    def attachio(self):
        """Attach to client's stdio passed via unix domain socket; all
        channels except cresult will no longer be used
        """
        # tell client to sendmsg() with 1-byte payload, which makes it
        # distinctive from "attachio\n" command consumed by client.read()
        self.clientsock.sendall(struct.pack(b'>cI', b'I', 1))
        clientfds = util.recvfds(self.clientsock.fileno())
        self.ui.log(b'chgserver', b'received fds: %r\n', clientfds)

        ui = self.ui
        ui.flush()
        self._saveio()
        for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
            assert fd > 0
            fp = getattr(ui, fn)
            # splice the client's fd over the server's in place
            os.dup2(fd, fp.fileno())
            os.close(fd)
            if self._ioattached:
                continue
            # reset buffering mode when client is first attached. as we want
            # to see output immediately on pager, the mode stays unchanged
            # when client re-attached. ferr is unchanged because it should
            # be unbuffered no matter if it is a tty or not.
            if fn == b'ferr':
                newfp = fp
            elif pycompat.ispy3:
                # On Python 3, the standard library doesn't offer line-buffered
                # binary streams, so wrap/unwrap it.
                if fp.isatty():
                    newfp = procutil.make_line_buffered(fp)
                else:
                    newfp = procutil.unwrap_line_buffered(fp)
            else:
                # Python 2 uses the I/O streams provided by the C library, so
                # make it line-buffered explicitly. Otherwise the default would
                # be decided on first write(), where fout could be a pager.
                if fp.isatty():
                    bufsize = 1  # line buffered
                else:
                    bufsize = -1  # system default
                newfp = os.fdopen(fp.fileno(), mode, bufsize)
            if newfp is not fp:
                setattr(ui, fn, newfp)
            setattr(self, cn, newfp)

        self._ioattached = True
        self.cresult.write(struct.pack(b'>i', len(clientfds)))

    def _saveio(self):
        # remember the original channels, file objects and dup'ed fds so
        # _restoreio() can undo the attachio splicing; idempotent
        if self._oldios:
            return
        ui = self.ui
        for cn, fn, _mode in _iochannels:
            ch = getattr(self, cn)
            fp = getattr(ui, fn)
            fd = os.dup(fp.fileno())
            self._oldios.append((ch, fp, fd))

    def _restoreio(self):
        """Restore the stdio saved by _saveio(), discarding buffered output."""
        if not self._oldios:
            return
        nullfd = os.open(os.devnull, os.O_WRONLY)
        ui = self.ui
        for (ch, fp, fd), (cn, fn, mode) in zip(self._oldios, _iochannels):
            newfp = getattr(ui, fn)
            # On Python 2, newfp and fp may be separate file objects associated
            # with the same fd, so we must close newfp while it's associated
            # with the client. Otherwise the new associated fd would be closed
            # when newfp gets deleted. On Python 3, newfp is just a wrapper
            # around fp even if newfp is not fp, so deleting newfp is safe.
            if not (pycompat.ispy3 or newfp is fp):
                newfp.close()
            # restore original fd: fp is open again
            try:
                if (pycompat.ispy3 or newfp is fp) and 'w' in mode:
                    # Discard buffered data which couldn't be flushed because
                    # of EPIPE. The data should belong to the current session
                    # and should never persist.
                    os.dup2(nullfd, fp.fileno())
                    fp.flush()
                os.dup2(fd, fp.fileno())
            except OSError as err:
                # According to issue6330, running chg on heavy loaded systems
                # can lead to EBUSY. [man dup2] indicates that, on Linux,
                # EBUSY comes from a race condition between open() and dup2().
                # However it's not clear why open() race occurred for
                # newfd=stdin/out/err.
                self.ui.log(
                    b'chgserver',
                    b'got %s while duplicating %s\n',
                    stringutil.forcebytestr(err),
                    fn,
                )
            os.close(fd)
            setattr(self, cn, ch)
            setattr(ui, fn, fp)
        os.close(nullfd)
        del self._oldios[:]

    def validate(self):
        """Reload the config and check if the server is up to date

        Read a list of '\0' separated arguments.
        Write a non-empty list of '\0' separated instruction strings or '\0'
        if the list is empty.
        An instruction string could be either:
            - "unlink $path", the client should unlink the path to stop the
              outdated server.
            - "redirect $path", the client should attempt to connect to $path
              first. If it does not work, start a new server. It implies
              "reconnect".
            - "exit $n", the client should exit directly with code n.
              This may happen if we cannot parse the config.
            - "reconnect", the client should close the connection and
              reconnect.
        If neither "reconnect" nor "redirect" is included in the instruction
        list, the client can continue with this server after completing all
        the instructions.
        """
        args = self._readlist()
        try:
            self.ui, lui = _loadnewui(self.ui, args, self.cdebug)
        except error.ParseError as inst:
            # unparsable config: tell the client to bail out with code 255
            self.ui.warn(inst.format())
            self.ui.flush()
            self.cresult.write(b'exit 255')
            return
        except error.Abort as inst:
            self.ui.error(inst.format())
            self.ui.flush()
            self.cresult.write(b'exit 255')
            return
        newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
        insts = []
        if newhash.mtimehash != self.hashstate.mtimehash:
            addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
            insts.append(b'unlink %s' % addr)
            # mtimehash is empty if one or more extensions fail to load.
            # to be compatible with hg, still serve the client this time.
            if self.hashstate.mtimehash:
                insts.append(b'reconnect')
        if newhash.confighash != self.hashstate.confighash:
            addr = _hashaddress(self.baseaddress, newhash.confighash)
            insts.append(b'redirect %s' % addr)
        self.ui.log(b'chgserver', b'validate: %s\n', stringutil.pprint(insts))
        self.cresult.write(b'\0'.join(insts) or b'\0')

    def chdir(self):
        """Change current directory

        Note that the behavior of --cwd option is bit different from this.
        It does not affect --config parameter.
        """
        path = self._readstr()
        if not path:
            return
        self.ui.log(b'chgserver', b"chdir to '%s'\n", path)
        os.chdir(path)

    def setumask(self):
        """Change umask (DEPRECATED)"""
        # BUG: this does not follow the message frame structure, but kept for
        # backward compatibility with old chg clients for some time
        self._setumask(self._read(4))

    def setumask2(self):
        """Change umask"""
        data = self._readstr()
        if len(data) != 4:
            raise ValueError(b'invalid mask length in setumask2 request')
        self._setumask(data)

    def _setumask(self, data):
        # the mask arrives as a 4-byte big-endian unsigned int
        mask = struct.unpack(b'>I', data)[0]
        self.ui.log(b'chgserver', b'setumask %r\n', mask)
        util.setumask(mask)

    def runcommand(self):
        # pager may be attached within the runcommand session, which should
        # be detached at the end of the session. otherwise the pager wouldn't
        # receive EOF.
        globaloldios = self._oldios
        self._oldios = []
        try:
            return super(chgcmdserver, self).runcommand()
        finally:
            self._restoreio()
            self._oldios = globaloldios

    def setenv(self):
        """Clear and update os.environ

        Note that not all variables can make an effect on the running process.
        """
        l = self._readlist()
        try:
            newenv = dict(s.split(b'=', 1) for s in l)
        except ValueError:
            raise ValueError(b'unexpected value in setenv request')
        self.ui.log(b'chgserver', b'setenv: %r\n', sorted(newenv.keys()))

        encoding.environ.clear()
        encoding.environ.update(newenv)

    # base capabilities plus the chg-specific commands defined above
    capabilities = commandserver.server.capabilities.copy()
    capabilities.update(
        {
            b'attachio': attachio,
            b'chdir': chdir,
            b'runcommand': runcommand,
            b'setenv': setenv,
            b'setumask': setumask,
            b'setumask2': setumask2,
        }
    )

    if util.safehasattr(procutil, b'setprocname'):
        # only advertised where the platform supports changing the title

        def setprocname(self):
            """Change process title"""
            name = self._readstr()
            self.ui.log(b'chgserver', b'setprocname: %r\n', name)
            procutil.setprocname(name)

        capabilities[b'setprocname'] = setprocname
614 612
615 613
616 614 def _tempaddress(address):
617 615 return b'%s.%d.tmp' % (address, os.getpid())
618 616
619 617
620 618 def _hashaddress(address, hashstr):
621 619 # if the basename of address contains '.', use only the left part. this
622 620 # makes it possible for the client to pass 'server.tmp$PID' and follow by
623 621 # an atomic rename to avoid locking when spawning new servers.
624 622 dirname, basename = os.path.split(address)
625 623 basename = basename.split(b'.', 1)[0]
626 624 return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
627 625
628 626
class chgunixservicehandler(object):
    """Set of operations for chg services"""

    # how often (in seconds) the service loop polls shouldexit()
    pollinterval = 1  # [sec]

    def __init__(self, ui):
        self.ui = ui
        # exit after this many seconds without a new client connection
        self._idletimeout = ui.configint(b'chgserver', b'idletimeout')
        self._lastactive = time.time()

    def bindsocket(self, sock, address):
        self._inithashstate(address)
        self._checkextensions()
        self._bind(sock)
        self._createsymlink()
        # no "listening at" message should be printed to simulate hg behavior

    def _inithashstate(self, address):
        # the real socket address embeds confighash, unless chgserver.skiphash
        # disables hash checking entirely
        self._baseaddress = address
        if self.ui.configbool(b'chgserver', b'skiphash'):
            self._hashstate = None
            self._realaddress = address
            return
        self._hashstate = hashstate.fromui(self.ui)
        self._realaddress = _hashaddress(address, self._hashstate.confighash)

    def _checkextensions(self):
        if not self._hashstate:
            return
        if extensions.notloaded():
            # one or more extensions failed to load. mtimehash becomes
            # meaningless because we do not know the paths of those extensions.
            # set mtimehash to an illegal hash value to invalidate the server.
            self._hashstate.mtimehash = b''

    def _bind(self, sock):
        # use a unique temp address so we can stat the file and do ownership
        # check later
        tempaddress = _tempaddress(self._realaddress)
        util.bindunixsocket(sock, tempaddress)
        # stat taken at bind time is the reference for _issocketowner()
        self._socketstat = os.stat(tempaddress)
        sock.listen(socket.SOMAXCONN)
        # rename will replace the old socket file if exists atomically. the
        # old server will detect ownership change and exit.
        util.rename(tempaddress, self._realaddress)

    def _createsymlink(self):
        # keep the base (hash-less) address pointing at the real socket,
        # again via atomic rename of a temp symlink
        if self._baseaddress == self._realaddress:
            return
        tempaddress = _tempaddress(self._baseaddress)
        os.symlink(os.path.basename(self._realaddress), tempaddress)
        util.rename(tempaddress, self._baseaddress)

    def _issocketowner(self):
        # we still own the socket iff inode and mtime match the stat taken
        # in _bind(); a mismatch means another server replaced the file
        try:
            st = os.stat(self._realaddress)
            return (
                st.st_ino == self._socketstat.st_ino
                and st[stat.ST_MTIME] == self._socketstat[stat.ST_MTIME]
            )
        except OSError:
            return False

    def unlinksocket(self, address):
        if not self._issocketowner():
            return
        # it is possible to have a race condition here that we may
        # remove another server's socket file. but that's okay
        # since that server will detect and exit automatically and
        # the client will start a new server on demand.
        util.tryunlink(self._realaddress)

    def shouldexit(self):
        if not self._issocketowner():
            self.ui.log(
                b'chgserver', b'%s is not owned, exiting.\n', self._realaddress
            )
            return True
        if time.time() - self._lastactive > self._idletimeout:
            self.ui.log(b'chgserver', b'being idle too long. exiting.\n')
            return True
        return False

    def newconnection(self):
        # reset the idle clock on every client connection
        self._lastactive = time.time()

    def createcmdserver(self, repo, conn, fin, fout, prereposetups):
        return chgcmdserver(
            self.ui,
            repo,
            fin,
            fout,
            conn,
            prereposetups,
            self._hashstate,
            self._baseaddress,
        )
726 724
727 725
def chgunixservice(ui, repo, opts):
    """Create the unix-domain chg forking service.

    Scrubs chg-specific environment markers before serving so that hash
    computation and child processes see a clean environment.
    """
    # CHGINTERNALMARK is set by chg client. It is an indication of things are
    # started by chg so other code can do things accordingly, like disabling
    # demandimport or detecting chg client started by chg client. When executed
    # here, CHGINTERNALMARK is no longer useful and hence dropped to make
    # environ cleaner.
    encoding.environ.pop(b'CHGINTERNALMARK', None)
    # Python3.7+ "coerces" the LC_CTYPE environment variable to a UTF-8 one if
    # it thinks the current value is "C". This breaks the hash computation and
    # causes chg to restart loop.
    if b'CHGORIG_LC_CTYPE' in encoding.environ:
        encoding.environ[b'LC_CTYPE'] = encoding.environ.pop(
            b'CHGORIG_LC_CTYPE'
        )
    elif encoding.environ.pop(b'CHG_CLEAR_LC_CTYPE', None) is not None:
        encoding.environ.pop(b'LC_CTYPE', None)

    if repo:
        # one chgserver can serve multiple repos. drop repo information
        ui.setconfig(b'bundle', b'mainreporoot', b'', b'repo')
    handler = chgunixservicehandler(ui)
    return commandserver.unixforkingservice(
        ui, repo=None, opts=opts, handler=handler
    )
@@ -1,1332 +1,1330 b''
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import, print_function
9 9
10 10 import errno
11 11 import getopt
12 12 import io
13 13 import os
14 14 import pdb
15 15 import re
16 16 import signal
17 17 import sys
18 18 import traceback
19 19
20 20
21 21 from .i18n import _
22 22 from .pycompat import getattr
23 23
24 24 from hgdemandimport import tracing
25 25
26 26 from . import (
27 27 cmdutil,
28 28 color,
29 29 commands,
30 30 demandimport,
31 31 encoding,
32 32 error,
33 33 extensions,
34 34 fancyopts,
35 35 help,
36 36 hg,
37 37 hook,
38 38 localrepo,
39 39 profiling,
40 40 pycompat,
41 41 rcutil,
42 42 registrar,
43 43 requirements as requirementsmod,
44 44 scmutil,
45 45 ui as uimod,
46 46 util,
47 47 vfs,
48 48 )
49 49
50 50 from .utils import (
51 51 procutil,
52 52 stringutil,
53 53 )
54 54
55 55
56 56 class request(object):
57 57 def __init__(
58 58 self,
59 59 args,
60 60 ui=None,
61 61 repo=None,
62 62 fin=None,
63 63 fout=None,
64 64 ferr=None,
65 65 fmsg=None,
66 66 prereposetups=None,
67 67 ):
68 68 self.args = args
69 69 self.ui = ui
70 70 self.repo = repo
71 71
72 72 # input/output/error streams
73 73 self.fin = fin
74 74 self.fout = fout
75 75 self.ferr = ferr
76 76 # separate stream for status/error messages
77 77 self.fmsg = fmsg
78 78
79 79 # remember options pre-parsed by _earlyparseopts()
80 80 self.earlyoptions = {}
81 81
82 82 # reposetups which run before extensions, useful for chg to pre-fill
83 83 # low-level repo state (for example, changelog) before extensions.
84 84 self.prereposetups = prereposetups or []
85 85
86 86 # store the parsed and canonical command
87 87 self.canonical_command = None
88 88
89 89 def _runexithandlers(self):
90 90 exc = None
91 91 handlers = self.ui._exithandlers
92 92 try:
93 93 while handlers:
94 94 func, args, kwargs = handlers.pop()
95 95 try:
96 96 func(*args, **kwargs)
97 97 except: # re-raises below
98 98 if exc is None:
99 99 exc = sys.exc_info()[1]
100 100 self.ui.warnnoi18n(b'error in exit handlers:\n')
101 101 self.ui.traceback(force=True)
102 102 finally:
103 103 if exc is not None:
104 104 raise exc
105 105
106 106
107 107 def run():
108 108 """run the command in sys.argv"""
109 109 try:
110 110 initstdio()
111 111 with tracing.log('parse args into request'):
112 112 req = request(pycompat.sysargv[1:])
113 113 err = None
114 114 try:
115 115 status = dispatch(req)
116 116 except error.StdioError as e:
117 117 err = e
118 118 status = -1
119 119
120 120 # In all cases we try to flush stdio streams.
121 121 if util.safehasattr(req.ui, b'fout'):
122 122 assert req.ui is not None # help pytype
123 123 assert req.ui.fout is not None # help pytype
124 124 try:
125 125 req.ui.fout.flush()
126 126 except IOError as e:
127 127 err = e
128 128 status = -1
129 129
130 130 if util.safehasattr(req.ui, b'ferr'):
131 131 assert req.ui is not None # help pytype
132 132 assert req.ui.ferr is not None # help pytype
133 133 try:
134 134 if err is not None and err.errno != errno.EPIPE:
135 135 req.ui.ferr.write(
136 136 b'abort: %s\n' % encoding.strtolocal(err.strerror)
137 137 )
138 138 req.ui.ferr.flush()
139 139 # There's not much we can do about an I/O error here. So (possibly)
140 140 # change the status code and move on.
141 141 except IOError:
142 142 status = -1
143 143
144 144 _silencestdio()
145 145 except KeyboardInterrupt:
146 146 # Catch early/late KeyboardInterrupt as last ditch. Here nothing will
147 147 # be printed to console to avoid another IOError/KeyboardInterrupt.
148 148 status = -1
149 149 sys.exit(status & 255)
150 150
151 151
152 152 if pycompat.ispy3:
153 153
154 154 def initstdio():
155 155 # stdio streams on Python 3 are io.TextIOWrapper instances proxying another
156 156 # buffer. These streams will normalize \n to \r\n by default. Mercurial's
157 157 # preferred mechanism for writing output (ui.write()) uses io.BufferedWriter
158 158 # instances, which write to the underlying stdio file descriptor in binary
159 159 # mode. ui.write() uses \n for line endings and no line ending normalization
160 160 # is attempted through this interface. This "just works," even if the system
161 161 # preferred line ending is not \n.
162 162 #
163 163 # But some parts of Mercurial (e.g. hooks) can still send data to sys.stdout
164 164 # and sys.stderr. They will inherit the line ending normalization settings,
165 165 # potentially causing e.g. \r\n to be emitted. Since emitting \n should
166 166 # "just work," here we change the sys.* streams to disable line ending
167 167 # normalization, ensuring compatibility with our ui type.
168 168
169 169 # write_through is new in Python 3.7.
170 170 kwargs = {
171 171 "newline": "\n",
172 172 "line_buffering": sys.stdout.line_buffering,
173 173 }
174 174 if util.safehasattr(sys.stdout, "write_through"):
175 175 kwargs["write_through"] = sys.stdout.write_through
176 176 sys.stdout = io.TextIOWrapper(
177 177 sys.stdout.buffer, sys.stdout.encoding, sys.stdout.errors, **kwargs
178 178 )
179 179
180 180 kwargs = {
181 181 "newline": "\n",
182 182 "line_buffering": sys.stderr.line_buffering,
183 183 }
184 184 if util.safehasattr(sys.stderr, "write_through"):
185 185 kwargs["write_through"] = sys.stderr.write_through
186 186 sys.stderr = io.TextIOWrapper(
187 187 sys.stderr.buffer, sys.stderr.encoding, sys.stderr.errors, **kwargs
188 188 )
189 189
190 190 # No write_through on read-only stream.
191 191 sys.stdin = io.TextIOWrapper(
192 192 sys.stdin.buffer,
193 193 sys.stdin.encoding,
194 194 sys.stdin.errors,
195 195 # None is universal newlines mode.
196 196 newline=None,
197 197 line_buffering=sys.stdin.line_buffering,
198 198 )
199 199
200 200 def _silencestdio():
201 201 for fp in (sys.stdout, sys.stderr):
202 202 # Check if the file is okay
203 203 try:
204 204 fp.flush()
205 205 continue
206 206 except IOError:
207 207 pass
208 208 # Otherwise mark it as closed to silence "Exception ignored in"
209 209 # message emitted by the interpreter finalizer. Be careful to
210 210 # not close procutil.stdout, which may be a fdopen-ed file object
211 211 # and its close() actually closes the underlying file descriptor.
212 212 try:
213 213 fp.close()
214 214 except IOError:
215 215 pass
216 216
217 217
218 218 else:
219 219
220 220 def initstdio():
221 221 for fp in (sys.stdin, sys.stdout, sys.stderr):
222 222 procutil.setbinary(fp)
223 223
224 224 def _silencestdio():
225 225 pass
226 226
227 227
228 228 def _formatargs(args):
229 229 return b' '.join(procutil.shellquote(a) for a in args)
230 230
231 231
232 232 def dispatch(req):
233 233 """run the command specified in req.args; returns an integer status code"""
234 234 with tracing.log('dispatch.dispatch'):
235 235 if req.ferr:
236 236 ferr = req.ferr
237 237 elif req.ui:
238 238 ferr = req.ui.ferr
239 239 else:
240 240 ferr = procutil.stderr
241 241
242 242 try:
243 243 if not req.ui:
244 244 req.ui = uimod.ui.load()
245 245 req.earlyoptions.update(_earlyparseopts(req.ui, req.args))
246 246 if req.earlyoptions[b'traceback']:
247 247 req.ui.setconfig(b'ui', b'traceback', b'on', b'--traceback')
248 248
249 249 # set ui streams from the request
250 250 if req.fin:
251 251 req.ui.fin = req.fin
252 252 if req.fout:
253 253 req.ui.fout = req.fout
254 254 if req.ferr:
255 255 req.ui.ferr = req.ferr
256 256 if req.fmsg:
257 257 req.ui.fmsg = req.fmsg
258 258 except error.Abort as inst:
259 ferr.write(_(b"abort: %s\n") % inst.message)
260 if inst.hint:
261 ferr.write(_(b"(%s)\n") % inst.hint)
259 ferr.write(inst.format())
262 260 return -1
263 261 except error.ParseError as inst:
264 262 ferr.write(inst.format())
265 263 return -1
266 264
267 265 msg = _formatargs(req.args)
268 266 starttime = util.timer()
269 267 ret = 1 # default of Python exit code on unhandled exception
270 268 try:
271 269 ret = _runcatch(req) or 0
272 270 except error.ProgrammingError as inst:
273 271 req.ui.error(_(b'** ProgrammingError: %s\n') % inst)
274 272 if inst.hint:
275 273 req.ui.error(_(b'** (%s)\n') % inst.hint)
276 274 raise
277 275 except KeyboardInterrupt as inst:
278 276 try:
279 277 if isinstance(inst, error.SignalInterrupt):
280 278 msg = _(b"killed!\n")
281 279 else:
282 280 msg = _(b"interrupted!\n")
283 281 req.ui.error(msg)
284 282 except error.SignalInterrupt:
285 283 # maybe pager would quit without consuming all the output, and
286 284 # SIGPIPE was raised. we cannot print anything in this case.
287 285 pass
288 286 except IOError as inst:
289 287 if inst.errno != errno.EPIPE:
290 288 raise
291 289 ret = -1
292 290 finally:
293 291 duration = util.timer() - starttime
294 292 req.ui.flush() # record blocked times
295 293 if req.ui.logblockedtimes:
296 294 req.ui._blockedtimes[b'command_duration'] = duration * 1000
297 295 req.ui.log(
298 296 b'uiblocked',
299 297 b'ui blocked ms\n',
300 298 **pycompat.strkwargs(req.ui._blockedtimes)
301 299 )
302 300 return_code = ret & 255
303 301 req.ui.log(
304 302 b"commandfinish",
305 303 b"%s exited %d after %0.2f seconds\n",
306 304 msg,
307 305 return_code,
308 306 duration,
309 307 return_code=return_code,
310 308 duration=duration,
311 309 canonical_command=req.canonical_command,
312 310 )
313 311 try:
314 312 req._runexithandlers()
315 313 except: # exiting, so no re-raises
316 314 ret = ret or -1
317 315 # do flush again since ui.log() and exit handlers may write to ui
318 316 req.ui.flush()
319 317 return ret
320 318
321 319
322 320 def _runcatch(req):
323 321 with tracing.log('dispatch._runcatch'):
324 322
325 323 def catchterm(*args):
326 324 raise error.SignalInterrupt
327 325
328 326 ui = req.ui
329 327 try:
330 328 for name in b'SIGBREAK', b'SIGHUP', b'SIGTERM':
331 329 num = getattr(signal, name, None)
332 330 if num:
333 331 signal.signal(num, catchterm)
334 332 except ValueError:
335 333 pass # happens if called in a thread
336 334
337 335 def _runcatchfunc():
338 336 realcmd = None
339 337 try:
340 338 cmdargs = fancyopts.fancyopts(
341 339 req.args[:], commands.globalopts, {}
342 340 )
343 341 cmd = cmdargs[0]
344 342 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
345 343 realcmd = aliases[0]
346 344 except (
347 345 error.UnknownCommand,
348 346 error.AmbiguousCommand,
349 347 IndexError,
350 348 getopt.GetoptError,
351 349 ):
352 350 # Don't handle this here. We know the command is
353 351 # invalid, but all we're worried about for now is that
354 352 # it's not a command that server operators expect to
355 353 # be safe to offer to users in a sandbox.
356 354 pass
357 355 if realcmd == b'serve' and b'--stdio' in cmdargs:
358 356 # We want to constrain 'hg serve --stdio' instances pretty
359 357 # closely, as many shared-ssh access tools want to grant
360 358 # access to run *only* 'hg -R $repo serve --stdio'. We
361 359 # restrict to exactly that set of arguments, and prohibit
362 360 # any repo name that starts with '--' to prevent
363 361 # shenanigans wherein a user does something like pass
364 362 # --debugger or --config=ui.debugger=1 as a repo
365 363 # name. This used to actually run the debugger.
366 364 if (
367 365 len(req.args) != 4
368 366 or req.args[0] != b'-R'
369 367 or req.args[1].startswith(b'--')
370 368 or req.args[2] != b'serve'
371 369 or req.args[3] != b'--stdio'
372 370 ):
373 371 raise error.Abort(
374 372 _(b'potentially unsafe serve --stdio invocation: %s')
375 373 % (stringutil.pprint(req.args),)
376 374 )
377 375
378 376 try:
379 377 debugger = b'pdb'
380 378 debugtrace = {b'pdb': pdb.set_trace}
381 379 debugmortem = {b'pdb': pdb.post_mortem}
382 380
383 381 # read --config before doing anything else
384 382 # (e.g. to change trust settings for reading .hg/hgrc)
385 383 cfgs = _parseconfig(req.ui, req.earlyoptions[b'config'])
386 384
387 385 if req.repo:
388 386 # copy configs that were passed on the cmdline (--config) to
389 387 # the repo ui
390 388 for sec, name, val in cfgs:
391 389 req.repo.ui.setconfig(
392 390 sec, name, val, source=b'--config'
393 391 )
394 392
395 393 # developer config: ui.debugger
396 394 debugger = ui.config(b"ui", b"debugger")
397 395 debugmod = pdb
398 396 if not debugger or ui.plain():
399 397 # if we are in HGPLAIN mode, then disable custom debugging
400 398 debugger = b'pdb'
401 399 elif req.earlyoptions[b'debugger']:
402 400 # This import can be slow for fancy debuggers, so only
403 401 # do it when absolutely necessary, i.e. when actual
404 402 # debugging has been requested
405 403 with demandimport.deactivated():
406 404 try:
407 405 debugmod = __import__(debugger)
408 406 except ImportError:
409 407 pass # Leave debugmod = pdb
410 408
411 409 debugtrace[debugger] = debugmod.set_trace
412 410 debugmortem[debugger] = debugmod.post_mortem
413 411
414 412 # enter the debugger before command execution
415 413 if req.earlyoptions[b'debugger']:
416 414 ui.warn(
417 415 _(
418 416 b"entering debugger - "
419 417 b"type c to continue starting hg or h for help\n"
420 418 )
421 419 )
422 420
423 421 if (
424 422 debugger != b'pdb'
425 423 and debugtrace[debugger] == debugtrace[b'pdb']
426 424 ):
427 425 ui.warn(
428 426 _(
429 427 b"%s debugger specified "
430 428 b"but its module was not found\n"
431 429 )
432 430 % debugger
433 431 )
434 432 with demandimport.deactivated():
435 433 debugtrace[debugger]()
436 434 try:
437 435 return _dispatch(req)
438 436 finally:
439 437 ui.flush()
440 438 except: # re-raises
441 439 # enter the debugger when we hit an exception
442 440 if req.earlyoptions[b'debugger']:
443 441 traceback.print_exc()
444 442 debugmortem[debugger](sys.exc_info()[2])
445 443 raise
446 444
447 445 return _callcatch(ui, _runcatchfunc)
448 446
449 447
450 448 def _callcatch(ui, func):
451 449 """like scmutil.callcatch but handles more high-level exceptions about
452 450 config parsing and commands. besides, use handlecommandexception to handle
453 451 uncaught exceptions.
454 452 """
455 453 try:
456 454 return scmutil.callcatch(ui, func)
457 455 except error.AmbiguousCommand as inst:
458 456 ui.warn(
459 457 _(b"hg: command '%s' is ambiguous:\n %s\n")
460 458 % (inst.prefix, b" ".join(inst.matches))
461 459 )
462 460 except error.CommandError as inst:
463 461 if inst.command:
464 462 ui.pager(b'help')
465 463 msgbytes = pycompat.bytestr(inst.message)
466 464 ui.warn(_(b"hg %s: %s\n") % (inst.command, msgbytes))
467 465 commands.help_(ui, inst.command, full=False, command=True)
468 466 else:
469 467 ui.warn(_(b"hg: %s\n") % inst.message)
470 468 ui.warn(_(b"(use 'hg help -v' for a list of global options)\n"))
471 469 except error.ParseError as inst:
472 470 ui.warn(inst.format())
473 471 return -1
474 472 except error.UnknownCommand as inst:
475 473 nocmdmsg = _(b"hg: unknown command '%s'\n") % inst.command
476 474 try:
477 475 # check if the command is in a disabled extension
478 476 # (but don't check for extensions themselves)
479 477 formatted = help.formattedhelp(
480 478 ui, commands, inst.command, unknowncmd=True
481 479 )
482 480 ui.warn(nocmdmsg)
483 481 ui.write(formatted)
484 482 except (error.UnknownCommand, error.Abort):
485 483 suggested = False
486 484 if inst.all_commands:
487 485 sim = error.getsimilar(inst.all_commands, inst.command)
488 486 if sim:
489 487 ui.warn(nocmdmsg)
490 488 ui.warn(b"(%s)\n" % error.similarity_hint(sim))
491 489 suggested = True
492 490 if not suggested:
493 491 ui.warn(nocmdmsg)
494 492 ui.warn(_(b"(use 'hg help' for a list of commands)\n"))
495 493 except IOError:
496 494 raise
497 495 except KeyboardInterrupt:
498 496 raise
499 497 except: # probably re-raises
500 498 if not handlecommandexception(ui):
501 499 raise
502 500
503 501 return -1
504 502
505 503
506 504 def aliasargs(fn, givenargs):
507 505 args = []
508 506 # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
509 507 if not util.safehasattr(fn, b'_origfunc'):
510 508 args = getattr(fn, 'args', args)
511 509 if args:
512 510 cmd = b' '.join(map(procutil.shellquote, args))
513 511
514 512 nums = []
515 513
516 514 def replacer(m):
517 515 num = int(m.group(1)) - 1
518 516 nums.append(num)
519 517 if num < len(givenargs):
520 518 return givenargs[num]
521 519 raise error.Abort(_(b'too few arguments for command alias'))
522 520
523 521 cmd = re.sub(br'\$(\d+|\$)', replacer, cmd)
524 522 givenargs = [x for i, x in enumerate(givenargs) if i not in nums]
525 523 args = pycompat.shlexsplit(cmd)
526 524 return args + givenargs
527 525
528 526
529 527 def aliasinterpolate(name, args, cmd):
530 528 '''interpolate args into cmd for shell aliases
531 529
532 530 This also handles $0, $@ and "$@".
533 531 '''
534 532 # util.interpolate can't deal with "$@" (with quotes) because it's only
535 533 # built to match prefix + patterns.
536 534 replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
537 535 replacemap[b'$0'] = name
538 536 replacemap[b'$$'] = b'$'
539 537 replacemap[b'$@'] = b' '.join(args)
540 538 # Typical Unix shells interpolate "$@" (with quotes) as all the positional
541 539 # parameters, separated out into words. Emulate the same behavior here by
542 540 # quoting the arguments individually. POSIX shells will then typically
543 541 # tokenize each argument into exactly one word.
544 542 replacemap[b'"$@"'] = b' '.join(procutil.shellquote(arg) for arg in args)
545 543 # escape '\$' for regex
546 544 regex = b'|'.join(replacemap.keys()).replace(b'$', br'\$')
547 545 r = re.compile(regex)
548 546 return r.sub(lambda x: replacemap[x.group()], cmd)
549 547
550 548
551 549 class cmdalias(object):
552 550 def __init__(self, ui, name, definition, cmdtable, source):
553 551 self.name = self.cmd = name
554 552 self.cmdname = b''
555 553 self.definition = definition
556 554 self.fn = None
557 555 self.givenargs = []
558 556 self.opts = []
559 557 self.help = b''
560 558 self.badalias = None
561 559 self.unknowncmd = False
562 560 self.source = source
563 561
564 562 try:
565 563 aliases, entry = cmdutil.findcmd(self.name, cmdtable)
566 564 for alias, e in pycompat.iteritems(cmdtable):
567 565 if e is entry:
568 566 self.cmd = alias
569 567 break
570 568 self.shadows = True
571 569 except error.UnknownCommand:
572 570 self.shadows = False
573 571
574 572 if not self.definition:
575 573 self.badalias = _(b"no definition for alias '%s'") % self.name
576 574 return
577 575
578 576 if self.definition.startswith(b'!'):
579 577 shdef = self.definition[1:]
580 578 self.shell = True
581 579
582 580 def fn(ui, *args):
583 581 env = {b'HG_ARGS': b' '.join((self.name,) + args)}
584 582
585 583 def _checkvar(m):
586 584 if m.groups()[0] == b'$':
587 585 return m.group()
588 586 elif int(m.groups()[0]) <= len(args):
589 587 return m.group()
590 588 else:
591 589 ui.debug(
592 590 b"No argument found for substitution "
593 591 b"of %i variable in alias '%s' definition.\n"
594 592 % (int(m.groups()[0]), self.name)
595 593 )
596 594 return b''
597 595
598 596 cmd = re.sub(br'\$(\d+|\$)', _checkvar, shdef)
599 597 cmd = aliasinterpolate(self.name, args, cmd)
600 598 return ui.system(
601 599 cmd, environ=env, blockedtag=b'alias_%s' % self.name
602 600 )
603 601
604 602 self.fn = fn
605 603 self.alias = True
606 604 self._populatehelp(ui, name, shdef, self.fn)
607 605 return
608 606
609 607 try:
610 608 args = pycompat.shlexsplit(self.definition)
611 609 except ValueError as inst:
612 610 self.badalias = _(b"error in definition for alias '%s': %s") % (
613 611 self.name,
614 612 stringutil.forcebytestr(inst),
615 613 )
616 614 return
617 615 earlyopts, args = _earlysplitopts(args)
618 616 if earlyopts:
619 617 self.badalias = _(
620 618 b"error in definition for alias '%s': %s may "
621 619 b"only be given on the command line"
622 620 ) % (self.name, b'/'.join(pycompat.ziplist(*earlyopts)[0]))
623 621 return
624 622 self.cmdname = cmd = args.pop(0)
625 623 self.givenargs = args
626 624
627 625 try:
628 626 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
629 627 if len(tableentry) > 2:
630 628 self.fn, self.opts, cmdhelp = tableentry
631 629 else:
632 630 self.fn, self.opts = tableentry
633 631 cmdhelp = None
634 632
635 633 self.alias = True
636 634 self._populatehelp(ui, name, cmd, self.fn, cmdhelp)
637 635
638 636 except error.UnknownCommand:
639 637 self.badalias = _(
640 638 b"alias '%s' resolves to unknown command '%s'"
641 639 ) % (self.name, cmd,)
642 640 self.unknowncmd = True
643 641 except error.AmbiguousCommand:
644 642 self.badalias = _(
645 643 b"alias '%s' resolves to ambiguous command '%s'"
646 644 ) % (self.name, cmd,)
647 645
648 646 def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
649 647 # confine strings to be passed to i18n.gettext()
650 648 cfg = {}
651 649 for k in (b'doc', b'help', b'category'):
652 650 v = ui.config(b'alias', b'%s:%s' % (name, k), None)
653 651 if v is None:
654 652 continue
655 653 if not encoding.isasciistr(v):
656 654 self.badalias = _(
657 655 b"non-ASCII character in alias definition '%s:%s'"
658 656 ) % (name, k)
659 657 return
660 658 cfg[k] = v
661 659
662 660 self.help = cfg.get(b'help', defaulthelp or b'')
663 661 if self.help and self.help.startswith(b"hg " + cmd):
664 662 # drop prefix in old-style help lines so hg shows the alias
665 663 self.help = self.help[4 + len(cmd) :]
666 664
667 665 self.owndoc = b'doc' in cfg
668 666 doc = cfg.get(b'doc', pycompat.getdoc(fn))
669 667 if doc is not None:
670 668 doc = pycompat.sysstr(doc)
671 669 self.__doc__ = doc
672 670
673 671 self.helpcategory = cfg.get(
674 672 b'category', registrar.command.CATEGORY_NONE
675 673 )
676 674
677 675 @property
678 676 def args(self):
679 677 args = pycompat.maplist(util.expandpath, self.givenargs)
680 678 return aliasargs(self.fn, args)
681 679
682 680 def __getattr__(self, name):
683 681 adefaults = {
684 682 'norepo': True,
685 683 'intents': set(),
686 684 'optionalrepo': False,
687 685 'inferrepo': False,
688 686 }
689 687 if name not in adefaults:
690 688 raise AttributeError(name)
691 689 if self.badalias or util.safehasattr(self, b'shell'):
692 690 return adefaults[name]
693 691 return getattr(self.fn, name)
694 692
695 693 def __call__(self, ui, *args, **opts):
696 694 if self.badalias:
697 695 hint = None
698 696 if self.unknowncmd:
699 697 try:
700 698 # check if the command is in a disabled extension
701 699 cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
702 700 hint = _(b"'%s' is provided by '%s' extension") % (cmd, ext)
703 701 except error.UnknownCommand:
704 702 pass
705 703 raise error.Abort(self.badalias, hint=hint)
706 704 if self.shadows:
707 705 ui.debug(
708 706 b"alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)
709 707 )
710 708
711 709 ui.log(
712 710 b'commandalias',
713 711 b"alias '%s' expands to '%s'\n",
714 712 self.name,
715 713 self.definition,
716 714 )
717 715 if util.safehasattr(self, b'shell'):
718 716 return self.fn(ui, *args, **opts)
719 717 else:
720 718 try:
721 719 return util.checksignature(self.fn)(ui, *args, **opts)
722 720 except error.SignatureError:
723 721 args = b' '.join([self.cmdname] + self.args)
724 722 ui.debug(b"alias '%s' expands to '%s'\n" % (self.name, args))
725 723 raise
726 724
727 725
728 726 class lazyaliasentry(object):
729 727 """like a typical command entry (func, opts, help), but is lazy"""
730 728
731 729 def __init__(self, ui, name, definition, cmdtable, source):
732 730 self.ui = ui
733 731 self.name = name
734 732 self.definition = definition
735 733 self.cmdtable = cmdtable.copy()
736 734 self.source = source
737 735 self.alias = True
738 736
739 737 @util.propertycache
740 738 def _aliasdef(self):
741 739 return cmdalias(
742 740 self.ui, self.name, self.definition, self.cmdtable, self.source
743 741 )
744 742
745 743 def __getitem__(self, n):
746 744 aliasdef = self._aliasdef
747 745 if n == 0:
748 746 return aliasdef
749 747 elif n == 1:
750 748 return aliasdef.opts
751 749 elif n == 2:
752 750 return aliasdef.help
753 751 else:
754 752 raise IndexError
755 753
756 754 def __iter__(self):
757 755 for i in range(3):
758 756 yield self[i]
759 757
760 758 def __len__(self):
761 759 return 3
762 760
763 761
764 762 def addaliases(ui, cmdtable):
765 763 # aliases are processed after extensions have been loaded, so they
766 764 # may use extension commands. Aliases can also use other alias definitions,
767 765 # but only if they have been defined prior to the current definition.
768 766 for alias, definition in ui.configitems(b'alias', ignoresub=True):
769 767 try:
770 768 if cmdtable[alias].definition == definition:
771 769 continue
772 770 except (KeyError, AttributeError):
773 771 # definition might not exist or it might not be a cmdalias
774 772 pass
775 773
776 774 source = ui.configsource(b'alias', alias)
777 775 entry = lazyaliasentry(ui, alias, definition, cmdtable, source)
778 776 cmdtable[alias] = entry
779 777
780 778
781 779 def _parse(ui, args):
782 780 options = {}
783 781 cmdoptions = {}
784 782
785 783 try:
786 784 args = fancyopts.fancyopts(args, commands.globalopts, options)
787 785 except getopt.GetoptError as inst:
788 786 raise error.CommandError(None, stringutil.forcebytestr(inst))
789 787
790 788 if args:
791 789 cmd, args = args[0], args[1:]
792 790 aliases, entry = cmdutil.findcmd(
793 791 cmd, commands.table, ui.configbool(b"ui", b"strict")
794 792 )
795 793 cmd = aliases[0]
796 794 args = aliasargs(entry[0], args)
797 795 defaults = ui.config(b"defaults", cmd)
798 796 if defaults:
799 797 args = (
800 798 pycompat.maplist(util.expandpath, pycompat.shlexsplit(defaults))
801 799 + args
802 800 )
803 801 c = list(entry[1])
804 802 else:
805 803 cmd = None
806 804 c = []
807 805
808 806 # combine global options into local
809 807 for o in commands.globalopts:
810 808 c.append((o[0], o[1], options[o[1]], o[3]))
811 809
812 810 try:
813 811 args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True)
814 812 except getopt.GetoptError as inst:
815 813 raise error.CommandError(cmd, stringutil.forcebytestr(inst))
816 814
817 815 # separate global options back out
818 816 for o in commands.globalopts:
819 817 n = o[1]
820 818 options[n] = cmdoptions[n]
821 819 del cmdoptions[n]
822 820
823 821 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
824 822
825 823
826 824 def _parseconfig(ui, config):
827 825 """parse the --config options from the command line"""
828 826 configs = []
829 827
830 828 for cfg in config:
831 829 try:
832 830 name, value = [cfgelem.strip() for cfgelem in cfg.split(b'=', 1)]
833 831 section, name = name.split(b'.', 1)
834 832 if not section or not name:
835 833 raise IndexError
836 834 ui.setconfig(section, name, value, b'--config')
837 835 configs.append((section, name, value))
838 836 except (IndexError, ValueError):
839 837 raise error.Abort(
840 838 _(
841 839 b'malformed --config option: %r '
842 840 b'(use --config section.name=value)'
843 841 )
844 842 % pycompat.bytestr(cfg)
845 843 )
846 844
847 845 return configs
848 846
849 847
850 848 def _earlyparseopts(ui, args):
851 849 options = {}
852 850 fancyopts.fancyopts(
853 851 args,
854 852 commands.globalopts,
855 853 options,
856 854 gnu=not ui.plain(b'strictflags'),
857 855 early=True,
858 856 optaliases={b'repository': [b'repo']},
859 857 )
860 858 return options
861 859
862 860
863 861 def _earlysplitopts(args):
864 862 """Split args into a list of possible early options and remainder args"""
865 863 shortoptions = b'R:'
866 864 # TODO: perhaps 'debugger' should be included
867 865 longoptions = [b'cwd=', b'repository=', b'repo=', b'config=']
868 866 return fancyopts.earlygetopt(
869 867 args, shortoptions, longoptions, gnu=True, keepsep=True
870 868 )
871 869
872 870
873 871 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
874 872 # run pre-hook, and abort if it fails
875 873 hook.hook(
876 874 lui,
877 875 repo,
878 876 b"pre-%s" % cmd,
879 877 True,
880 878 args=b" ".join(fullargs),
881 879 pats=cmdpats,
882 880 opts=cmdoptions,
883 881 )
884 882 try:
885 883 ret = _runcommand(ui, options, cmd, d)
886 884 # run post-hook, passing command result
887 885 hook.hook(
888 886 lui,
889 887 repo,
890 888 b"post-%s" % cmd,
891 889 False,
892 890 args=b" ".join(fullargs),
893 891 result=ret,
894 892 pats=cmdpats,
895 893 opts=cmdoptions,
896 894 )
897 895 except Exception:
898 896 # run failure hook and re-raise
899 897 hook.hook(
900 898 lui,
901 899 repo,
902 900 b"fail-%s" % cmd,
903 901 False,
904 902 args=b" ".join(fullargs),
905 903 pats=cmdpats,
906 904 opts=cmdoptions,
907 905 )
908 906 raise
909 907 return ret
910 908
911 909
912 910 def _readsharedsourceconfig(ui, path):
913 911 """if the current repository is shared one, this tries to read
914 912 .hg/hgrc of shared source if we are in share-safe mode
915 913
916 914 Config read is loaded into the ui object passed
917 915
918 916 This should be called before reading .hg/hgrc or the main repo
919 917 as that overrides config set in shared source"""
920 918 try:
921 919 with open(os.path.join(path, b".hg", b"requires"), "rb") as fp:
922 920 requirements = set(fp.read().splitlines())
923 921 if not (
924 922 requirementsmod.SHARESAFE_REQUIREMENT in requirements
925 923 and requirementsmod.SHARED_REQUIREMENT in requirements
926 924 ):
927 925 return
928 926 hgvfs = vfs.vfs(os.path.join(path, b".hg"))
929 927 sharedvfs = localrepo._getsharedvfs(hgvfs, requirements)
930 928 ui.readconfig(sharedvfs.join(b"hgrc"), path)
931 929 except IOError:
932 930 pass
933 931
934 932
935 933 def _getlocal(ui, rpath, wd=None):
936 934 """Return (path, local ui object) for the given target path.
937 935
938 936 Takes paths in [cwd]/.hg/hgrc into account."
939 937 """
940 938 if wd is None:
941 939 try:
942 940 wd = encoding.getcwd()
943 941 except OSError as e:
944 942 raise error.Abort(
945 943 _(b"error getting current working directory: %s")
946 944 % encoding.strtolocal(e.strerror)
947 945 )
948 946
949 947 path = cmdutil.findrepo(wd) or b""
950 948 if not path:
951 949 lui = ui
952 950 else:
953 951 lui = ui.copy()
954 952 if rcutil.use_repo_hgrc():
955 953 _readsharedsourceconfig(lui, path)
956 954 lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
957 955 lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
958 956
959 957 if rpath:
960 958 path = lui.expandpath(rpath)
961 959 lui = ui.copy()
962 960 if rcutil.use_repo_hgrc():
963 961 _readsharedsourceconfig(lui, path)
964 962 lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
965 963 lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
966 964
967 965 return path, lui
968 966
969 967
970 968 def _checkshellalias(lui, ui, args):
971 969 """Return the function to run the shell alias, if it is required"""
972 970 options = {}
973 971
974 972 try:
975 973 args = fancyopts.fancyopts(args, commands.globalopts, options)
976 974 except getopt.GetoptError:
977 975 return
978 976
979 977 if not args:
980 978 return
981 979
982 980 cmdtable = commands.table
983 981
984 982 cmd = args[0]
985 983 try:
986 984 strict = ui.configbool(b"ui", b"strict")
987 985 aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
988 986 except (error.AmbiguousCommand, error.UnknownCommand):
989 987 return
990 988
991 989 cmd = aliases[0]
992 990 fn = entry[0]
993 991
994 992 if cmd and util.safehasattr(fn, b'shell'):
995 993 # shell alias shouldn't receive early options which are consumed by hg
996 994 _earlyopts, args = _earlysplitopts(args)
997 995 d = lambda: fn(ui, *args[1:])
998 996 return lambda: runcommand(
999 997 lui, None, cmd, args[:1], ui, options, d, [], {}
1000 998 )
1001 999
1002 1000
def _dispatch(req):
    """Parse the command line in ``req``, set up configuration and
    extensions, locate/open the repository if needed, and run the
    requested command, returning its result.

    May recurse (with ``--repository`` injected) when the repo path is
    inferred from command arguments.
    """
    args = req.args
    ui = req.ui

    # check for cwd
    cwd = req.earlyoptions[b'cwd']
    if cwd:
        os.chdir(cwd)

    rpath = req.earlyoptions[b'repository']
    path, lui = _getlocal(ui, rpath)

    # every ui object whose config we may need to adjust below
    uis = {ui, lui}

    if req.repo:
        uis.add(req.repo.ui)

    if (
        req.earlyoptions[b'verbose']
        or req.earlyoptions[b'debug']
        or req.earlyoptions[b'quiet']
    ):
        for opt in (b'verbose', b'debug', b'quiet'):
            val = pycompat.bytestr(bool(req.earlyoptions[opt]))
            for ui_ in uis:
                ui_.setconfig(b'ui', opt, val, b'--' + opt)

    if req.earlyoptions[b'profile']:
        for ui_ in uis:
            ui_.setconfig(b'profiling', b'enabled', b'true', b'--profile')

    profile = lui.configbool(b'profiling', b'enabled')
    with profiling.profile(lui, enabled=profile) as profiler:
        # Configure extensions in phases: uisetup, extsetup, cmdtable, and
        # reposetup
        extensions.loadall(lui)
        # Propagate any changes to lui.__class__ by extensions
        ui.__class__ = lui.__class__

        # (uisetup and extsetup are handled in extensions.loadall)

        # (reposetup is handled in hg.repository)

        addaliases(lui, commands.table)

        # All aliases and commands are completely defined, now.
        # Check abbreviation/ambiguity of shell alias.
        shellaliasfn = _checkshellalias(lui, ui, args)
        if shellaliasfn:
            # no additional configs will be set, set up the ui instances
            for ui_ in uis:
                extensions.populateui(ui_)
            return shellaliasfn()

        # check for fallback encoding
        fallback = lui.config(b'ui', b'fallbackencoding')
        if fallback:
            encoding.fallbackencoding = fallback

        fullargs = args
        cmd, func, args, options, cmdoptions = _parse(lui, args)

        # store the canonical command name in request object for later access
        req.canonical_command = cmd

        # Options already consumed during the early-parse pass must agree
        # with what the full parse produced; otherwise an abbreviation was
        # used that the early pass could not have honored.
        if options[b"config"] != req.earlyoptions[b"config"]:
            raise error.Abort(_(b"option --config may not be abbreviated!"))
        if options[b"cwd"] != req.earlyoptions[b"cwd"]:
            raise error.Abort(_(b"option --cwd may not be abbreviated!"))
        if options[b"repository"] != req.earlyoptions[b"repository"]:
            raise error.Abort(
                _(
                    b"option -R has to be separated from other options (e.g. not "
                    b"-qR) and --repository may only be abbreviated as --repo!"
                )
            )
        if options[b"debugger"] != req.earlyoptions[b"debugger"]:
            raise error.Abort(_(b"option --debugger may not be abbreviated!"))
        # don't validate --profile/--traceback, which can be enabled from now

        if options[b"encoding"]:
            encoding.encoding = options[b"encoding"]
        if options[b"encodingmode"]:
            encoding.encodingmode = options[b"encodingmode"]
        if options[b"time"]:

            def get_times():
                t = os.times()
                if t[4] == 0.0:
                    # Windows leaves this as zero, so use time.perf_counter()
                    t = (t[0], t[1], t[2], t[3], util.timer())
                return t

            s = get_times()

            def print_time():
                t = get_times()
                ui.warn(
                    _(b"time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n")
                    % (
                        t[4] - s[4],
                        t[0] - s[0],
                        t[2] - s[2],
                        t[1] - s[1],
                        t[3] - s[3],
                    )
                )

            ui.atexit(print_time)
        if options[b"profile"]:
            profiler.start()

        # if abbreviated version of this were used, take them in account, now
        if options[b'verbose'] or options[b'debug'] or options[b'quiet']:
            for opt in (b'verbose', b'debug', b'quiet'):
                if options[opt] == req.earlyoptions[opt]:
                    continue
                val = pycompat.bytestr(bool(options[opt]))
                for ui_ in uis:
                    ui_.setconfig(b'ui', opt, val, b'--' + opt)

        if options[b'traceback']:
            for ui_ in uis:
                ui_.setconfig(b'ui', b'traceback', b'on', b'--traceback')

        if options[b'noninteractive']:
            for ui_ in uis:
                ui_.setconfig(b'ui', b'interactive', b'off', b'-y')

        if cmdoptions.get(b'insecure', False):
            for ui_ in uis:
                ui_.insecureconnections = True

        # setup color handling before pager, because setting up pager
        # might cause incorrect console information
        coloropt = options[b'color']
        for ui_ in uis:
            if coloropt:
                ui_.setconfig(b'ui', b'color', coloropt, b'--color')
            color.setup(ui_)

        if stringutil.parsebool(options[b'pager']):
            # ui.pager() expects 'internal-always-' prefix in this case
            ui.pager(b'internal-always-' + cmd)
        elif options[b'pager'] != b'auto':
            for ui_ in uis:
                ui_.disablepager()

        # configs are fully loaded, set up the ui instances
        for ui_ in uis:
            extensions.populateui(ui_)

        if options[b'version']:
            return commands.version_(ui)
        if options[b'help']:
            return commands.help_(ui, cmd, command=cmd is not None)
        elif not cmd:
            return commands.help_(ui, b'shortlist')

        repo = None
        cmdpats = args[:]
        assert func is not None  # help out pytype
        if not func.norepo:
            # use the repo from the request only if we don't have -R
            if not rpath and not cwd:
                repo = req.repo

            if repo:
                # set the descriptors of the repo ui to those of ui
                repo.ui.fin = ui.fin
                repo.ui.fout = ui.fout
                repo.ui.ferr = ui.ferr
                repo.ui.fmsg = ui.fmsg
            else:
                try:
                    repo = hg.repository(
                        ui,
                        path=path,
                        presetupfuncs=req.prereposetups,
                        intents=func.intents,
                    )
                    if not repo.local():
                        raise error.Abort(
                            _(b"repository '%s' is not local") % path
                        )
                    repo.ui.setconfig(
                        b"bundle", b"mainreporoot", repo.root, b'repo'
                    )
                except error.RequirementError:
                    raise
                except error.RepoError:
                    if rpath:  # invalid -R path
                        raise
                    if not func.optionalrepo:
                        if func.inferrepo and args and not path:
                            # try to infer -R from command args
                            repos = pycompat.maplist(cmdutil.findrepo, args)
                            guess = repos[0]
                            # only infer when every argument agrees on the
                            # same repository
                            if guess and repos.count(guess) == len(repos):
                                req.args = [b'--repository', guess] + fullargs
                                req.earlyoptions[b'repository'] = guess
                                return _dispatch(req)
                        if not path:
                            raise error.RepoError(
                                _(
                                    b"no repository found in"
                                    b" '%s' (.hg not found)"
                                )
                                % encoding.getcwd()
                            )
                        raise
            if repo:
                ui = repo.ui
                if options[b'hidden']:
                    repo = repo.unfiltered()
                args.insert(0, repo)
            elif rpath:
                ui.warn(_(b"warning: --repository ignored\n"))

        msg = _formatargs(fullargs)
        ui.log(b"command", b'%s\n', msg)
        strcmdopt = pycompat.strkwargs(cmdoptions)
        d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
        try:
            return runcommand(
                lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions
            )
        finally:
            # close a repo we opened ourselves (not one handed in via req)
            if repo and repo != req.repo:
                repo.close()
1233 1231
1234 1232
def _runcommand(ui, options, cmd, cmdfunc):
    """Run a command function, possibly with profiling enabled."""
    span = tracing.log("Running %s command" % cmd)
    try:
        with span:
            return cmdfunc()
    except error.SignatureError:
        # the command was called with arguments not matching its signature
        raise error.CommandError(cmd, _(b'invalid arguments'))
1242 1240
1243 1241
def _exceptionwarning(ui):
    """Produce a warning message for the current active exception

    Returns a byte string blaming the most plausible culprit: a
    third-party extension whose ``testedwith`` does not cover this
    Mercurial version, or (failing that) a pointer to the configured
    support contact / upstream bug tracker.  Always appends Python,
    Mercurial, and loaded-extension details.
    """

    # For compatibility checking, we discard the portion of the hg
    # version after the + on the assumption that if a "normal
    # user" is running a build with a + in it the packager
    # probably built from fairly close to a tag and anyone with a
    # 'make local' copy of hg (where the version number can be out
    # of date) will be clueful enough to notice the implausible
    # version number and try updating.
    ct = util.versiontuple(n=2)
    # worst = (extension name, nearest tested version, bug report contact)
    worst = None, ct, b''
    if ui.config(b'ui', b'supportcontact') is None:
        for name, mod in extensions.extensions():
            # 'testedwith' should be bytes, but not all extensions are ported
            # to py3 and we don't want UnicodeException because of that.
            testedwith = stringutil.forcebytestr(
                getattr(mod, 'testedwith', b'')
            )
            report = getattr(mod, 'buglink', _(b'the extension author.'))
            if not testedwith.strip():
                # We found an untested extension. It's likely the culprit.
                worst = name, b'unknown', report
                break

            # Never blame on extensions bundled with Mercurial.
            if extensions.ismoduleinternal(mod):
                continue

            tested = [util.versiontuple(t, 2) for t in testedwith.split()]
            if ct in tested:
                continue

            # pick the extension whose newest supported version is the
            # furthest behind the running version
            lower = [t for t in tested if t < ct]
            nearest = max(lower or tested)
            if worst[0] is None or nearest < worst[1]:
                worst = name, nearest, report
    if worst[0] is not None:
        name, testedwith, report = worst
        if not isinstance(testedwith, (bytes, str)):
            testedwith = b'.'.join(
                [stringutil.forcebytestr(c) for c in testedwith]
            )
        warning = _(
            b'** Unknown exception encountered with '
            b'possibly-broken third-party extension %s\n'
            b'** which supports versions %s of Mercurial.\n'
            b'** Please disable %s and try your action again.\n'
            b'** If that fixes the bug please report it to %s\n'
        ) % (name, testedwith, name, stringutil.forcebytestr(report))
    else:
        # no extension to blame: point the user at the support contact,
        # defaulting to the upstream bug tracker
        bugtracker = ui.config(b'ui', b'supportcontact')
        if bugtracker is None:
            bugtracker = _(b"https://mercurial-scm.org/wiki/BugTracker")
        warning = (
            _(
                b"** unknown exception encountered, "
                b"please report by visiting\n** "
            )
            + bugtracker
            + b'\n'
        )
    sysversion = pycompat.sysbytes(sys.version).replace(b'\n', b'')
    warning += (
        (_(b"** Python %s\n") % sysversion)
        + (_(b"** Mercurial Distributed SCM (version %s)\n") % util.version())
        + (
            _(b"** Extensions loaded: %s\n")
            % b", ".join([x[0] for x in extensions.extensions()])
        )
    )
    return warning
1316 1314
1317 1315
def handlecommandexception(ui):
    """Produce a warning message for broken commands

    Called when handling an exception; the exception is reraised if
    this function returns False, ignored otherwise.
    """
    msg = _exceptionwarning(ui)
    tb = pycompat.sysbytes(traceback.format_exc())
    ui.log(b"commandexception", b"%s\n%s\n", msg, tb)
    ui.warn(msg)
    return False  # re-raise the exception
@@ -1,557 +1,565 b''
1 1 # error.py - Mercurial exceptions
2 2 #
3 3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Mercurial exceptions.
9 9
10 10 This allows us to catch exceptions at higher levels without forcing
11 11 imports.
12 12 """
13 13
14 14 from __future__ import absolute_import
15 15
16 16 import difflib
17 17
18 18 # Do not import anything but pycompat here, please
19 19 from . import pycompat
20 20
21 21
def _tobytes(exc):
    """Byte-stringify exception in the same way as BaseException_str()"""
    args = exc.args
    if not args:
        return b''
    if len(args) == 1:
        # single argument: render it bare, without parentheses
        return pycompat.bytestr(args[0])
    quoted = [b"'%s'" % pycompat.bytestr(a) for a in args]
    return b'(%s)' % b', '.join(quoted)
29 29
30 30
class Hint(object):
    """Mix-in that attaches an optional ``hint`` to an error.

    Must come first in the inheritance list so it can strip the ``hint``
    keyword argument before delegating the remaining arguments to the
    exception class.
    """

    def __init__(self, *args, **kw):
        hint = kw.pop('hint', None)
        self.hint = hint
        super(Hint, self).__init__(*args, **kw)
41 41
42 42
43 43 class StorageError(Hint, Exception):
44 44 """Raised when an error occurs in a storage layer.
45 45
46 46 Usually subclassed by a storage-specific exception.
47 47 """
48 48
49 49 __bytes__ = _tobytes
50 50
51 51
52 52 class RevlogError(StorageError):
53 53 pass
54 54
55 55
56 56 class SidedataHashError(RevlogError):
57 57 def __init__(self, key, expected, got):
58 58 self.sidedatakey = key
59 59 self.expecteddigest = expected
60 60 self.actualdigest = got
61 61
62 62
63 63 class FilteredIndexError(IndexError):
64 64 __bytes__ = _tobytes
65 65
66 66
class LookupError(RevlogError, KeyError):
    """Raised when looking up ``name`` in revlog ``index`` fails.

    Also inherits from KeyError so callers doing dict-style lookups can
    catch it as such.
    """

    def __init__(self, name, index, message):
        self.name = name
        self.index = index
        # this can't be called 'message' because at least some installs of
        # Python 2.6+ complain about the 'message' property being deprecated
        self.lookupmessage = message
        if isinstance(name, bytes) and len(name) == 20:
            # 20 bytes is a full binary node id; import lazily because this
            # module deliberately imports nothing but pycompat at top level
            from .node import short

            name = short(name)
        # if name is a binary node, it can be None
        RevlogError.__init__(
            self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
        )

    def __bytes__(self):
        # resolve to RevlogError's formatting, not KeyError's
        return RevlogError.__bytes__(self)

    def __str__(self):
        return RevlogError.__str__(self)
88 88
89 89
90 90 class AmbiguousPrefixLookupError(LookupError):
91 91 pass
92 92
93 93
94 94 class FilteredLookupError(LookupError):
95 95 pass
96 96
97 97
98 98 class ManifestLookupError(LookupError):
99 99 pass
100 100
101 101
102 102 class CommandError(Exception):
103 103 """Exception raised on errors in parsing the command line."""
104 104
105 105 def __init__(self, command, message):
106 106 self.command = command
107 107 self.message = message
108 108 super(CommandError, self).__init__()
109 109
110 110 __bytes__ = _tobytes
111 111
112 112
113 113 class UnknownCommand(Exception):
114 114 """Exception raised if command is not in the command table."""
115 115
116 116 def __init__(self, command, all_commands=None):
117 117 self.command = command
118 118 self.all_commands = all_commands
119 119 super(UnknownCommand, self).__init__()
120 120
121 121 __bytes__ = _tobytes
122 122
123 123
124 124 class AmbiguousCommand(Exception):
125 125 """Exception raised if command shortcut matches more than one command."""
126 126
127 127 def __init__(self, prefix, matches):
128 128 self.prefix = prefix
129 129 self.matches = matches
130 130 super(AmbiguousCommand, self).__init__()
131 131
132 132 __bytes__ = _tobytes
133 133
134 134
135 135 class WorkerError(Exception):
136 136 """Exception raised when a worker process dies."""
137 137
138 138 def __init__(self, status_code):
139 139 self.status_code = status_code
140 140
141 141
142 142 class InterventionRequired(Hint, Exception):
143 143 """Exception raised when a command requires human intervention."""
144 144
145 145 __bytes__ = _tobytes
146 146
147 147
148 148 class ConflictResolutionRequired(InterventionRequired):
149 149 """Exception raised when a continuable command required merge conflict resolution."""
150 150
151 151 def __init__(self, opname):
152 152 from .i18n import _
153 153
154 154 self.opname = opname
155 155 InterventionRequired.__init__(
156 156 self,
157 157 _(
158 158 b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
159 159 )
160 160 % opname,
161 161 )
162 162
163 163
class Abort(Hint, Exception):
    """Raised if a command needs to print an error and exit.

    ``message`` is a byte string; an optional ``hint`` (via the Hint
    mix-in) is appended on its own line by format().
    """

    def __init__(self, message, hint=None):
        self.message = message
        self.hint = hint
        # Pass the message into the Exception constructor to help extensions
        # that look for exc.args[0].
        Exception.__init__(self, message)

    def __bytes__(self):
        # the raw, untranslated byte-string message
        return self.message

    if pycompat.ispy3:

        def __str__(self):
            # the output would be unreadable if the message was translated,
            # but do not replace it with encoding.strfromlocal(), which
            # may raise another exception.
            return pycompat.sysstr(self.__bytes__())

    def format(self):
        """Return the user-facing b"abort: <message>\\n" text, followed by
        a b"(<hint>)\\n" line when a hint is set."""
        # lazy import: this module imports nothing but pycompat at top level
        from .i18n import _

        message = _(b"abort: %s\n") % self.message
        if self.hint:
            message += _(b"(%s)\n") % self.hint
        return message
192
185 193
186 194 class InputError(Abort):
187 195 """Indicates that the user made an error in their input.
188 196
189 197 Examples: Invalid command, invalid flags, invalid revision.
190 198 """
191 199
192 200
193 201 class StateError(Abort):
194 202 """Indicates that the operation might work if retried in a different state.
195 203
196 204 Examples: Unresolved merge conflicts, unfinished operations.
197 205 """
198 206
199 207
200 208 class CanceledError(Abort):
201 209 """Indicates that the user canceled the operation.
202 210
203 211 Examples: Close commit editor with error status, quit chistedit.
204 212 """
205 213
206 214
207 215 class HookLoadError(Abort):
208 216 """raised when loading a hook fails, aborting an operation
209 217
210 218 Exists to allow more specialized catching."""
211 219
212 220
213 221 class HookAbort(Abort):
214 222 """raised when a validation hook fails, aborting an operation
215 223
216 224 Exists to allow more specialized catching."""
217 225
218 226
219 227 class ConfigError(Abort):
220 228 """Exception raised when parsing config files"""
221 229
222 230
223 231 class UpdateAbort(Abort):
224 232 """Raised when an update is aborted for destination issue"""
225 233
226 234
227 235 class MergeDestAbort(Abort):
228 236 """Raised when an update is aborted for destination issues"""
229 237
230 238
231 239 class NoMergeDestAbort(MergeDestAbort):
232 240 """Raised when an update is aborted because there is nothing to merge"""
233 241
234 242
235 243 class ManyMergeDestAbort(MergeDestAbort):
236 244 """Raised when an update is aborted because destination is ambiguous"""
237 245
238 246
239 247 class ResponseExpected(Abort):
240 248 """Raised when an EOF is received for a prompt"""
241 249
242 250 def __init__(self):
243 251 from .i18n import _
244 252
245 253 Abort.__init__(self, _(b'response expected'))
246 254
247 255
248 256 class OutOfBandError(Hint, Exception):
249 257 """Exception raised when a remote repo reports failure"""
250 258
251 259 __bytes__ = _tobytes
252 260
253 261
class ParseError(Hint, Exception):
    """Raised when parsing config files and {rev,file}sets (msg[, pos])"""

    def __init__(self, message, location=None, hint=None):
        self.message = message
        # where the parse failed (e.g. a position or source); may be None
        self.location = location
        self.hint = hint
        # Pass the message and possibly location into the Exception constructor
        # to help code that looks for exc.args.
        if location is not None:
            Exception.__init__(self, message, location)
        else:
            Exception.__init__(self, message)

    __bytes__ = _tobytes

    def format(self):
        """Return the user-facing b"hg: parse error..." text, including the
        location when known and a trailing b"(<hint>)\\n" line when set."""
        # lazy import: this module imports nothing but pycompat at top level
        from .i18n import _

        if self.location is not None:
            message = _(b"hg: parse error at %s: %s\n") % (
                pycompat.bytestr(self.location),
                self.message,
            )
        else:
            message = _(b"hg: parse error: %s\n") % self.message
        if self.hint:
            message += _(b"(%s)\n") % self.hint
        return message
283 291
284 292
285 293 class PatchError(Exception):
286 294 __bytes__ = _tobytes
287 295
288 296
def getsimilar(symbols, value):
    """Return the entries of ``symbols`` that closely resemble ``value``."""

    def _score(candidate):
        return difflib.SequenceMatcher(None, value, candidate).ratio()

    # The 0.6 cutoff for similarity here is pretty arbitrary. It should
    # probably be investigated and tweaked.
    matches = []
    for sym in symbols:
        if _score(sym) > 0.6:
            matches.append(sym)
    return matches
294 302
295 303
def similarity_hint(similar):
    """Build a "did you mean ...?" hint from candidate names, or None."""
    from .i18n import _

    if not similar:
        return None
    if len(similar) == 1:
        return _(b"did you mean %s?") % similar[0]
    joined = b", ".join(sorted(similar))
    return _(b"did you mean one of %s?") % joined
306 314
307 315
class UnknownIdentifier(ParseError):
    """Exception raised when a {rev,file}set references an unknown identifier"""

    def __init__(self, function, symbols):
        from .i18n import _

        # suggest close matches from the known symbol table, if any
        hint = similarity_hint(getsimilar(symbols, function))

        ParseError.__init__(
            self, _(b"unknown identifier: %s") % function, hint=hint
        )
320 328
321 329
322 330 class RepoError(Hint, Exception):
323 331 __bytes__ = _tobytes
324 332
325 333
326 334 class RepoLookupError(RepoError):
327 335 pass
328 336
329 337
330 338 class FilteredRepoLookupError(RepoLookupError):
331 339 pass
332 340
333 341
334 342 class CapabilityError(RepoError):
335 343 pass
336 344
337 345
338 346 class RequirementError(RepoError):
339 347 """Exception raised if .hg/requires has an unknown entry."""
340 348
341 349
342 350 class StdioError(IOError):
343 351 """Raised if I/O to stdout or stderr fails"""
344 352
345 353 def __init__(self, err):
346 354 IOError.__init__(self, err.errno, err.strerror)
347 355
348 356 # no __bytes__() because error message is derived from the standard IOError
349 357
350 358
class UnsupportedMergeRecords(Abort):
    """Raised when the merge state contains record types we cannot handle."""

    def __init__(self, recordtypes):
        from .i18n import _

        self.recordtypes = sorted(recordtypes)
        joined = b' '.join(self.recordtypes)
        message = _(b'unsupported merge state records: %s') % joined
        hint = _(
            b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
            b'more information'
        )
        Abort.__init__(self, message, hint=hint)
365 373
366 374
367 375 class UnknownVersion(Abort):
368 376 """generic exception for aborting from an encounter with an unknown version
369 377 """
370 378
371 379 def __init__(self, msg, hint=None, version=None):
372 380 self.version = version
373 381 super(UnknownVersion, self).__init__(msg, hint=hint)
374 382
375 383
376 384 class LockError(IOError):
377 385 def __init__(self, errno, strerror, filename, desc):
378 386 IOError.__init__(self, errno, strerror, filename)
379 387 self.desc = desc
380 388
381 389 # no __bytes__() because error message is derived from the standard IOError
382 390
383 391
384 392 class LockHeld(LockError):
385 393 def __init__(self, errno, filename, desc, locker):
386 394 LockError.__init__(self, errno, b'Lock held', filename, desc)
387 395 self.locker = locker
388 396
389 397
390 398 class LockUnavailable(LockError):
391 399 pass
392 400
393 401
394 402 # LockError is for errors while acquiring the lock -- this is unrelated
395 403 class LockInheritanceContractViolation(RuntimeError):
396 404 __bytes__ = _tobytes
397 405
398 406
399 407 class ResponseError(Exception):
400 408 """Raised to print an error with part of output and exit."""
401 409
402 410 __bytes__ = _tobytes
403 411
404 412
405 413 # derived from KeyboardInterrupt to simplify some breakout code
406 414 class SignalInterrupt(KeyboardInterrupt):
407 415 """Exception raised on SIGTERM and SIGHUP."""
408 416
409 417
410 418 class SignatureError(Exception):
411 419 __bytes__ = _tobytes
412 420
413 421
414 422 class PushRaced(RuntimeError):
415 423 """An exception raised during unbundling that indicate a push race"""
416 424
417 425 __bytes__ = _tobytes
418 426
419 427
420 428 class ProgrammingError(Hint, RuntimeError):
421 429 """Raised if a mercurial (core or extension) developer made a mistake"""
422 430
423 431 def __init__(self, msg, *args, **kwargs):
424 432 # On Python 3, turn the message back into a string since this is
425 433 # an internal-only error that won't be printed except in a
426 434 # stack traces.
427 435 msg = pycompat.sysstr(msg)
428 436 super(ProgrammingError, self).__init__(msg, *args, **kwargs)
429 437
430 438 __bytes__ = _tobytes
431 439
432 440
433 441 class WdirUnsupported(Exception):
434 442 """An exception which is raised when 'wdir()' is not supported"""
435 443
436 444 __bytes__ = _tobytes
437 445
438 446
439 447 # bundle2 related errors
440 448 class BundleValueError(ValueError):
441 449 """error raised when bundle2 cannot be processed"""
442 450
443 451 __bytes__ = _tobytes
444 452
445 453
class BundleUnknownFeatureError(BundleValueError):
    """Raised when a bundle2 stream or part uses an unsupported feature.

    ``params`` lists the offending parameter names and ``values`` (when
    provided) carries their values, aligned index-for-index with
    ``params``; a None value means the parameter had no value.
    """

    def __init__(self, parttype=None, params=(), values=()):
        self.parttype = parttype
        self.params = params
        self.values = values
        if self.parttype is None:
            msg = b'Stream Parameter'
        else:
            msg = parttype
        entries = self.params
        if self.params and self.values:
            assert len(self.params) == len(self.values)
            entries = []
            for idx, par in enumerate(self.params):
                val = self.values[idx]
                if val is None:
                    # parameter without a value: show just its name.
                    # (The previous code appended ``val`` — i.e. None —
                    # which made the b', '.join() below raise TypeError.)
                    entries.append(par)
                else:
                    entries.append(
                        b"%s=%r" % (par, pycompat.maybebytestr(val))
                    )
        if entries:
            msg = b'%s - %s' % (msg, b', '.join(entries))
        ValueError.__init__(self, msg)
468 476
469 477
470 478 class ReadOnlyPartError(RuntimeError):
471 479 """error raised when code tries to alter a part being generated"""
472 480
473 481 __bytes__ = _tobytes
474 482
475 483
476 484 class PushkeyFailed(Abort):
477 485 """error raised when a pushkey part failed to update a value"""
478 486
479 487 def __init__(
480 488 self, partid, namespace=None, key=None, new=None, old=None, ret=None
481 489 ):
482 490 self.partid = partid
483 491 self.namespace = namespace
484 492 self.key = key
485 493 self.new = new
486 494 self.old = old
487 495 self.ret = ret
488 496 # no i18n expected to be processed into a better message
489 497 Abort.__init__(
490 498 self, b'failed to update value for "%s/%s"' % (namespace, key)
491 499 )
492 500
493 501
494 502 class CensoredNodeError(StorageError):
495 503 """error raised when content verification fails on a censored node
496 504
497 505 Also contains the tombstone data substituted for the uncensored data.
498 506 """
499 507
500 508 def __init__(self, filename, node, tombstone):
501 509 from .node import short
502 510
503 511 StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
504 512 self.tombstone = tombstone
505 513
506 514
507 515 class CensoredBaseError(StorageError):
508 516 """error raised when a delta is rejected because its base is censored
509 517
510 518 A delta based on a censored revision must be formed as single patch
511 519 operation which replaces the entire base with new content. This ensures
512 520 the delta may be applied by clones which have not censored the base.
513 521 """
514 522
515 523
516 524 class InvalidBundleSpecification(Exception):
517 525 """error raised when a bundle specification is invalid.
518 526
519 527 This is used for syntax errors as opposed to support errors.
520 528 """
521 529
522 530 __bytes__ = _tobytes
523 531
524 532
525 533 class UnsupportedBundleSpecification(Exception):
526 534 """error raised when a bundle specification is not supported."""
527 535
528 536 __bytes__ = _tobytes
529 537
530 538
531 539 class CorruptedState(Exception):
532 540 """error raised when a command is not able to read its state from file"""
533 541
534 542 __bytes__ = _tobytes
535 543
536 544
537 545 class PeerTransportError(Abort):
538 546 """Transport-level I/O error when communicating with a peer repo."""
539 547
540 548
541 549 class InMemoryMergeConflictsError(Exception):
542 550 """Exception raised when merge conflicts arose during an in-memory merge."""
543 551
544 552 __bytes__ = _tobytes
545 553
546 554
547 555 class WireprotoCommandError(Exception):
548 556 """Represents an error during execution of a wire protocol command.
549 557
550 558 Should only be thrown by wire protocol version 2 commands.
551 559
552 560 The error is a formatter string and an optional iterable of arguments.
553 561 """
554 562
555 563 def __init__(self, message, args=None):
556 564 self.message = message
557 565 self.messageargs = args
@@ -1,2312 +1,2310 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import os
13 13 import posixpath
14 14 import re
15 15 import subprocess
16 16 import weakref
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 bin,
21 21 hex,
22 22 nullid,
23 23 nullrev,
24 24 short,
25 25 wdirid,
26 26 wdirrev,
27 27 )
28 28 from .pycompat import getattr
29 29 from .thirdparty import attr
30 30 from . import (
31 31 copies as copiesmod,
32 32 encoding,
33 33 error,
34 34 match as matchmod,
35 35 obsolete,
36 36 obsutil,
37 37 pathutil,
38 38 phases,
39 39 policy,
40 40 pycompat,
41 41 requirements as requirementsmod,
42 42 revsetlang,
43 43 similar,
44 44 smartset,
45 45 url,
46 46 util,
47 47 vfs,
48 48 )
49 49
50 50 from .utils import (
51 51 hashutil,
52 52 procutil,
53 53 stringutil,
54 54 )
55 55
56 56 if pycompat.iswindows:
57 57 from . import scmwindows as scmplatform
58 58 else:
59 59 from . import scmposix as scmplatform
60 60
61 61 parsers = policy.importmod('parsers')
62 62 rustrevlog = policy.importrust('revlog')
63 63
64 64 termsize = scmplatform.termsize
65 65
66 66
@attr.s(slots=True, repr=False)
class status(object):
    '''Struct with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only
    relevant to the working copy.
    '''

    # Each attribute holds a list of file names in that state.
    modified = attr.ib(default=attr.Factory(list))
    added = attr.ib(default=attr.Factory(list))
    removed = attr.ib(default=attr.Factory(list))
    deleted = attr.ib(default=attr.Factory(list))
    unknown = attr.ib(default=attr.Factory(list))
    ignored = attr.ib(default=attr.Factory(list))
    clean = attr.ib(default=attr.Factory(list))

    def __iter__(self):
        # Iteration order matches the declaration order above, so the
        # struct can be unpacked as a 7-tuple.
        yield self.modified
        yield self.added
        yield self.removed
        yield self.deleted
        yield self.unknown
        yield self.ignored
        yield self.clean

    def __repr__(self):
        # repr=False above; build a readable repr by hand using pprint.
        return (
            r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
            r'unknown=%s, ignored=%s, clean=%s>'
        ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
97 97
98 98
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath -> ctx) mapping, preferring subpaths from ctx1. The
    # subpaths from ctx2 matter when the .hgsub file has been modified (in
    # ctx2) but not yet committed (in ctx1).
    subpaths = {}
    for subpath in ctx2.substate:
        subpaths[subpath] = ctx2
    for subpath in ctx1.substate:
        subpaths[subpath] = ctx1

    # Subpaths only present in ctx2 are handled separately below.
    missing = {s for s in ctx2.substate if s not in ctx1.substate}
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
123 123
124 124
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Count excluded nodes that are secret (and still alive): they explain
    # why nothing was exchanged.
    secretlist = []
    if excluded:
        secretlist = [
            n
            for n in excluded
            if repo[n].phase() >= phases.secret and not repo[n].extinct()
        ]

    if not secretlist:
        ui.status(_(b"no changes found\n"))
    else:
        ui.status(
            _(b"no changes found (ignored %d secret changesets)\n")
            % len(secretlist)
        )
143 143
144 144
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    # -1 means "no opinion". Which of the two codes is returned depends on
    # the ui.detailed-exit-code config option (see the end of this function).
    coarse_exit_code = -1
    detailed_exit_code = -1
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        detailed_exit_code = 20
        if inst.errno == errno.ETIMEDOUT:
            reason = _(b'timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker)
            )
        else:
            reason = _(b'lock held by %r') % inst.locker
        ui.error(
            _(b"abort: %s: %s\n")
            % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
        )
        if not inst.locker:
            ui.error(_(b"(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        detailed_exit_code = 20
        ui.error(
            _(b"abort: could not lock %s: %s\n")
            % (
                inst.desc or stringutil.forcebytestr(inst.filename),
                encoding.strtolocal(inst.strerror),
            )
        )
    except error.OutOfBandError as inst:
        detailed_exit_code = 100
        if inst.args:
            msg = _(b"abort: remote error:\n")
        else:
            msg = _(b"abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(b''.join(inst.args))
        if inst.hint:
            ui.error(b'(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_(b"abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_(b"abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode, arbitrary objects, or bytes; render
        # each case defensively.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(b" %r\n" % (msg,))
        elif not msg:
            ui.error(_(b" empty string\n"))
        else:
            ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_(b"abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_(b"abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        ui.error(b"%s\n" % inst)
        if inst.hint:
            ui.error(_(b"(%s)\n") % inst.hint)
        detailed_exit_code = 240
        coarse_exit_code = 1
    except error.WdirUnsupported:
        ui.error(_(b"abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        # Map the Abort subclass to its detailed exit code; order matters
        # only in that each isinstance test is exclusive here.
        if isinstance(inst, error.InputError):
            detailed_exit_code = 10
        elif isinstance(inst, error.StateError):
            detailed_exit_code = 20
        elif isinstance(inst, error.ConfigError):
            detailed_exit_code = 30
        elif isinstance(inst, error.CanceledError):
            detailed_exit_code = 250
        # Abort.format() renders the "abort: ..." message (and hint) itself,
        # so no extra formatting is applied here (see this changeset).
        ui.error(inst.format())
    except error.WorkerError as inst:
        # Don't print a message -- the worker already should have
        return inst.status_code
    except ImportError as inst:
        ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in b"mpatch bdiff".split():
            ui.error(_(b"(did you forget to compile extensions?)\n"))
        elif m in b"zlib".split():
            ui.error(_(b"(is your Python install correct?)\n"))
    except util.urlerr.httperror as inst:
        detailed_exit_code = 100
        ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
    except util.urlerr.urlerror as inst:
        detailed_exit_code = 100
        try: # usually it is in the form (errno, strerror)
            reason = inst.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            reason = inst.reason
        if isinstance(reason, pycompat.unicode):
            # SSLError of Python 2.7.9 contains a unicode
            reason = encoding.unitolocal(reason)
        ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
    except (IOError, OSError) as inst:
        # Broken pipe (EPIPE) is silently ignored: the reader went away.
        if (
            util.safehasattr(inst, b"args")
            and inst.args
            and inst.args[0] == errno.EPIPE
        ):
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(
                    _(b"abort: %s: '%s'\n")
                    % (
                        encoding.strtolocal(inst.strerror),
                        stringutil.forcebytestr(inst.filename),
                    )
                )
            else:
                ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_(b"abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        detailed_exit_code = 254
        coarse_exit_code = inst.code

    if ui.configbool(b'ui', b'detailed-exit-code'):
        return detailed_exit_code
    else:
        return coarse_exit_code
292 290
293 291
def checknewlabel(repo, lbl, kind):
    """Validate ``lbl`` as a new label name, raising InputError if unusable."""
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in (b'tip', b'.', b'null'):
        raise error.InputError(_(b"the name '%s' is reserved") % lbl)
    for ch in (b':', b'\0', b'\n', b'\r'):
        if ch in lbl:
            raise error.InputError(
                _(b"%r cannot be used in a name") % pycompat.bytestr(ch)
            )
    # A purely numeric name would be ambiguous with a revision number.
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.InputError(_(b"cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.InputError(
            _(b"leading or trailing whitespace in name %r") % lbl
        )
313 311
314 312
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt the dirstate/manifest
    # line-based storage.
    for forbidden in (b'\r', b'\n'):
        if forbidden in f:
            raise error.InputError(
                _(b"'\\n' and '\\r' disallowed in filenames: %r")
                % pycompat.bytestr(f)
            )
322 320
323 321
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = b"%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.InputError(msg)
    ui.warn(_(b"warning: %s\n") % msg)
335 333
336 334
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config(b'ui', b'portablefilenames')
    lowered = raw.lower()
    asbool = stringutil.parsebool(raw)
    # On Windows non-portable names always abort.
    abort = pycompat.iswindows or lowered == b'abort'
    warn = asbool or lowered == b'warn'
    if asbool is None and not (warn or abort or lowered == b'ignore'):
        raise error.ConfigError(
            _(b"ui.portablefilenames value is invalid ('%s')") % raw
        )
    return abort, warn
350 348
351 349
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds onto a tracked one."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        self._dirstate = dirstate
        # Lower-case every tracked name once, up front, for O(1) lookups.
        joined = b'\0'.join(dirstate)
        self._loweredfiles = set(encoding.lower(joined).split(b'\0'))
        # _newfiles lets us avoid complaining about case collisions when
        # this object is called with the same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _(b'possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_(b"warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
375 373
376 374
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    cached = cl._filteredrevs_hashcache.get(maxrev)
    if cached:
        return cached
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    hasher = hashutil.sha1()
    for rev in revs:
        hasher.update(b'%d;' % rev)
    digest = hasher.digest()
    cl._filteredrevs_hashcache[maxrev] = digest
    return digest
402 400
403 401
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''

    def errhandler(err):
        # Only errors on the top-level path are fatal; failures deeper in
        # the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:

        def adddir(dirlst, dirname):
            # Record dirname's stat and report whether it was new; this is
            # how symlink cycles are detected and broken.
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match

    else:
        # Without os.path.samestat, symlink cycles cannot be detected, so
        # following symlinks is disabled entirely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if b'.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, b'.hg', b'patches')
            if os.path.isdir(os.path.join(qroot, b'.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove(b'.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
451 449
452 450
def binnode(ctx):
    """Return binary node id for a given basectx"""
    n = ctx.node()
    if n is not None:
        return n
    # The working directory has no real node; substitute the magic wdir id.
    return wdirid
459 457
460 458
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    r = ctx.rev()
    # The working directory is represented by the magic wdirrev integer.
    return wdirrev if r is None else r
468 466
469 467
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
475 473
476 474
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hashes in debug mode, short ones otherwise.
    hexfunc = hex if ui.debugflag else short
    return b'%d:%s' % (rev, hexfunc(node))
484 482
485 483
def resolvehexnodeidprefix(repo, prefix):
    """Resolve a hex nodeid prefix (optionally 'x'-prefixed) to a binary node.

    Returns None when nothing matches. Re-raises AmbiguousPrefixLookupError
    when the prefix is ambiguous and the disambiguation revset does not
    narrow it down to exactly one node.
    """
    if prefix.startswith(b'x'):
        # Strip the explicit hex marker (see disambiguate() in
        # shortesthexnodeidprefix below, which adds it).
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config(
            b'experimental', b'revisions.disambiguatewithin'
        )
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {
                (b'experimental', b'revisions.disambiguatewithin'): None
            }
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
            matches = []
            for rev in revs:
                node = repo.changelog.node(rev)
                if hex(node).startswith(prefix):
                    matches.append(node)
            # Only a unique match within the revset resolves the ambiguity.
            if len(matches) == 1:
                return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
516 514
517 515
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    # Only prefixes that parse as an integer can collide with a revnum.
    try:
        value = int(prefix)
    except ValueError:
        return False
    # Numbers beyond the tip cannot be revnums.
    if value >= len(repo):
        return False
    # A leading zero never denotes a revnum, except for '0' itself.
    if prefix[0:1] == b'0' and prefix != b'0':
        return False
    return True
531 529
532 530
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength = max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
            # Mark revnum-shaped prefixes with a leading 'x' rather than
            # extending them.
            if mayberevnum(repo, prefix):
                return b'x' + prefix
            else:
                return prefix

        # Otherwise grow the prefix until it can no longer be a revnum.
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get(b'disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache[b'disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get(b'disambiguationnodetree')
            if not nodetree:
                if util.safehasattr(parsers, 'nodetree'):
                    # The CExt is the only implementation to provide a nodetree
                    # class so far.
                    index = cl.index
                    if util.safehasattr(index, 'get_cindex'):
                        # the rust wrapped need to give access to its internal index
                        index = index.get_cindex()
                    nodetree = parsers.nodetree(index, len(revs))
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache[b'disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # Fallback without the nodetree C/Rust helper: linear scan of
            # the revset for each candidate length.
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
605 603
606 604
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
618 616
619 617
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = (
            b"symbol (%s of type %s) was not a string, did you mean "
            b"repo[symbol]?" % (symbol, type(symbol))
        )
        raise error.ProgrammingError(msg)
    try:
        if symbol in (b'.', b'tip', b'null'):
            return repo[symbol]

        # Try interpreting the symbol as a revision number.
        try:
            r = int(symbol)
            # Reject forms like b'010' that re-serialize differently.
            if b'%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # A 40-character symbol may be a full hex nodeid.
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Last resort: treat the symbol as a hex nodeid prefix.
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (
        error.FilteredIndexError,
        error.FilteredLookupError,
        error.FilteredRepoLookupError,
    ):
        # Re-raise with a friendlier message explaining why the revision
        # is filtered/hidden.
        raise _filterederror(repo, symbol)
685 683
686 684
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith(b'visible'):
        # Filtered for a reason other than visibility; report the subset.
        msg = _(b"filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # Check if the changeset is obsolete
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)

    # If the changeset is obsolete, enrich the message with the reason
    # that made this changeset not visible
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _(b"hidden revision '%s'") % changeid

    hint = _(b'use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
711 709
712 710
def revsingle(repo, revspec, default=b'.', localalias=None):
    """Resolve a single revision spec, falling back to ``default`` when the
    spec is empty (but not when it is the integer 0)."""
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_(b'empty revision set'))
    return repo[matched.last()]
721 719
722 720
def _pairspec(revspec):
    # A range-style revset at top level always denotes a pair of revisions.
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in (b'range', b'rangepre', b'rangepost', b'rangeall')
731 729
732 730
def revpair(repo, revs):
    """Resolve a list of revision specs to a (first, second) context pair.

    With no specs, returns (working dir parent, working dir). Raises
    error.Abort when the specs produce an empty set or an empty side of a
    range.
    """
    if not revs:
        return repo[b'.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_(b'empty revision range'))

    first = l.first()
    second = l.last()

    if (
        first == second
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        # Multiple specs collapsed to a single revision because at least
        # one of them resolved to an empty set.
        raise error.Abort(_(b'empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
757 755
758 756
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # Integer specs are taken as literal revision numbers.
    allspecs = [
        revsetlang.formatspec(b'%d', spec) if isinstance(spec, int) else spec
        for spec in specs
    ]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
786 784
787 785
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling after each value for as long as
    the previous value was below ``sizelimit``."""
    current = windowsize
    while True:
        yield current
        if current < sizelimit:
            current = current * 2
793 791
794 792
def walkchangerevs(repo, revs, makefilematcher, prepare):
    '''Iterate over files and the revs in a "windowed" way.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    if not revs:
        return []
    change = repo.__getitem__

    def iterate():
        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            # Pull up to windowsize revs from the caller's iterator.
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                nrevs.append(rev)
            # Prepare in ascending order...
            for rev in sorted(nrevs):
                ctx = change(rev)
                prepare(ctx, makefilematcher(ctx))
            # ...then yield in the caller's requested order.
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
835 833
836 834
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # A real merge: both parents matter.
        return parents
    first = parents[0]
    if repo.ui.debugflag:
        return [first, repo[nullrev]]
    if first.rev() >= intrev(ctx) - 1:
        # Parent immediately precedes this revision; nothing to show.
        return []
    return parents
852 850
853 851
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    relative = forcerelativevalue
    if relative is None:
        config = repo.ui.config(b'ui', b'relative-paths')
        if config == b'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                )

    if relative:
        cwd = repo.getcwd()
        if cwd != b'':
            # this branch would work even if cwd == b'' (ie cwd = repo
            # root), but its generality makes the returned function slower
            pathto = repo.pathto
            return lambda f: pathto(f, cwd)
    if repo.ui.configbool(b'ui', b'slash'):
        return lambda f: f
    return util.localpath
892 890
893 891
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''

    def prefixed(f):
        # Prepend the subrepo path before delegating to the outer function.
        return uipathfn(posixpath.join(subpath, f))

    return prefixed
897 895
898 896
def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    if pats:
        return True
    return bool(opts.get(b'include')) or bool(opts.get(b'exclude'))
906 904
907 905
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicit pattern kinds (glob:, re:, ...) are left untouched.
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            expanded.extend(globbed)
        else:
            # Nothing matched; keep the original pattern verbatim.
            expanded.append(kindpat)
    return expanded
926 924
927 925
def matchandpats(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == b'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)

    def warnbad(f, msg):
        # Default bad-file callback: warn on stderr with a ui-relative path.
        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))

    m = ctx.match(
        pats,
        opts.get(b'include'),
        opts.get(b'exclude'),
        default,
        listsubrepos=opts.get(b'subrepos'),
        badfn=badfn if badfn is not None else warnbad,
    )

    if m.always():
        pats = []
    return m, pats
959 957
960 958
def match(
    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats(), but callers only need the matcher itself
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return matcher
966 964
967 965
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # 'repo' is unused; it is kept for interface symmetry with the other
    # match*() helpers in this module.
    return matchmod.always()
971 969
972 970
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # 'repo' is unused; it is kept for interface symmetry with the other
    # match*() helpers in this module.
    return matchmod.exact(files, badfn=badfn)
976 974
977 975
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    Raises error.ParseError (with `msg`) unless the pattern resolves to
    exactly one file in the `rev` changeset.
    """
    if not matchmod.patkind(pat):
        # plain path: canonicalize it relative to the repo root
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if matcher(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
991 989
992 990
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    origbackuppath = ui.config(b'ui', b'origbackuppath')
    if origbackuppath:
        return vfs.vfs(repo.wvfs.join(origbackuppath))
    return None
1001 999
1002 1000
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        # no ui.origbackuppath configured: back up next to the file itself
        return repo.wjoin(filepath + b".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(pathutil.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        # a directory already sits where the backup file must go
        ui.note(
            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
        )
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
1037 1035
1038 1036
1039 1037 class _containsnode(object):
1040 1038 """proxy __contains__(node) to container.__contains__ which accepts revs"""
1041 1039
1042 1040 def __init__(self, repo, revcontainer):
1043 1041 self._torev = repo.changelog.rev
1044 1042 self._revcontains = revcontainer.__contains__
1045 1043
1046 1044 def __contains__(self, node):
1047 1045 return self._revcontains(self._torev(node))
1048 1046
1049 1047
def cleanupnodes(
    repo,
    replacements,
    operation,
    moves=None,
    metadata=None,
    fixphase=False,
    targetphase=None,
    backup=True,
):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase only makes sense when we are allowed to fix phases
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, b'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                # explicit 'moves' entries take precedence
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(
                    unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                )
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # map each new node back to the old nodes it replaces
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process ancestors before descendants so parent phases are final
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}

        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())

        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(
                    unfi[oldnode].phase() for oldnode in precursors[newnode]
                )
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction(b'cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                b'moving bookmarks %r from %s to %s\n'
                % (
                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                    hex(oldnode),
                    hex(newnode),
                )
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                b'parents(roots(%ln & (::%n))) - parents(%n)',
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(
                    repo, rels, operation=operation, metadata=metadata
                )
        elif phases.supportinternal(repo) and mayusearchived:
            # this assume we do not have "unstable" nodes above the cleaned ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair  # avoid import cycle

                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(
                    repo, allreplaced, allreplaced, node, operation
                )
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair  # avoid import cycle

            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(
                    repo.ui, repo, tostrip, operation, backup=backup
                )
1231 1229
1232 1230
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    """Schedule addition of unknown files and removal of missing files,
    recording renames detected by similarity.

    Recurses into subrepos when requested. Returns 1 if any explicitly
    named path was rejected by the matcher, otherwise 0 (or 1 if a
    subrepo's addremove reported failure).
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get(b'dry_run')
    try:
        similarity = float(opts.get(b'similarity') or 0)
    except ValueError:
        raise error.Abort(_(b'similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_(b'similarity must be between 0 and 100'))
    # normalize the user-facing percentage to a 0.0-1.0 ratio
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    rejected = []

    def badfn(f, msg):
        # record every bad file; the final loop below only fails the call
        # for files the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(
        repo, badmatch
    )

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _(b'adding %s\n') % uipathfn(abs)
                label = b'ui.addremove.added'
            else:
                status = _(b'removing %s\n') % uipathfn(abs)
                label = b'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
1300 1298
1301 1299
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: 'rejected' is captured by the badfn lambda before it is assigned;
    # this is safe because the matcher only invokes badfn later, during the
    # walk in _interestingfiles().
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _(b'adding %s\n') % abs
            else:
                status = _(b'removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(
        repo, m, added + unknown, removed + deleted, similarity, uipathfn
    )

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
1335 1333
1336 1334
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of
    repo-relative paths.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(
        matcher,
        subrepos=sorted(ctx.substate),
        unknown=True,
        ignored=False,
        full=False,
    )
    for abs, st in pycompat.iteritems(walkresults):
        # st is the stat result (falsy when the file is absent on disk);
        # dstate is the dirstate code for the path (b'?'/b'r'/b'a'/...),
        # classified by the branches below
        dstate = dirstate[abs]
        if dstate == b'?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != b'r' and not st:
            deleted.append(abs)
        elif dstate == b'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == b'r' and not st:
            removed.append(abs)
        elif dstate == b'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1371 1369
1372 1370
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.

    Returns a dict mapping destination (added) path -> source (removed)
    path. Empty when similarity is 0 or less.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(
            repo, added, removed, similarity
        ):
            if (
                repo.ui.verbose
                or not matcher.exact(old)
                or not matcher.exact(new)
            ):
                repo.ui.status(
                    _(
                        b'recording removal of %s as rename to %s '
                        b'(%d%% similar)\n'
                    )
                    % (uipathfn(old), uipathfn(new), score * 100)
                )
            renames[new] = old
    return renames
1394 1392
1395 1393
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # hold the working-copy lock across all three updates
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for newname, oldname in pycompat.iteritems(renames):
            wctx.copy(oldname, newname)
1405 1403
1406 1404
def getrenamedfn(repo, endrev=None):
    """Return a function getrenamed(fn, rev) -> copy source for fn or None.

    Uses changeset-copy metadata when the repo is configured for the
    changeset-centric algorithm, otherwise falls back to a filelog-based
    lookup cached per file up to endrev."""
    if copiesmod.usechangesetcentricalgo(repo):

        def getrenamed(fn, rev):
            ctx = repo[rev]
            p1copies = ctx.p1copies()
            if fn in p1copies:
                return p1copies[fn]
            p2copies = ctx.p2copies()
            if fn in p2copies:
                return p2copies[fn]
            return None

        return getrenamed

    # cache of {filename: {linkrev: copysource-or-False}}
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed
1451 1449
1452 1450
def getcopiesfn(repo, endrev=None):
    """Return a function copiesfn(ctx) -> list of (dst, src) copy pairs.

    Picks the changeset-centric implementation when the repo is configured
    for it, else builds on top of getrenamedfn()."""
    if copiesmod.usechangesetcentricalgo(repo):

        def copiesfn(ctx):
            if ctx.p2copies():
                allcopies = ctx.p1copies().copy()
                # There should be no overlap
                allcopies.update(ctx.p2copies())
                return sorted(allcopies.items())
            else:
                return sorted(ctx.p1copies().items())

    else:
        getrenamed = getrenamedfn(repo, endrev)

        def copiesfn(ctx):
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename))
            return copies

    return copiesfn
1477 1475
1478 1476
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in b'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == b'a' and origsrc == src:
            # the source was only just added, so there is no committed
            # revision to record a copy from
            if not ui.quiet:
                ui.warn(
                    _(
                        b"%s has not been committed yet, so no copy "
                        b"data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in b'?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1501 1499
1502 1500
def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary.

    A matcher can be provided as an optimization. It is probably a bug to pass
    a matcher that doesn't match all the differences between the parent of the
    working copy and newctx.
    """
    oldctx = repo[b'.']
    ds = repo.dirstate
    # snapshot working-dir copies before setparents clears state
    copies = dict(ds.copies())
    ds.setparents(newctx.node(), nullid)
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == b'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == b'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != b'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == b'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != b'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = {
        dst: oldcopies.get(src, src)
        for dst, src in pycompat.iteritems(oldcopies)
    }
    # Adjust the dirstate copies
    for dst, src in pycompat.iteritems(copies):
        if src not in newctx or dst in newctx or ds[dst] != b'a':
            # only record a copy when the source exists in newctx and the
            # destination is a newly added file
            src = None
        ds.copy(src, dst)
    repo._quick_access_changeid_invalidate()
1548 1546
1549 1547
def filterrequirements(requirements):
    """ filters the requirements into two sets:

    wcreq: requirements which should be written in .hg/requires
    storereq: which should be written in .hg/store/requires

    Returns (wcreq, storereq)
    """
    if requirementsmod.SHARESAFE_REQUIREMENT not in requirements:
        # share-safe not enabled: everything belongs in .hg/requires
        return requirements, None
    wc = {
        r
        for r in requirements
        if r in requirementsmod.WORKING_DIR_REQUIREMENTS
    }
    store = set(requirements) - wc
    return wc, store
1567 1565
1568 1566
def istreemanifest(repo):
    """ returns whether the repository is using treemanifest or not """
    # purely a requirements-flag check; no manifest data is inspected
    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements
1572 1570
1573 1571
def writereporequirements(repo, requirements=None):
    """ writes requirements for the repo to .hg/requires

    If 'requirements' is given, it replaces repo.requirements first.
    Store-side requirements (when present) are written via repo.svfs."""
    if requirements:
        repo.requirements = requirements
    wcreq, storereq = filterrequirements(repo.requirements)
    if wcreq is not None:
        writerequires(repo.vfs, wcreq)
    if storereq is not None:
        writerequires(repo.svfs, storereq)
1583 1581
1584 1582
def writerequires(opener, requirements):
    """Write the sorted requirements, one per line, to 'requires'."""
    with opener(b'requires', b'w', atomictemp=True) as fp:
        for requirement in sorted(requirements):
            fp.write(requirement + b'\n')
1589 1587
1590 1588
class filecachesubentry(object):
    """Tracks the stat state of a single path so filecache can detect
    whether the underlying file changed between accesses."""

    def __init__(self, path, stat):
        self.path = path
        # cachestat is None until a successful stat (also when the file
        # does not exist)
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the path does not exist (ENOENT); any other
        # OSError is re-raised
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1646 1644
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits just like the original explicit loop did
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1663 1661
1664 1662
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        # self.name (bytes) keys obj._filecache; self.sname (str) keys
        # obj.__dict__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        # the instance dict would shadow this descriptor, so reaching here
        # with the attribute already set would be a bug
        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value  # update cached copy
        obj.__dict__[self.sname] = value  # update copy returned by obj.x
1749 1747
1750 1748
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config(b"extdata", source)
    if not spec:
        raise error.Abort(_(b"unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith(b"shell:"):
            # external commands should be run relative to the repo root
            # (shell=True is intentional: the command comes from the
            # user's own configuration)
            cmd = spec[6:]
            proc = subprocess.Popen(
                procutil.tonativestr(cmd),
                shell=True,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdout=subprocess.PIPE,
                cwd=procutil.tonativestr(repo.root),
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            try:
                proc.communicate()
            except ValueError:
                # This happens if we started iterating src and then
                # get a parse error on a line. It should be safe to ignore.
                pass
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _(b"extdata command '%s' failed: %s")
            % (cmd, procutil.explainexit(proc.returncode))
        )

    return data
1817 1815
1818 1816
class progress(object):
    """Context-manager wrapper around a progress-bar update callback.

    'updatebar' is called as (topic, pos, item, unit, total) on every
    update; completion is signalled with pos=None."""

    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
        self.ui = ui
        self.topic = topic
        self.unit = unit
        self.total = total
        self.pos = 0
        self._updatebar = updatebar
        self.debug = ui.configbool(b'progress', b'debug')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item=b"", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item=b"", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        # pos=None tells the bar that this topic is finished
        self.pos = None
        self.unit = b""
        self.total = None
        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)

    def _printdebug(self, item):
        # prefix unit/item with a space only when they are non-empty
        unit = b' ' + self.unit if self.unit else b''
        item = b' ' + item if item else item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug(
                b'%s:%s %d/%d%s (%4.2f%%)\n'
                % (self.topic, item, self.pos, self.total, unit, pct)
            )
        else:
            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1868 1866
1869 1867
def gdinitconfig(ui):
    """Tell whether a newly created repo should use general delta."""
    # experimental config: format.generaldelta
    # (keep short-circuit order: usegeneraldelta is only consulted when
    # generaldelta itself is unset/false)
    if ui.configbool(b'format', b'generaldelta'):
        return True
    return ui.configbool(b'format', b'usegeneraldelta')
1877 1875
1878 1876
def gddeltaconfig(ui):
    """Tell whether incoming deltas should be optimised for general delta."""
    # experimental config: format.generaldelta
    optimise = ui.configbool(b'format', b'generaldelta')
    return optimise
1884 1882
1885 1883
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    firstlinekey = b'__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file into a dict of key -> value.

        When ``firstlinenonkeyval`` is true the first line is not parsed as
        a key=value pair but returned verbatim (minus the trailing newline)
        under the ``__firstline`` key.

        Raises error.CorruptedState on malformed content.
        """
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_(b"empty simplekeyvalue file"))
            # keep the first line as-is, without its trailing '\n'
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]
        try:
            # skip lines holding only whitespace (e.g. a bare '\n'), which
            # a plain "if line" test would not filter out
            parsed = dict(
                line[:-1].split(b'=', 1) for line in lines if line.strip()
            )
        except ValueError as e:
            raise error.CorruptedState(stringutil.forcebytestr(e))
        if self.firstlinekey in parsed:
            e = _(b"%r can't be used as a key")
            raise error.CorruptedState(e % self.firstlinekey)
        result.update(parsed)
        return result

    def write(self, data, firstline=None):
        """Serialize ``data`` (a dict) as key=value lines.

        Keys must be alphanumerical and start with a letter; values must
        not contain newline characters.  If ``firstline`` is not None it is
        written verbatim as the very first line, not in key=value form.
        """
        out = []
        if firstline is not None:
            out.append(b'%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    b"key name '%s' is reserved" % self.firstlinekey
                )
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    b"keys must start with a letter in a key-value file"
                )
            if not key.isalnum():
                raise error.ProgrammingError(
                    b"invalid key name in a simple key-value file"
                )
            if b'\n' in value:
                raise error.ProgrammingError(
                    b"invalid value in a simple key-value file"
                )
            out.append(b"%s=%s\n" % (key, value))
        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
            fp.write(b''.join(out))
1956 1954
1957 1955
# Transaction-name prefixes (matched via txnname.startswith in
# registersummarycallback's txmatch) for which newly-obsoleted changesets
# should be summarized after the transaction closes.
_reportobsoletedsource = [
    b'debugobsolete',
    b'pull',
    b'push',
    b'serve',
    b'unbundle',
]

# Transaction-name prefixes for which newly-added changesets (and phase
# changes of pre-existing ones) should be summarized.
_reportnewcssource = [
    b'pull',
    b'unbundle',
]
1970 1968
1971 1969
def prefetchfiles(repo, revmatches):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them.

    Args:
      revmatches: a list of (revision, match) tuples to indicate the files to
      fetch at each revision. If any of the match elements is None, it matches
      all files.
    """

    def _matcher(m):
        # a falsy matcher means "everything"
        if not m:
            return matchall(repo)
        assert isinstance(m, matchmod.basematcher)
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        return matchmod.badmatch(m, lambda fn, msg: None)

    revbadmatches = [(rev, _matcher(m)) for (rev, m) in revmatches]
    fileprefetchhooks(repo, revbadmatches)
1995 1993
1996 1994
# a list of (repo, revs, match) prefetch functions; invoked via
# prefetchfiles() above so extensions can fetch file contents early
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
2002 2000
2003 2001
def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
    """register a callback to issue a summary after the transaction is closed

    If as_validator is true, then the callbacks are registered as transaction
    validators instead

    ``otr`` is the transaction to register on; ``txnname`` selects which
    optional reports apply (matched by prefix against the source lists
    ``_reportobsoletedsource`` / ``_reportnewcssource``).
    """

    def txmatch(sources):
        # does the transaction name start with any of the given prefixes?
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())

        def wrapped(tr):
            repo = reporef()
            if filtername:
                assert repo is not None  # help pytype
                repo = repo.filtered(filtername)
            func(repo, tr)

        # category names are numbered so callbacks run in registration order
        newcat = b'%02i-txnreport' % len(categories)
        if as_validator:
            otr.addvalidator(newcat, wrapped)
        else:
            otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    @reportsummary
    def reportchangegroup(repo, tr):
        # summarize any changegroup applied by this transaction
        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
        if cgchangesets or cgrevisions or cgfiles:
            htext = b""
            if cgheads:
                htext = _(b" (%+d heads)") % cgheads
            msg = _(b"added %d changesets with %d changes to %d files%s\n")
            if as_validator:
                # validators run before the transaction commits, so use
                # present tense
                msg = _(b"adding %d changesets with %d changes to %d files%s\n")
            assert repo is not None  # help pytype
            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))

    if txmatch(_reportobsoletedsource):

        @reportsummary
        def reportobsoleted(repo, tr):
            # report new obsolescence markers and newly-obsoleted changesets
            obsoleted = obsutil.getobsoleted(repo, tr)
            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
            if newmarkers:
                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
            if obsoleted:
                msg = _(b'obsoleted %i changesets\n')
                if as_validator:
                    msg = _(b'obsoleting %i changesets\n')
                repo.ui.status(msg % len(obsoleted))

    if obsolete.isenabled(
        repo, obsolete.createmarkersopt
    ) and repo.ui.configbool(
        b'experimental', b'evolution.report-instabilities'
    ):
        # (display label, revset name) pairs for the instability kinds
        instabilitytypes = [
            (b'orphan', b'orphan'),
            (b'phase-divergent', b'phasedivergent'),
            (b'content-divergent', b'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count visible unstable changesets of each kind
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(
                    set(obsolete.getrevs(repo, revset)) - filtered
                )
            return counts

        # snapshot taken now, compared against post-transaction counts
        oldinstabilitycounts = getinstabilitycounts(repo)

        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (
                    newinstabilitycounts[instability]
                    - oldinstabilitycounts[instability]
                )
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):

        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = b'%s:%s' % (minrev, maxrev)
                draft = len(repo.revs(b'%ld and draft()', revs))
                secret = len(repo.revs(b'%ld and secret()', revs))
                if not (draft or secret):
                    msg = _(b'new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _(b'new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _(b'new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = b'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get(b'revduplicates', ())
            obsadded = unfi.revs(
                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
            )
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = b'(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get(b'origrepolen', len(repo))
            published = []
            for revs, (old, new) in tr.changes.get(b'phases', []):
                # only count moves into the public phase
                if new != phases.public:
                    continue
                published.extend(rev for rev in revs if rev < origrepolen)
            if not published:
                return
            msg = _(b'%d local changesets published\n')
            if as_validator:
                msg = _(b'%d local changesets will be published\n')
            repo.ui.status(msg % len(published))
2173 2171
2174 2172
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities

    Returns None when ``delta`` is not positive (nothing to warn about).
    """
    if delta <= 0:
        return None
    return _(b'%i new %s changesets\n') % (delta, instability)
2182 2180
2183 2181
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Render ``nodes`` as space-separated short hashes, abbreviating long lists.

    Every hash is shown when the list fits in ``maxnumnodes`` or the ui is
    verbose; otherwise only the first ``maxnumnodes`` are shown, followed by
    an "and N others" suffix.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return b' '.join(short(h) for h in nodes)
    shown = b' '.join(short(h) for h in nodes[:maxnumnodes])
    return _(b"%s and %d others") % (shown, len(nodes) - maxnumnodes)
2189 2187
2190 2188
def enforcesinglehead(repo, tr, desc, accountclosed=False):
    """check that no named branch has multiple heads

    Aborts when any visible branch has more than one head; ``accountclosed``
    makes closed heads count too.
    """
    if desc in (b'strip', b'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered(b'visible')
    # possible improvement: we could restrict the check to affected branch
    bm = visible.branchmap()
    for name in bm:
        heads = bm.branchheads(name, closed=accountclosed)
        if len(heads) <= 1:
            continue
        msg = _(b'rejecting multiple heads on branch "%s"') % name
        hint = _(b'%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
2207 2205
2208 2206
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # identity by default; extensions monkeypatch this hook to customize
    return sink
2214 2212
2215 2213
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access only applies to filtered repos with the feature enabled
    if not repo.filtername:
        return repo
    if not repo.ui.configbool(b'experimental', b'directaccess'):
        return repo
    if repo.filtername not in (b'visible', b'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == b'warn':
        unfi = repo.unfiltered()
        revstr = b", ".join([pycompat.bytestr(unfi[r]) for r in revs])
        repo.ui.warn(
            _(
                b"warning: accessing hidden changesets for write "
                b"operation: %s\n"
            )
            % revstr
        )

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered(b'visible-hidden', revs)
2264 2262
2265 2263
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    # whether plain integers may be treated as revision numbers
    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # NOTE(review): boundary is `n <= tiprev` although valid revs are
            # 0..tiprev-1; membership is re-checked via `n not in cl` below —
            # confirm whether <= (vs <) is intentional upstream.
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    # hidden (filtered-out) revnum: remember it
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            # not an integer: fall through to hex-prefix resolution.  Note an
            # integer larger than tiprev also falls through, since it may be
            # an all-digit hash prefix.
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            # only collect revisions hidden by the current filter
            if rev not in cl:
                revs.add(rev)

    return revs
2299 2297
2300 2298
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    # ancestors of the bookmark, minus ancestors of unbookmarked heads and
    # of other bookmarks
    expr = (
        b"ancestors(bookmark(%s)) - "
        b"ancestors(head() and not bookmark(%s)) - "
        b"ancestors(bookmark() and not bookmark(%s))"
    )
    return repo.revs(expr, mark, mark, mark)
@@ -1,749 +1,747 b''
1 1 # wireprotov1server.py - Wire protocol version 1 server functionality
2 2 #
3 3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import os
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 )
18 18 from .pycompat import getattr
19 19
20 20 from . import (
21 21 bundle2,
22 22 bundlecaches,
23 23 changegroup as changegroupmod,
24 24 discovery,
25 25 encoding,
26 26 error,
27 27 exchange,
28 28 pushkey as pushkeymod,
29 29 pycompat,
30 30 streamclone,
31 31 util,
32 32 wireprototypes,
33 33 )
34 34
35 35 from .utils import (
36 36 procutil,
37 37 stringutil,
38 38 )
39 39
# convenience aliases for the urllib compatibility shims in util
urlerr = util.urlerr
urlreq = util.urlreq

# message (and hint) returned to clients that lack bundle2 support
bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required')
bundle2requiredhint = _(
    b'see https://www.mercurial-scm.org/wiki/IncompatibleClient'
)
bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint)
48 48
49 49
def clientcompressionsupport(proto):
    """Returns a list of compression methods supported by the client.

    Returns a list of the compression methods supported by the client
    according to the protocol capabilities. If no such capability has
    been announced, fallback to the default of zlib and uncompressed.
    """
    prefix = b'comp='
    for cap in proto.getprotocaps():
        if cap.startswith(prefix):
            return cap[len(prefix):].split(b',')
    return [b'zlib', b'none']
61 61
62 62
63 63 # wire protocol command can either return a string or one of these classes.
64 64
65 65
def getdispatchrepo(repo, proto, command):
    """Obtain the repo used for processing wire protocol commands.

    The intent of this function is to serve as a monkeypatch point for
    extensions that need commands to operate on different repo views under
    specialized circumstances.
    """
    view = repo.ui.config(b'server', b'view')
    return repo.filtered(view)
75 75
76 76
def dispatch(repo, proto, command):
    """Look up ``command`` in the registry and run it against the dispatch
    repo view, with arguments decoded by the protocol handler."""
    repo = getdispatchrepo(repo, proto, command)

    handler, argspec = commands[command]
    args = proto.getargs(argspec)

    return handler(repo, proto, *args)
84 84
85 85
def options(cmd, keys, others):
    """Extract the options named in ``keys`` from the ``others`` dict.

    Matching entries are removed from ``others`` and returned.  Any keys
    left over in ``others`` are unexpected: they are reported on stderr
    and otherwise ignored.
    """
    opts = {}
    for key in keys:
        if key in others:
            opts[key] = others.pop(key)
    if others:
        procutil.stderr.write(
            b"warning: %s ignored unexpected arguments %s\n"
            % (cmd, b",".join(others))
        )
    return opts
98 98
99 99
def bundle1allowed(repo, action):
    """Whether a bundle1 operation is allowed from the server.

    Priority is:

    1. server.bundle1gd.<action> (if generaldelta active)
    2. server.bundle1.<action>
    3. server.bundle1gd (if generaldelta active)
    4. server.bundle1
    """
    ui = repo.ui
    gd = b'generaldelta' in repo.requirements

    # most specific config option wins; *gd* variants apply only when the
    # repo actually uses generaldelta
    candidates = []
    if gd:
        candidates.append(b'bundle1gd.%s' % action)
    candidates.append(b'bundle1.%s' % action)
    if gd:
        candidates.append(b'bundle1gd')

    for name in candidates:
        value = ui.configbool(b'server', name)
        if value is not None:
            return value

    return ui.configbool(b'server', b'bundle1')
128 128
129 129
130 130 commands = wireprototypes.commanddict()
131 131
132 132
def wireprotocommand(name, args=None, permission=b'push'):
    """Decorator to declare a wire protocol command.

    ``name`` is the name of the wire protocol command being provided.

    ``args`` defines the named arguments accepted by the command. It is
    a space-delimited list of argument names. ``*`` denotes a special value
    that says to accept all named arguments.

    ``permission`` defines the permission type needed to run this command.
    Can be ``push`` or ``pull``. These roughly map to read-write and read-only,
    respectively. Default is to assume command requires ``push`` permissions
    because otherwise commands not declaring their permissions could modify
    a repository that is supposed to be read-only.
    """
    # commands declared here are exposed on all version-1 transports
    transports = {
        k for k, v in wireprototypes.TRANSPORTS.items() if v[b'version'] == 1
    }

    # Because SSHv2 is a mirror of SSHv1, we allow "batch" commands through to
    # SSHv2.
    # TODO undo this hack when SSH is using the unified frame protocol.
    if name == b'batch':
        transports.add(wireprototypes.SSHV2)

    if permission not in (b'push', b'pull'):
        raise error.ProgrammingError(
            b'invalid wire protocol permission; '
            b'got %s; expected "push" or "pull"' % permission
        )

    if args is None:
        args = b''

    if not isinstance(args, bytes):
        raise error.ProgrammingError(
            b'arguments for version 1 commands must be declared as bytes'
        )

    def register(func):
        # refuse duplicate registrations: each command name is unique
        if name in commands:
            raise error.ProgrammingError(
                b'%s command already registered for version 1' % name
            )
        commands[name] = wireprototypes.commandentry(
            func, args=args, transports=transports, permission=permission
        )

        return func

    return register
184 184
185 185
# TODO define a more appropriate permissions type to use for this.
@wireprotocommand(b'batch', b'cmds *', permission=b'pull')
def batch(repo, proto, cmds, others):
    """Run several wire protocol commands in a single request.

    ``cmds`` is a ``;``-separated list of ``op arg=val,...`` entries with
    batch-escaped names and values.  Each sub-command's permission is
    checked before it runs; results are batch-escaped and joined by ``;``.
    """
    unescapearg = wireprototypes.unescapebatcharg
    res = []
    for pair in cmds.split(b';'):
        op, args = pair.split(b' ', 1)
        vals = {}
        for a in args.split(b','):
            if a:
                n, v = a.split(b'=')
                vals[unescapearg(n)] = unescapearg(v)
        func, spec = commands[op]

        # Validate that client has permissions to perform this command.
        perm = commands[op].permission
        assert perm in (b'push', b'pull')
        proto.checkperm(perm)

        if spec:
            # map received values onto the command's declared argument
            # spec; '*' collects all arguments not otherwise declared
            keys = spec.split()
            data = {}
            for k in keys:
                if k == b'*':
                    star = {}
                    for key in vals.keys():
                        if key not in keys:
                            star[key] = vals[key]
                    data[b'*'] = star
                else:
                    data[k] = vals[k]
            result = func(repo, proto, *[data[k] for k in keys])
        else:
            result = func(repo, proto)
        if isinstance(result, wireprototypes.ooberror):
            # out-of-band error aborts the whole batch immediately
            return result

        # For now, all batchable commands must return bytesresponse or
        # raw bytes (for backwards compatibility).
        assert isinstance(result, (wireprototypes.bytesresponse, bytes))
        if isinstance(result, wireprototypes.bytesresponse):
            result = result.data
        res.append(wireprototypes.escapebatcharg(result))

    return wireprototypes.bytesresponse(b';'.join(res))
231 231
232 232
@wireprotocommand(b'between', b'pairs', permission=b'pull')
def between(repo, proto, pairs):
    """Answer the 'between' query for each space-separated node pair."""
    decoded = [wireprototypes.decodelist(p, b'-') for p in pairs.split(b" ")]
    lines = [
        wireprototypes.encodelist(b) + b"\n" for b in repo.between(decoded)
    ]
    return wireprototypes.bytesresponse(b''.join(lines))
241 241
242 242
@wireprotocommand(b'branchmap', permission=b'pull')
def branchmap(repo, proto):
    """Serialize the branch map: one 'quoted-name encoded-nodes' line each."""
    lines = []
    for branch, nodes in pycompat.iteritems(repo.branchmap()):
        quoted = urlreq.quote(encoding.fromlocal(branch))
        lines.append(b'%s %s' % (quoted, wireprototypes.encodelist(nodes)))

    return wireprototypes.bytesresponse(b'\n'.join(lines))
253 253
254 254
@wireprotocommand(b'branches', b'nodes', permission=b'pull')
def branches(repo, proto, nodes):
    """Resolve branch information for each of the requested nodes."""
    decoded = wireprototypes.decodelist(nodes)
    lines = [
        wireprototypes.encodelist(b) + b"\n" for b in repo.branches(decoded)
    ]
    return wireprototypes.bytesresponse(b''.join(lines))
263 263
264 264
@wireprotocommand(b'clonebundles', b'', permission=b'pull')
def clonebundles(repo, proto):
    """Server command for returning info for available bundles to seed clones.

    Clients will parse this response and determine what bundle to fetch.

    Extensions may wrap this command to filter or dynamically emit data
    depending on the request. e.g. you could advertise URLs for the closest
    data center given the client's IP address.
    """
    # missing manifest file simply yields an empty response
    manifest = repo.vfs.tryread(bundlecaches.CB_MANIFEST_FILE)
    return wireprototypes.bytesresponse(manifest)
278 278
279 279
# baseline capabilities advertised by every version-1 server; copied and
# extended by _capabilities() below
wireprotocaps = [
    b'lookup',
    b'branchmap',
    b'pushkey',
    b'known',
    b'getbundle',
    b'unbundlehash',
]
288 288
289 289
def _capabilities(repo, proto):
    """return a list of capabilities for a repo

    This function exists to allow extensions to easily wrap capabilities
    computation

    - returns a lists: easy to alter
    - change done here will be propagated to both `capabilities` and `hello`
      command without any other action needed.
    """
    # copy to prevent modification of the global list
    caps = list(wireprotocaps)

    # Command of same name as capability isn't exposed to version 1 of
    # transports. So conditionally add it.
    if commands.commandavailable(b'changegroupsubset', proto):
        caps.append(b'changegroupsubset')

    # stream clone capabilities depend on the repo's storage requirements
    if streamclone.allowservergeneration(repo):
        if repo.ui.configbool(b'server', b'preferuncompressed'):
            caps.append(b'stream-preferred')
        requiredformats = repo.requirements & repo.supportedformats
        # if our local revlogs are just revlogv1, add 'stream' cap
        if not requiredformats - {b'revlogv1'}:
            caps.append(b'stream')
        # otherwise, add 'streamreqs' detailing our local revlog format
        else:
            caps.append(b'streamreqs=%s' % b','.join(sorted(requiredformats)))
    if repo.ui.configbool(b'experimental', b'bundle2-advertise'):
        capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=b'server'))
        caps.append(b'bundle2=' + urlreq.quote(capsblob))
    # always advertise supported unbundle formats, in priority order
    caps.append(b'unbundle=%s' % b','.join(bundle2.bundlepriority))

    if repo.ui.configbool(b'experimental', b'narrow'):
        caps.append(wireprototypes.NARROWCAP)
    if repo.ui.configbool(b'experimental', b'narrowservebrokenellipses'):
        caps.append(wireprototypes.ELLIPSESCAP)

    # let the protocol handler add transport-specific capabilities
    return proto.addcapabilities(repo, caps)
329 329
330 330
# If you are writing an extension and consider wrapping this function. Wrap
# `_capabilities` instead.
@wireprotocommand(b'capabilities', permission=b'pull')
def capabilities(repo, proto):
    """Advertise server capabilities as a sorted, space-separated list."""
    return wireprototypes.bytesresponse(
        b' '.join(sorted(_capabilities(repo, proto)))
    )
337 337
338 338
@wireprotocommand(b'changegroup', b'roots', permission=b'pull')
def changegroup(repo, proto, roots):
    """Stream a version-01 changegroup from the given roots to all heads."""
    missingroots = wireprototypes.decodelist(roots)
    outgoing = discovery.outgoing(
        repo, missingroots=missingroots, ancestorsof=repo.heads()
    )
    cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
    # stream the bundle in 32k chunks until exhausted
    chunks = iter(lambda: cg.read(32768), b'')
    return wireprototypes.streamres(gen=chunks)
348 348
349 349
@wireprotocommand(b'changegroupsubset', b'bases heads', permission=b'pull')
def changegroupsubset(repo, proto, bases, heads):
    """Stream a version-01 changegroup between the given bases and heads."""
    decodedbases = wireprototypes.decodelist(bases)
    decodedheads = wireprototypes.decodelist(heads)
    outgoing = discovery.outgoing(
        repo, missingroots=decodedbases, ancestorsof=decodedheads
    )
    cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
    # stream the bundle in 32k chunks until exhausted
    chunks = iter(lambda: cg.read(32768), b'')
    return wireprototypes.streamres(gen=chunks)
358 358
359 359
@wireprotocommand(b'debugwireargs', b'one two *', permission=b'pull')
def debugwireargs(repo, proto, one, two, others):
    """Echo command arguments back to the client, for protocol debugging."""
    # only accept optional args from the known set
    opts = options(b'debugwireargs', [b'three', b'four'], others)
    payload = repo.debugwireargs(one, two, **pycompat.strkwargs(opts))
    return wireprototypes.bytesresponse(payload)
367 367
368 368
def find_pullbundle(repo, proto, opts, clheads, heads, common):
    """Return a file object for the first matching pullbundle.

    Pullbundles are specified in .hg/pullbundles.manifest similar to
    clonebundles.
    For each entry, the bundle specification is checked for compatibility:
    - Client features vs the BUNDLESPEC.
    - Revisions shared with the clients vs base revisions of the bundle.
      A bundle can be applied only if all its base revisions are known by
      the client.
    - At least one leaf of the bundle's DAG is missing on the client.
    - Every leaf of the bundle's DAG is part of node set the client wants.
      E.g. do not send a bundle of all changes if the client wants only
      one specific branch of many.

    Returns None when no manifest exists or no entry qualifies.
    """

    def decodehexstring(s):
        # ';'-separated hex node ids -> set of binary node ids
        return {binascii.unhexlify(h) for h in s.split(b';')}

    manifest = repo.vfs.tryread(b'pullbundles.manifest')
    if not manifest:
        return None
    res = bundlecaches.parseclonebundlesmanifest(repo, manifest)
    res = bundlecaches.filterclonebundleentries(repo, res)
    if not res:
        return None
    cl = repo.unfiltered().changelog
    # ancestor sets of what the client wants and what it already has
    heads_anc = cl.ancestors([cl.rev(rev) for rev in heads], inclusive=True)
    common_anc = cl.ancestors([cl.rev(rev) for rev in common], inclusive=True)
    compformats = clientcompressionsupport(proto)
    for entry in res:
        # skip bundles compressed with a method the client cannot handle
        comp = entry.get(b'COMPRESSION')
        altcomp = util.compengines._bundlenames.get(comp)
        if comp and comp not in compformats and altcomp not in compformats:
            continue
        # No test yet for VERSION, since V2 is supported by any client
        # that advertises partial pulls
        if b'heads' in entry:
            try:
                bundle_heads = decodehexstring(entry[b'heads'])
            except TypeError:
                # Bad heads entry
                continue
            if bundle_heads.issubset(common):
                continue  # Nothing new
            if all(cl.rev(rev) in common_anc for rev in bundle_heads):
                continue  # Still nothing new
            # every bundle leaf must be wanted by (or known to) the client
            if any(
                cl.rev(rev) not in heads_anc and cl.rev(rev) not in common_anc
                for rev in bundle_heads
            ):
                continue
        if b'bases' in entry:
            try:
                bundle_bases = decodehexstring(entry[b'bases'])
            except TypeError:
                # Bad bases entry
                continue
            # the client must already know every base of the bundle
            if not all(cl.rev(rev) in common_anc for rev in bundle_bases):
                continue
        path = entry[b'URL']
        repo.ui.debug(b'sending pullbundle "%s"\n' % path)
        try:
            return repo.vfs.open(path)
        except IOError:
            # unreadable bundle file: try the next manifest entry
            repo.ui.debug(b'pullbundle "%s" not accessible\n' % path)
            continue
    return None
437 437
438 438
439 439 @wireprotocommand(b'getbundle', b'*', permission=b'pull')
440 440 def getbundle(repo, proto, others):
441 441 opts = options(
442 442 b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
443 443 )
444 444 for k, v in pycompat.iteritems(opts):
445 445 keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k]
446 446 if keytype == b'nodes':
447 447 opts[k] = wireprototypes.decodelist(v)
448 448 elif keytype == b'csv':
449 449 opts[k] = list(v.split(b','))
450 450 elif keytype == b'scsv':
451 451 opts[k] = set(v.split(b','))
452 452 elif keytype == b'boolean':
453 453 # Client should serialize False as '0', which is a non-empty string
454 454 # so it evaluates as a True bool.
455 455 if v == b'0':
456 456 opts[k] = False
457 457 else:
458 458 opts[k] = bool(v)
459 459 elif keytype != b'plain':
460 460 raise KeyError(b'unknown getbundle option type %s' % keytype)
461 461
462 462 if not bundle1allowed(repo, b'pull'):
463 463 if not exchange.bundle2requested(opts.get(b'bundlecaps')):
464 464 if proto.name == b'http-v1':
465 465 return wireprototypes.ooberror(bundle2required)
466 466 raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint)
467 467
468 468 try:
469 469 clheads = set(repo.changelog.heads())
470 470 heads = set(opts.get(b'heads', set()))
471 471 common = set(opts.get(b'common', set()))
472 472 common.discard(nullid)
473 473 if (
474 474 repo.ui.configbool(b'server', b'pullbundle')
475 475 and b'partial-pull' in proto.getprotocaps()
476 476 ):
477 477 # Check if a pre-built bundle covers this request.
478 478 bundle = find_pullbundle(repo, proto, opts, clheads, heads, common)
479 479 if bundle:
480 480 return wireprototypes.streamres(
481 481 gen=util.filechunkiter(bundle), prefer_uncompressed=True
482 482 )
483 483
484 484 if repo.ui.configbool(b'server', b'disablefullbundle'):
485 485 # Check to see if this is a full clone.
486 486 changegroup = opts.get(b'cg', True)
487 487 if changegroup and not common and clheads == heads:
488 488 raise error.Abort(
489 489 _(b'server has pull-based clones disabled'),
490 490 hint=_(b'remove --pull if specified or upgrade Mercurial'),
491 491 )
492 492
493 493 info, chunks = exchange.getbundlechunks(
494 494 repo, b'serve', **pycompat.strkwargs(opts)
495 495 )
496 496 prefercompressed = info.get(b'prefercompressed', True)
497 497 except error.Abort as exc:
498 498 # cleanly forward Abort error to the client
499 499 if not exchange.bundle2requested(opts.get(b'bundlecaps')):
500 500 if proto.name == b'http-v1':
501 501 return wireprototypes.ooberror(exc.message + b'\n')
502 502 raise # cannot do better for bundle1 + ssh
503 503 # bundle2 request expect a bundle2 reply
504 504 bundler = bundle2.bundle20(repo.ui)
505 505 manargs = [(b'message', exc.message)]
506 506 advargs = []
507 507 if exc.hint is not None:
508 508 advargs.append((b'hint', exc.hint))
509 509 bundler.addpart(bundle2.bundlepart(b'error:abort', manargs, advargs))
510 510 chunks = bundler.getchunks()
511 511 prefercompressed = False
512 512
513 513 return wireprototypes.streamres(
514 514 gen=chunks, prefer_uncompressed=not prefercompressed
515 515 )
516 516
517 517
518 518 @wireprotocommand(b'heads', permission=b'pull')
519 519 def heads(repo, proto):
520 520 h = repo.heads()
521 521 return wireprototypes.bytesresponse(wireprototypes.encodelist(h) + b'\n')
522 522
523 523
524 524 @wireprotocommand(b'hello', permission=b'pull')
525 525 def hello(repo, proto):
526 526 """Called as part of SSH handshake to obtain server info.
527 527
528 528 Returns a list of lines describing interesting things about the
529 529 server, in an RFC822-like format.
530 530
531 531 Currently, the only one defined is ``capabilities``, which consists of a
532 532 line of space separated tokens describing server abilities:
533 533
534 534 capabilities: <token0> <token1> <token2>
535 535 """
536 536 caps = capabilities(repo, proto).data
537 537 return wireprototypes.bytesresponse(b'capabilities: %s\n' % caps)
538 538
539 539
540 540 @wireprotocommand(b'listkeys', b'namespace', permission=b'pull')
541 541 def listkeys(repo, proto, namespace):
542 542 d = sorted(repo.listkeys(encoding.tolocal(namespace)).items())
543 543 return wireprototypes.bytesresponse(pushkeymod.encodekeys(d))
544 544
545 545
546 546 @wireprotocommand(b'lookup', b'key', permission=b'pull')
547 547 def lookup(repo, proto, key):
548 548 try:
549 549 k = encoding.tolocal(key)
550 550 n = repo.lookup(k)
551 551 r = hex(n)
552 552 success = 1
553 553 except Exception as inst:
554 554 r = stringutil.forcebytestr(inst)
555 555 success = 0
556 556 return wireprototypes.bytesresponse(b'%d %s\n' % (success, r))
557 557
558 558
559 559 @wireprotocommand(b'known', b'nodes *', permission=b'pull')
560 560 def known(repo, proto, nodes, others):
561 561 v = b''.join(
562 562 b and b'1' or b'0' for b in repo.known(wireprototypes.decodelist(nodes))
563 563 )
564 564 return wireprototypes.bytesresponse(v)
565 565
566 566
567 567 @wireprotocommand(b'protocaps', b'caps', permission=b'pull')
568 568 def protocaps(repo, proto, caps):
569 569 if proto.name == wireprototypes.SSHV1:
570 570 proto._protocaps = set(caps.split(b' '))
571 571 return wireprototypes.bytesresponse(b'OK')
572 572
573 573
574 574 @wireprotocommand(b'pushkey', b'namespace key old new', permission=b'push')
575 575 def pushkey(repo, proto, namespace, key, old, new):
576 576 # compatibility with pre-1.8 clients which were accidentally
577 577 # sending raw binary nodes rather than utf-8-encoded hex
578 578 if len(new) == 20 and stringutil.escapestr(new) != new:
579 579 # looks like it could be a binary node
580 580 try:
581 581 new.decode('utf-8')
582 582 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
583 583 except UnicodeDecodeError:
584 584 pass # binary, leave unmodified
585 585 else:
586 586 new = encoding.tolocal(new) # normal path
587 587
588 588 with proto.mayberedirectstdio() as output:
589 589 r = (
590 590 repo.pushkey(
591 591 encoding.tolocal(namespace),
592 592 encoding.tolocal(key),
593 593 encoding.tolocal(old),
594 594 new,
595 595 )
596 596 or False
597 597 )
598 598
599 599 output = output.getvalue() if output else b''
600 600 return wireprototypes.bytesresponse(b'%d\n%s' % (int(r), output))
601 601
602 602
603 603 @wireprotocommand(b'stream_out', permission=b'pull')
604 604 def stream(repo, proto):
605 605 '''If the server supports streaming clone, it advertises the "stream"
606 606 capability with a value representing the version and flags of the repo
607 607 it is serving. Client checks to see if it understands the format.
608 608 '''
609 609 return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
610 610
611 611
612 612 @wireprotocommand(b'unbundle', b'heads', permission=b'push')
613 613 def unbundle(repo, proto, heads):
614 614 their_heads = wireprototypes.decodelist(heads)
615 615
616 616 with proto.mayberedirectstdio() as output:
617 617 try:
618 618 exchange.check_heads(repo, their_heads, b'preparing changes')
619 619 cleanup = lambda: None
620 620 try:
621 621 payload = proto.getpayload()
622 622 if repo.ui.configbool(b'server', b'streamunbundle'):
623 623
624 624 def cleanup():
625 625 # Ensure that the full payload is consumed, so
626 626 # that the connection doesn't contain trailing garbage.
627 627 for p in payload:
628 628 pass
629 629
630 630 fp = util.chunkbuffer(payload)
631 631 else:
632 632 # write bundle data to temporary file as it can be big
633 633 fp, tempname = None, None
634 634
635 635 def cleanup():
636 636 if fp:
637 637 fp.close()
638 638 if tempname:
639 639 os.unlink(tempname)
640 640
641 641 fd, tempname = pycompat.mkstemp(prefix=b'hg-unbundle-')
642 642 repo.ui.debug(
643 643 b'redirecting incoming bundle to %s\n' % tempname
644 644 )
645 645 fp = os.fdopen(fd, pycompat.sysstr(b'wb+'))
646 646 for p in payload:
647 647 fp.write(p)
648 648 fp.seek(0)
649 649
650 650 gen = exchange.readbundle(repo.ui, fp, None)
651 651 if isinstance(
652 652 gen, changegroupmod.cg1unpacker
653 653 ) and not bundle1allowed(repo, b'push'):
654 654 if proto.name == b'http-v1':
655 655 # need to special case http because stderr do not get to
656 656 # the http client on failed push so we need to abuse
657 657 # some other error type to make sure the message get to
658 658 # the user.
659 659 return wireprototypes.ooberror(bundle2required)
660 660 raise error.Abort(
661 661 bundle2requiredmain, hint=bundle2requiredhint
662 662 )
663 663
664 664 r = exchange.unbundle(
665 665 repo, gen, their_heads, b'serve', proto.client()
666 666 )
667 667 if util.safehasattr(r, b'addpart'):
668 668 # The return looks streamable, we are in the bundle2 case
669 669 # and should return a stream.
670 670 return wireprototypes.streamreslegacy(gen=r.getchunks())
671 671 return wireprototypes.pushres(
672 672 r, output.getvalue() if output else b''
673 673 )
674 674
675 675 finally:
676 676 cleanup()
677 677
678 678 except (error.BundleValueError, error.Abort, error.PushRaced) as exc:
679 679 # handle non-bundle2 case first
680 680 if not getattr(exc, 'duringunbundle2', False):
681 681 try:
682 682 raise
683 683 except error.Abort as exc:
684 684 # The old code we moved used procutil.stderr directly.
685 685 # We did not change it to minimise code change.
686 686 # This need to be moved to something proper.
687 687 # Feel free to do it.
688 procutil.stderr.write(b"abort: %s\n" % exc.message)
689 if exc.hint is not None:
690 procutil.stderr.write(b"(%s)\n" % exc.hint)
688 procutil.stderr.write(exc.format())
691 689 procutil.stderr.flush()
692 690 return wireprototypes.pushres(
693 691 0, output.getvalue() if output else b''
694 692 )
695 693 except error.PushRaced:
696 694 return wireprototypes.pusherr(
697 695 pycompat.bytestr(exc),
698 696 output.getvalue() if output else b'',
699 697 )
700 698
701 699 bundler = bundle2.bundle20(repo.ui)
702 700 for out in getattr(exc, '_bundle2salvagedoutput', ()):
703 701 bundler.addpart(out)
704 702 try:
705 703 try:
706 704 raise
707 705 except error.PushkeyFailed as exc:
708 706 # check client caps
709 707 remotecaps = getattr(exc, '_replycaps', None)
710 708 if (
711 709 remotecaps is not None
712 710 and b'pushkey' not in remotecaps.get(b'error', ())
713 711 ):
714 712 # no support remote side, fallback to Abort handler.
715 713 raise
716 714 part = bundler.newpart(b'error:pushkey')
717 715 part.addparam(b'in-reply-to', exc.partid)
718 716 if exc.namespace is not None:
719 717 part.addparam(
720 718 b'namespace', exc.namespace, mandatory=False
721 719 )
722 720 if exc.key is not None:
723 721 part.addparam(b'key', exc.key, mandatory=False)
724 722 if exc.new is not None:
725 723 part.addparam(b'new', exc.new, mandatory=False)
726 724 if exc.old is not None:
727 725 part.addparam(b'old', exc.old, mandatory=False)
728 726 if exc.ret is not None:
729 727 part.addparam(b'ret', exc.ret, mandatory=False)
730 728 except error.BundleValueError as exc:
731 729 errpart = bundler.newpart(b'error:unsupportedcontent')
732 730 if exc.parttype is not None:
733 731 errpart.addparam(b'parttype', exc.parttype)
734 732 if exc.params:
735 733 errpart.addparam(b'params', b'\0'.join(exc.params))
736 734 except error.Abort as exc:
737 735 manargs = [(b'message', exc.message)]
738 736 advargs = []
739 737 if exc.hint is not None:
740 738 advargs.append((b'hint', exc.hint))
741 739 bundler.addpart(
742 740 bundle2.bundlepart(b'error:abort', manargs, advargs)
743 741 )
744 742 except error.PushRaced as exc:
745 743 bundler.newpart(
746 744 b'error:pushraced',
747 745 [(b'message', stringutil.forcebytestr(exc))],
748 746 )
749 747 return wireprototypes.streamreslegacy(gen=bundler.getchunks())
General Comments 0
You need to be logged in to leave comments. Login now