##// END OF EJS Templates
pytype: import typing directly...
marmoute -
r52178:9d372155 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,880 +1,883 b''
1 1 # branchmap.py - logic to computes, maintain and stores branchmap for local repo
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import struct
10 10
11 11 from .node import (
12 12 bin,
13 13 hex,
14 14 nullrev,
15 15 )
16
17 from typing import (
18 Any,
19 Callable,
20 Dict,
21 Iterable,
22 List,
23 Optional,
24 Set,
25 TYPE_CHECKING,
26 Tuple,
27 Union,
28 )
29
16 30 from . import (
17 31 encoding,
18 32 error,
19 33 obsolete,
20 pycompat,
21 34 scmutil,
22 35 util,
23 36 )
37
24 38 from .utils import (
25 39 repoviewutil,
26 40 stringutil,
27 41 )
28 42
29 if pycompat.TYPE_CHECKING:
30 from typing import (
31 Any,
32 Callable,
33 Dict,
34 Iterable,
35 List,
36 Optional,
37 Set,
38 Tuple,
39 Union,
40 )
43 # keeps pyflakes happy
44 assert [
45 Any,
46 Callable,
47 Dict,
48 Iterable,
49 List,
50 Optional,
51 Set,
52 Tuple,
53 Union,
54 ]
55
56 if TYPE_CHECKING:
41 57 from . import localrepo
42 58
43 assert any(
44 (
45 Any,
46 Callable,
47 Dict,
48 Iterable,
49 List,
50 Optional,
51 Set,
52 Tuple,
53 Union,
54 localrepo,
55 )
56 )
59 assert [localrepo]
57 60
58 61 subsettable = repoviewutil.subsettable
59 62
60 63 calcsize = struct.calcsize
61 64 pack_into = struct.pack_into
62 65 unpack_from = struct.unpack_from
63 66
64 67
65 68 class BranchMapCache:
66 69 """mapping of filtered views of repo with their branchcache"""
67 70
68 71 def __init__(self):
69 72 self._per_filter = {}
70 73
71 74 def __getitem__(self, repo):
72 75 self.updatecache(repo)
73 76 return self._per_filter[repo.filtername]
74 77
75 78 def updatecache(self, repo):
76 79 """Update the cache for the given filtered view on a repository"""
77 80 # This can trigger updates for the caches for subsets of the filtered
78 81 # view, e.g. when there is no cache for this filtered view or the cache
79 82 # is stale.
80 83
81 84 cl = repo.changelog
82 85 filtername = repo.filtername
83 86 bcache = self._per_filter.get(filtername)
84 87 if bcache is None or not bcache.validfor(repo):
85 88 # cache object missing or cache object stale? Read from disk
86 89 bcache = branchcache.fromfile(repo)
87 90
88 91 revs = []
89 92 if bcache is None:
90 93 # no (fresh) cache available anymore, perhaps we can re-use
91 94 # the cache for a subset, then extend that to add info on missing
92 95 # revisions.
93 96 subsetname = subsettable.get(filtername)
94 97 if subsetname is not None:
95 98 subset = repo.filtered(subsetname)
96 99 bcache = self[subset].copy()
97 100 extrarevs = subset.changelog.filteredrevs - cl.filteredrevs
98 101 revs.extend(r for r in extrarevs if r <= bcache.tiprev)
99 102 else:
100 103 # nothing to fall back on, start empty.
101 104 bcache = branchcache(repo)
102 105
103 106 revs.extend(cl.revs(start=bcache.tiprev + 1))
104 107 if revs:
105 108 bcache.update(repo, revs)
106 109
107 110 assert bcache.validfor(repo), filtername
108 111 self._per_filter[repo.filtername] = bcache
109 112
110 113 def replace(self, repo, remotebranchmap):
111 114 """Replace the branchmap cache for a repo with a branch mapping.
112 115
113 116 This is likely only called during clone with a branch map from a
114 117 remote.
115 118
116 119 """
117 120 cl = repo.changelog
118 121 clrev = cl.rev
119 122 clbranchinfo = cl.branchinfo
120 123 rbheads = []
121 124 closed = set()
122 125 for bheads in remotebranchmap.values():
123 126 rbheads += bheads
124 127 for h in bheads:
125 128 r = clrev(h)
126 129 b, c = clbranchinfo(r)
127 130 if c:
128 131 closed.add(h)
129 132
130 133 if rbheads:
131 134 rtiprev = max((int(clrev(node)) for node in rbheads))
132 135 cache = branchcache(
133 136 repo,
134 137 remotebranchmap,
135 138 repo[rtiprev].node(),
136 139 rtiprev,
137 140 closednodes=closed,
138 141 )
139 142
140 143 # Try to stick it as low as possible
141 144 # filter above served are unlikely to be fetch from a clone
142 145 for candidate in (b'base', b'immutable', b'served'):
143 146 rview = repo.filtered(candidate)
144 147 if cache.validfor(rview):
145 148 self._per_filter[candidate] = cache
146 149 cache.write(rview)
147 150 return
148 151
149 152 def clear(self):
150 153 self._per_filter.clear()
151 154
152 155 def write_delayed(self, repo):
153 156 unfi = repo.unfiltered()
154 157 for filtername, cache in self._per_filter.items():
155 158 if cache._delayed:
156 159 repo = unfi.filtered(filtername)
157 160 cache.write(repo)
158 161
159 162
160 163 def _unknownnode(node):
161 164 """raises ValueError when branchcache found a node which does not exists"""
162 165 raise ValueError('node %s does not exist' % node.hex())
163 166
164 167
165 168 def _branchcachedesc(repo):
166 169 if repo.filtername is not None:
167 170 return b'branch cache (%s)' % repo.filtername
168 171 else:
169 172 return b'branch cache'
170 173
171 174
172 175 class branchcache:
173 176 """A dict like object that hold branches heads cache.
174 177
175 178 This cache is used to avoid costly computations to determine all the
176 179 branch heads of a repo.
177 180
178 181 The cache is serialized on disk in the following format:
179 182
180 183 <tip hex node> <tip rev number> [optional filtered repo hex hash]
181 184 <branch head hex node> <open/closed state> <branch name>
182 185 <branch head hex node> <open/closed state> <branch name>
183 186 ...
184 187
185 188 The first line is used to check if the cache is still valid. If the
186 189 branch cache is for a filtered repo view, an optional third hash is
187 190 included that hashes the hashes of all filtered and obsolete revisions.
188 191
189 192 The open/closed state is represented by a single letter 'o' or 'c'.
190 193 This field can be used to avoid changelog reads when determining if a
191 194 branch head closes a branch or not.
192 195 """
193 196
194 197 def __init__(
195 198 self,
196 199 repo,
197 200 entries=(),
198 201 tipnode=None,
199 202 tiprev=nullrev,
200 203 filteredhash=None,
201 204 closednodes=None,
202 205 hasnode=None,
203 206 ):
204 207 # type: (localrepo.localrepository, Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
205 208 """hasnode is a function which can be used to verify whether changelog
206 209 has a given node or not. If it's not provided, we assume that every node
207 210 we have exists in changelog"""
208 211 self._repo = repo
209 212 self._delayed = False
210 213 if tipnode is None:
211 214 self.tipnode = repo.nullid
212 215 else:
213 216 self.tipnode = tipnode
214 217 self.tiprev = tiprev
215 218 self.filteredhash = filteredhash
216 219 # closednodes is a set of nodes that close their branch. If the branch
217 220 # cache has been updated, it may contain nodes that are no longer
218 221 # heads.
219 222 if closednodes is None:
220 223 self._closednodes = set()
221 224 else:
222 225 self._closednodes = closednodes
223 226 self._entries = dict(entries)
224 227 # whether closed nodes are verified or not
225 228 self._closedverified = False
226 229 # branches for which nodes are verified
227 230 self._verifiedbranches = set()
228 231 self._hasnode = hasnode
229 232 if self._hasnode is None:
230 233 self._hasnode = lambda x: True
231 234
232 235 def _verifyclosed(self):
233 236 """verify the closed nodes we have"""
234 237 if self._closedverified:
235 238 return
236 239 for node in self._closednodes:
237 240 if not self._hasnode(node):
238 241 _unknownnode(node)
239 242
240 243 self._closedverified = True
241 244
242 245 def _verifybranch(self, branch):
243 246 """verify head nodes for the given branch."""
244 247 if branch not in self._entries or branch in self._verifiedbranches:
245 248 return
246 249 for n in self._entries[branch]:
247 250 if not self._hasnode(n):
248 251 _unknownnode(n)
249 252
250 253 self._verifiedbranches.add(branch)
251 254
252 255 def _verifyall(self):
253 256 """verifies nodes of all the branches"""
254 257 needverification = set(self._entries.keys()) - self._verifiedbranches
255 258 for b in needverification:
256 259 self._verifybranch(b)
257 260
258 261 def __iter__(self):
259 262 return iter(self._entries)
260 263
261 264 def __setitem__(self, key, value):
262 265 self._entries[key] = value
263 266
264 267 def __getitem__(self, key):
265 268 self._verifybranch(key)
266 269 return self._entries[key]
267 270
268 271 def __contains__(self, key):
269 272 self._verifybranch(key)
270 273 return key in self._entries
271 274
272 275 def iteritems(self):
273 276 for k, v in self._entries.items():
274 277 self._verifybranch(k)
275 278 yield k, v
276 279
277 280 items = iteritems
278 281
279 282 def hasbranch(self, label):
280 283 """checks whether a branch of this name exists or not"""
281 284 self._verifybranch(label)
282 285 return label in self._entries
283 286
284 287 @classmethod
285 288 def fromfile(cls, repo):
286 289 f = None
287 290 try:
288 291 f = repo.cachevfs(cls._filename(repo))
289 292 lineiter = iter(f)
290 293 cachekey = next(lineiter).rstrip(b'\n').split(b" ", 2)
291 294 last, lrev = cachekey[:2]
292 295 last, lrev = bin(last), int(lrev)
293 296 filteredhash = None
294 297 hasnode = repo.changelog.hasnode
295 298 if len(cachekey) > 2:
296 299 filteredhash = bin(cachekey[2])
297 300 bcache = cls(
298 301 repo,
299 302 tipnode=last,
300 303 tiprev=lrev,
301 304 filteredhash=filteredhash,
302 305 hasnode=hasnode,
303 306 )
304 307 if not bcache.validfor(repo):
305 308 # invalidate the cache
306 309 raise ValueError('tip differs')
307 310 bcache.load(repo, lineiter)
308 311 except (IOError, OSError):
309 312 return None
310 313
311 314 except Exception as inst:
312 315 if repo.ui.debugflag:
313 316 msg = b'invalid %s: %s\n'
314 317 repo.ui.debug(
315 318 msg
316 319 % (
317 320 _branchcachedesc(repo),
318 321 stringutil.forcebytestr(inst),
319 322 )
320 323 )
321 324 bcache = None
322 325
323 326 finally:
324 327 if f:
325 328 f.close()
326 329
327 330 return bcache
328 331
329 332 def load(self, repo, lineiter):
330 333 """fully loads the branchcache by reading from the file using the line
331 334 iterator passed"""
332 335 for line in lineiter:
333 336 line = line.rstrip(b'\n')
334 337 if not line:
335 338 continue
336 339 node, state, label = line.split(b" ", 2)
337 340 if state not in b'oc':
338 341 raise ValueError('invalid branch state')
339 342 label = encoding.tolocal(label.strip())
340 343 node = bin(node)
341 344 self._entries.setdefault(label, []).append(node)
342 345 if state == b'c':
343 346 self._closednodes.add(node)
344 347
345 348 @staticmethod
346 349 def _filename(repo):
347 350 """name of a branchcache file for a given repo or repoview"""
348 351 filename = b"branch2"
349 352 if repo.filtername:
350 353 filename = b'%s-%s' % (filename, repo.filtername)
351 354 return filename
352 355
353 356 def validfor(self, repo):
354 357 """check that cache contents are valid for (a subset of) this repo
355 358
356 359 - False when the order of changesets changed or if we detect a strip.
357 360 - True when cache is up-to-date for the current repo or its subset."""
358 361 try:
359 362 node = repo.changelog.node(self.tiprev)
360 363 except IndexError:
361 364 # changesets were stripped and now we don't even have enough to
362 365 # find tiprev
363 366 return False
364 367 if self.tipnode != node:
365 368 # tiprev doesn't correspond to tipnode: repo was stripped, or this
366 369 # repo has a different order of changesets
367 370 return False
368 371 tiphash = scmutil.filteredhash(repo, self.tiprev, needobsolete=True)
369 372 # hashes don't match if this repo view has a different set of filtered
370 373 # revisions (e.g. due to phase changes) or obsolete revisions (e.g.
371 374 # history was rewritten)
372 375 return self.filteredhash == tiphash
373 376
374 377 def _branchtip(self, heads):
375 378 """Return tuple with last open head in heads and false,
376 379 otherwise return last closed head and true."""
377 380 tip = heads[-1]
378 381 closed = True
379 382 for h in reversed(heads):
380 383 if h not in self._closednodes:
381 384 tip = h
382 385 closed = False
383 386 break
384 387 return tip, closed
385 388
386 389 def branchtip(self, branch):
387 390 """Return the tipmost open head on branch head, otherwise return the
388 391 tipmost closed head on branch.
389 392 Raise KeyError for unknown branch."""
390 393 return self._branchtip(self[branch])[0]
391 394
392 395 def iteropen(self, nodes):
393 396 return (n for n in nodes if n not in self._closednodes)
394 397
395 398 def branchheads(self, branch, closed=False):
396 399 self._verifybranch(branch)
397 400 heads = self._entries[branch]
398 401 if not closed:
399 402 heads = list(self.iteropen(heads))
400 403 return heads
401 404
402 405 def iterbranches(self):
403 406 for bn, heads in self.items():
404 407 yield (bn, heads) + self._branchtip(heads)
405 408
406 409 def iterheads(self):
407 410 """returns all the heads"""
408 411 self._verifyall()
409 412 return self._entries.values()
410 413
411 414 def copy(self):
412 415 """return an deep copy of the branchcache object"""
413 416 return type(self)(
414 417 self._repo,
415 418 self._entries,
416 419 self.tipnode,
417 420 self.tiprev,
418 421 self.filteredhash,
419 422 self._closednodes,
420 423 )
421 424
422 425 def write(self, repo):
423 426 tr = repo.currenttransaction()
424 427 if not getattr(tr, 'finalized', True):
425 428 # Avoid premature writing.
426 429 #
427 430 # (The cache warming setup by localrepo will update the file later.)
428 431 self._delayed = True
429 432 return
430 433 try:
431 434 filename = self._filename(repo)
432 435 with repo.cachevfs(filename, b"w", atomictemp=True) as f:
433 436 cachekey = [hex(self.tipnode), b'%d' % self.tiprev]
434 437 if self.filteredhash is not None:
435 438 cachekey.append(hex(self.filteredhash))
436 439 f.write(b" ".join(cachekey) + b'\n')
437 440 nodecount = 0
438 441 for label, nodes in sorted(self._entries.items()):
439 442 label = encoding.fromlocal(label)
440 443 for node in nodes:
441 444 nodecount += 1
442 445 if node in self._closednodes:
443 446 state = b'c'
444 447 else:
445 448 state = b'o'
446 449 f.write(b"%s %s %s\n" % (hex(node), state, label))
447 450 repo.ui.log(
448 451 b'branchcache',
449 452 b'wrote %s with %d labels and %d nodes\n',
450 453 _branchcachedesc(repo),
451 454 len(self._entries),
452 455 nodecount,
453 456 )
454 457 self._delayed = False
455 458 except (IOError, OSError, error.Abort) as inst:
456 459 # Abort may be raised by read only opener, so log and continue
457 460 repo.ui.debug(
458 461 b"couldn't write branch cache: %s\n"
459 462 % stringutil.forcebytestr(inst)
460 463 )
461 464
462 465 def update(self, repo, revgen):
463 466 """Given a branchhead cache, self, that may have extra nodes or be
464 467 missing heads, and a generator of nodes that are strictly a superset of
465 468 heads missing, this function updates self to be correct.
466 469 """
467 470 starttime = util.timer()
468 471 cl = repo.changelog
469 472 # collect new branch entries
470 473 newbranches = {}
471 474 getbranchinfo = repo.revbranchcache().branchinfo
472 475 for r in revgen:
473 476 branch, closesbranch = getbranchinfo(r)
474 477 newbranches.setdefault(branch, []).append(r)
475 478 if closesbranch:
476 479 self._closednodes.add(cl.node(r))
477 480
478 481 # new tip revision which we found after iterating items from new
479 482 # branches
480 483 ntiprev = self.tiprev
481 484
482 485 # Delay fetching the topological heads until they are needed.
483 486 # A repository without non-continous branches can skip this part.
484 487 topoheads = None
485 488
486 489 # If a changeset is visible, its parents must be visible too, so
487 490 # use the faster unfiltered parent accessor.
488 491 parentrevs = repo.unfiltered().changelog.parentrevs
489 492
490 493 # Faster than using ctx.obsolete()
491 494 obsrevs = obsolete.getrevs(repo, b'obsolete')
492 495
493 496 for branch, newheadrevs in newbranches.items():
494 497 # For every branch, compute the new branchheads.
495 498 # A branchhead is a revision such that no descendant is on
496 499 # the same branch.
497 500 #
498 501 # The branchheads are computed iteratively in revision order.
499 502 # This ensures topological order, i.e. parents are processed
500 503 # before their children. Ancestors are inclusive here, i.e.
501 504 # any revision is an ancestor of itself.
502 505 #
503 506 # Core observations:
504 507 # - The current revision is always a branchhead for the
505 508 # repository up to that point.
506 509 # - It is the first revision of the branch if and only if
507 510 # there was no branchhead before. In that case, it is the
508 511 # only branchhead as there are no possible ancestors on
509 512 # the same branch.
510 513 # - If a parent is on the same branch, a branchhead can
511 514 # only be an ancestor of that parent, if it is parent
512 515 # itself. Otherwise it would have been removed as ancestor
513 516 # of that parent before.
514 517 # - Therefore, if all parents are on the same branch, they
515 518 # can just be removed from the branchhead set.
516 519 # - If one parent is on the same branch and the other is not
517 520 # and there was exactly one branchhead known, the existing
518 521 # branchhead can only be an ancestor if it is the parent.
519 522 # Otherwise it would have been removed as ancestor of
520 523 # the parent before. The other parent therefore can't have
521 524 # a branchhead as ancestor.
522 525 # - In all other cases, the parents on different branches
523 526 # could have a branchhead as ancestor. Those parents are
524 527 # kept in the "uncertain" set. If all branchheads are also
525 528 # topological heads, they can't have descendants and further
526 529 # checks can be skipped. Otherwise, the ancestors of the
527 530 # "uncertain" set are removed from branchheads.
528 531 # This computation is heavy and avoided if at all possible.
529 532 bheads = self._entries.get(branch, [])
530 533 bheadset = {cl.rev(node) for node in bheads}
531 534 uncertain = set()
532 535 for newrev in sorted(newheadrevs):
533 536 if newrev in obsrevs:
534 537 # We ignore obsolete changesets as they shouldn't be
535 538 # considered heads.
536 539 continue
537 540
538 541 if not bheadset:
539 542 bheadset.add(newrev)
540 543 continue
541 544
542 545 parents = [p for p in parentrevs(newrev) if p != nullrev]
543 546 samebranch = set()
544 547 otherbranch = set()
545 548 obsparents = set()
546 549 for p in parents:
547 550 if p in obsrevs:
548 551 # We ignored this obsolete changeset earlier, but now
549 552 # that it has non-ignored children, we need to make
550 553 # sure their ancestors are not considered heads. To
551 554 # achieve that, we will simply treat this obsolete
552 555 # changeset as a parent from other branch.
553 556 obsparents.add(p)
554 557 elif p in bheadset or getbranchinfo(p)[0] == branch:
555 558 samebranch.add(p)
556 559 else:
557 560 otherbranch.add(p)
558 561 if not (len(bheadset) == len(samebranch) == 1):
559 562 uncertain.update(otherbranch)
560 563 uncertain.update(obsparents)
561 564 bheadset.difference_update(samebranch)
562 565 bheadset.add(newrev)
563 566
564 567 if uncertain:
565 568 if topoheads is None:
566 569 topoheads = set(cl.headrevs())
567 570 if bheadset - topoheads:
568 571 floorrev = min(bheadset)
569 572 if floorrev <= max(uncertain):
570 573 ancestors = set(cl.ancestors(uncertain, floorrev))
571 574 bheadset -= ancestors
572 575 if bheadset:
573 576 self[branch] = [cl.node(rev) for rev in sorted(bheadset)]
574 577 tiprev = max(newheadrevs)
575 578 if tiprev > ntiprev:
576 579 ntiprev = tiprev
577 580
578 581 if ntiprev > self.tiprev:
579 582 self.tiprev = ntiprev
580 583 self.tipnode = cl.node(ntiprev)
581 584
582 585 if not self.validfor(repo):
583 586 # old cache key is now invalid for the repo, but we've just updated
584 587 # the cache and we assume it's valid, so let's make the cache key
585 588 # valid as well by recomputing it from the cached data
586 589 self.tipnode = repo.nullid
587 590 self.tiprev = nullrev
588 591 for heads in self.iterheads():
589 592 if not heads:
590 593 # all revisions on a branch are obsolete
591 594 continue
592 595 # note: tiprev is not necessarily the tip revision of repo,
593 596 # because the tip could be obsolete (i.e. not a head)
594 597 tiprev = max(cl.rev(node) for node in heads)
595 598 if tiprev > self.tiprev:
596 599 self.tipnode = cl.node(tiprev)
597 600 self.tiprev = tiprev
598 601 self.filteredhash = scmutil.filteredhash(
599 602 repo, self.tiprev, needobsolete=True
600 603 )
601 604
602 605 duration = util.timer() - starttime
603 606 repo.ui.log(
604 607 b'branchcache',
605 608 b'updated %s in %.4f seconds\n',
606 609 _branchcachedesc(repo),
607 610 duration,
608 611 )
609 612
610 613 self.write(repo)
611 614
612 615
613 616 class remotebranchcache(branchcache):
614 617 """Branchmap info for a remote connection, should not write locally"""
615 618
616 619 def write(self, repo):
617 620 pass
618 621
619 622
620 623 # Revision branch info cache
621 624
622 625 _rbcversion = b'-v1'
623 626 _rbcnames = b'rbc-names' + _rbcversion
624 627 _rbcrevs = b'rbc-revs' + _rbcversion
625 628 # [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
626 629 _rbcrecfmt = b'>4sI'
627 630 _rbcrecsize = calcsize(_rbcrecfmt)
628 631 _rbcmininc = 64 * _rbcrecsize
629 632 _rbcnodelen = 4
630 633 _rbcbranchidxmask = 0x7FFFFFFF
631 634 _rbccloseflag = 0x80000000
632 635
633 636
634 637 class revbranchcache:
635 638 """Persistent cache, mapping from revision number to branch name and close.
636 639 This is a low level cache, independent of filtering.
637 640
638 641 Branch names are stored in rbc-names in internal encoding separated by 0.
639 642 rbc-names is append-only, and each branch name is only stored once and will
640 643 thus have a unique index.
641 644
642 645 The branch info for each revision is stored in rbc-revs as constant size
643 646 records. The whole file is read into memory, but it is only 'parsed' on
644 647 demand. The file is usually append-only but will be truncated if repo
645 648 modification is detected.
646 649 The record for each revision contains the first 4 bytes of the
647 650 corresponding node hash, and the record is only used if it still matches.
648 651 Even a completely trashed rbc-revs fill thus still give the right result
649 652 while converging towards full recovery ... assuming no incorrectly matching
650 653 node hashes.
651 654 The record also contains 4 bytes where 31 bits contains the index of the
652 655 branch and the last bit indicate that it is a branch close commit.
653 656 The usage pattern for rbc-revs is thus somewhat similar to 00changelog.i
654 657 and will grow with it but be 1/8th of its size.
655 658 """
656 659
657 660 def __init__(self, repo, readonly=True):
658 661 assert repo.filtername is None
659 662 self._repo = repo
660 663 self._names = [] # branch names in local encoding with static index
661 664 self._rbcrevs = bytearray()
662 665 self._rbcsnameslen = 0 # length of names read at _rbcsnameslen
663 666 try:
664 667 bndata = repo.cachevfs.read(_rbcnames)
665 668 self._rbcsnameslen = len(bndata) # for verification before writing
666 669 if bndata:
667 670 self._names = [
668 671 encoding.tolocal(bn) for bn in bndata.split(b'\0')
669 672 ]
670 673 except (IOError, OSError):
671 674 if readonly:
672 675 # don't try to use cache - fall back to the slow path
673 676 self.branchinfo = self._branchinfo
674 677
675 678 if self._names:
676 679 try:
677 680 data = repo.cachevfs.read(_rbcrevs)
678 681 self._rbcrevs[:] = data
679 682 except (IOError, OSError) as inst:
680 683 repo.ui.debug(
681 684 b"couldn't read revision branch cache: %s\n"
682 685 % stringutil.forcebytestr(inst)
683 686 )
684 687 # remember number of good records on disk
685 688 self._rbcrevslen = min(
686 689 len(self._rbcrevs) // _rbcrecsize, len(repo.changelog)
687 690 )
688 691 if self._rbcrevslen == 0:
689 692 self._names = []
690 693 self._rbcnamescount = len(self._names) # number of names read at
691 694 # _rbcsnameslen
692 695
693 696 def _clear(self):
694 697 self._rbcsnameslen = 0
695 698 del self._names[:]
696 699 self._rbcnamescount = 0
697 700 self._rbcrevslen = len(self._repo.changelog)
698 701 self._rbcrevs = bytearray(self._rbcrevslen * _rbcrecsize)
699 702 util.clearcachedproperty(self, b'_namesreverse')
700 703
701 704 @util.propertycache
702 705 def _namesreverse(self):
703 706 return {b: r for r, b in enumerate(self._names)}
704 707
705 708 def branchinfo(self, rev):
706 709 """Return branch name and close flag for rev, using and updating
707 710 persistent cache."""
708 711 changelog = self._repo.changelog
709 712 rbcrevidx = rev * _rbcrecsize
710 713
711 714 # avoid negative index, changelog.read(nullrev) is fast without cache
712 715 if rev == nullrev:
713 716 return changelog.branchinfo(rev)
714 717
715 718 # if requested rev isn't allocated, grow and cache the rev info
716 719 if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
717 720 return self._branchinfo(rev)
718 721
719 722 # fast path: extract data from cache, use it if node is matching
720 723 reponode = changelog.node(rev)[:_rbcnodelen]
721 724 cachenode, branchidx = unpack_from(
722 725 _rbcrecfmt, util.buffer(self._rbcrevs), rbcrevidx
723 726 )
724 727 close = bool(branchidx & _rbccloseflag)
725 728 if close:
726 729 branchidx &= _rbcbranchidxmask
727 730 if cachenode == b'\0\0\0\0':
728 731 pass
729 732 elif cachenode == reponode:
730 733 try:
731 734 return self._names[branchidx], close
732 735 except IndexError:
733 736 # recover from invalid reference to unknown branch
734 737 self._repo.ui.debug(
735 738 b"referenced branch names not found"
736 739 b" - rebuilding revision branch cache from scratch\n"
737 740 )
738 741 self._clear()
739 742 else:
740 743 # rev/node map has changed, invalidate the cache from here up
741 744 self._repo.ui.debug(
742 745 b"history modification detected - truncating "
743 746 b"revision branch cache to revision %d\n" % rev
744 747 )
745 748 truncate = rbcrevidx + _rbcrecsize
746 749 del self._rbcrevs[truncate:]
747 750 self._rbcrevslen = min(self._rbcrevslen, truncate)
748 751
749 752 # fall back to slow path and make sure it will be written to disk
750 753 return self._branchinfo(rev)
751 754
752 755 def _branchinfo(self, rev):
753 756 """Retrieve branch info from changelog and update _rbcrevs"""
754 757 changelog = self._repo.changelog
755 758 b, close = changelog.branchinfo(rev)
756 759 if b in self._namesreverse:
757 760 branchidx = self._namesreverse[b]
758 761 else:
759 762 branchidx = len(self._names)
760 763 self._names.append(b)
761 764 self._namesreverse[b] = branchidx
762 765 reponode = changelog.node(rev)
763 766 if close:
764 767 branchidx |= _rbccloseflag
765 768 self._setcachedata(rev, reponode, branchidx)
766 769 return b, close
767 770
768 771 def setdata(self, rev, changelogrevision):
769 772 """add new data information to the cache"""
770 773 branch, close = changelogrevision.branchinfo
771 774
772 775 if branch in self._namesreverse:
773 776 branchidx = self._namesreverse[branch]
774 777 else:
775 778 branchidx = len(self._names)
776 779 self._names.append(branch)
777 780 self._namesreverse[branch] = branchidx
778 781 if close:
779 782 branchidx |= _rbccloseflag
780 783 self._setcachedata(rev, self._repo.changelog.node(rev), branchidx)
781 784 # If no cache data were readable (non exists, bad permission, etc)
782 785 # the cache was bypassing itself by setting:
783 786 #
784 787 # self.branchinfo = self._branchinfo
785 788 #
786 789 # Since we now have data in the cache, we need to drop this bypassing.
787 790 if 'branchinfo' in vars(self):
788 791 del self.branchinfo
789 792
790 793 def _setcachedata(self, rev, node, branchidx):
791 794 """Writes the node's branch data to the in-memory cache data."""
792 795 if rev == nullrev:
793 796 return
794 797 rbcrevidx = rev * _rbcrecsize
795 798 requiredsize = rbcrevidx + _rbcrecsize
796 799 rbccur = len(self._rbcrevs)
797 800 if rbccur < requiredsize:
798 801 # bytearray doesn't allocate extra space at least in Python 3.7.
799 802 # When multiple changesets are added in a row, precise resize would
800 803 # result in quadratic complexity. Overallocate to compensate by
801 804 # use the classic doubling technique for dynamic arrays instead.
802 805 # If there was a gap in the map before, less space will be reserved.
803 806 self._rbcrevs.extend(b'\0' * max(_rbcmininc, requiredsize))
804 807 pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx)
805 808 self._rbcrevslen = min(self._rbcrevslen, rev)
806 809
807 810 tr = self._repo.currenttransaction()
808 811 if tr:
809 812 tr.addfinalize(b'write-revbranchcache', self.write)
810 813
811 814 def write(self, tr=None):
812 815 """Save branch cache if it is dirty."""
813 816 repo = self._repo
814 817 wlock = None
815 818 step = b''
816 819 try:
817 820 # write the new names
818 821 if self._rbcnamescount < len(self._names):
819 822 wlock = repo.wlock(wait=False)
820 823 step = b' names'
821 824 self._writenames(repo)
822 825
823 826 # write the new revs
824 827 start = self._rbcrevslen * _rbcrecsize
825 828 if start != len(self._rbcrevs):
826 829 step = b''
827 830 if wlock is None:
828 831 wlock = repo.wlock(wait=False)
829 832 self._writerevs(repo, start)
830 833
831 834 except (IOError, OSError, error.Abort, error.LockError) as inst:
832 835 repo.ui.debug(
833 836 b"couldn't write revision branch cache%s: %s\n"
834 837 % (step, stringutil.forcebytestr(inst))
835 838 )
836 839 finally:
837 840 if wlock is not None:
838 841 wlock.release()
839 842
840 843 def _writenames(self, repo):
841 844 """write the new branch names to revbranchcache"""
842 845 if self._rbcnamescount != 0:
843 846 f = repo.cachevfs.open(_rbcnames, b'ab')
844 847 if f.tell() == self._rbcsnameslen:
845 848 f.write(b'\0')
846 849 else:
847 850 f.close()
848 851 repo.ui.debug(b"%s changed - rewriting it\n" % _rbcnames)
849 852 self._rbcnamescount = 0
850 853 self._rbcrevslen = 0
851 854 if self._rbcnamescount == 0:
852 855 # before rewriting names, make sure references are removed
853 856 repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
854 857 f = repo.cachevfs.open(_rbcnames, b'wb')
855 858 f.write(
856 859 b'\0'.join(
857 860 encoding.fromlocal(b)
858 861 for b in self._names[self._rbcnamescount :]
859 862 )
860 863 )
861 864 self._rbcsnameslen = f.tell()
862 865 f.close()
863 866 self._rbcnamescount = len(self._names)
864 867
865 868 def _writerevs(self, repo, start):
866 869 """write the new revs to revbranchcache"""
867 870 revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
868 871 with repo.cachevfs.open(_rbcrevs, b'ab') as f:
869 872 if f.tell() != start:
870 873 repo.ui.debug(
871 874 b"truncating cache/%s to %d\n" % (_rbcrevs, start)
872 875 )
873 876 f.seek(start)
874 877 if f.tell() != start:
875 878 start = 0
876 879 f.seek(start)
877 880 f.truncate()
878 881 end = revs * _rbcrecsize
879 882 f.write(self._rbcrevs[start:end])
880 883 self._rbcrevslen = revs
@@ -1,4134 +1,4135 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import copy as copymod
10 10 import errno
11 11 import functools
12 12 import os
13 13 import re
14 14
15 15 from typing import (
16 16 Any,
17 17 AnyStr,
18 18 Dict,
19 19 Iterable,
20 20 Optional,
21 TYPE_CHECKING,
21 22 cast,
22 23 )
23 24
24 25 from .i18n import _
25 26 from .node import (
26 27 hex,
27 28 nullrev,
28 29 short,
29 30 )
30 31 from .pycompat import (
31 32 open,
32 33 )
33 34 from .thirdparty import attr
34 35
35 36 from . import (
36 37 bookmarks,
37 38 changelog,
38 39 copies,
39 40 crecord as crecordmod,
40 41 encoding,
41 42 error,
42 43 formatter,
43 44 logcmdutil,
44 45 match as matchmod,
45 46 merge as mergemod,
46 47 mergestate as mergestatemod,
47 48 mergeutil,
48 49 obsolete,
49 50 patch,
50 51 pathutil,
51 52 phases,
52 53 pycompat,
53 54 repair,
54 55 revlog,
55 56 rewriteutil,
56 57 scmutil,
57 58 state as statemod,
58 59 subrepoutil,
59 60 templatekw,
60 61 templater,
61 62 util,
62 63 vfs as vfsmod,
63 64 )
64 65
65 66 from .utils import (
66 67 dateutil,
67 68 stringutil,
68 69 )
69 70
70 71 from .revlogutils import (
71 72 constants as revlog_constants,
72 73 )
73 74
74 if pycompat.TYPE_CHECKING:
75 if TYPE_CHECKING:
75 76 from . import (
76 77 ui as uimod,
77 78 )
78 79
# shorthand used by the record/patch code below
stringio = util.stringio

# templates of common command options

dryrunopts = [
    (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
]

confirmopts = [
    (b'', b'confirm', None, _(b'ask before applying actions')),
]

# options for commands that contact a remote repository
remoteopts = [
    (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
    (
        b'',
        b'remotecmd',
        b'',
        _(b'specify hg command to run on the remote side'),
        _(b'CMD'),
    ),
    (
        b'',
        b'insecure',
        None,
        _(b'do not verify server certificate (ignoring web.cacerts config)'),
    ),
]

# include/exclude pattern options for commands that walk the working copy
walkopts = [
    (
        b'I',
        b'include',
        [],
        _(b'include names matching the given patterns'),
        _(b'PATTERN'),
    ),
    (
        b'X',
        b'exclude',
        [],
        _(b'exclude names matching the given patterns'),
        _(b'PATTERN'),
    ),
]

commitopts = [
    (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
    (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
]

commitopts2 = [
    (
        b'd',
        b'date',
        b'',
        _(b'record the specified date as commit date'),
        _(b'DATE'),
    ),
    (
        b'u',
        b'user',
        b'',
        _(b'record the specified user as committer'),
        _(b'USER'),
    ),
]

commitopts3 = [
    (b'D', b'currentdate', None, _(b'record the current date as commit date')),
    (b'U', b'currentuser', None, _(b'record the current user as committer')),
]

formatteropts = [
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

templateopts = [
    (
        b'',
        b'style',
        b'',
        _(b'display using template map file (DEPRECATED)'),
        _(b'STYLE'),
    ),
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

# options shared by log-like commands (includes templateopts)
logopts = [
    (b'p', b'patch', None, _(b'show patch')),
    (b'g', b'git', None, _(b'use git extended diff format')),
    (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
    (b'M', b'no-merges', None, _(b'do not show merges')),
    (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
    (b'G', b'graph', None, _(b"show the revision DAG")),
] + templateopts

diffopts = [
    (b'a', b'text', None, _(b'treat all files as text')),
    (
        b'g',
        b'git',
        None,
        _(b'use git extended diff format (DEFAULT: diff.git)'),
    ),
    (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
    (b'', b'nodates', None, _(b'omit dates from diff headers')),
]

# whitespace-handling options shared by diff-producing commands
diffwsopts = [
    (
        b'w',
        b'ignore-all-space',
        None,
        _(b'ignore white space when comparing lines'),
    ),
    (
        b'b',
        b'ignore-space-change',
        None,
        _(b'ignore changes in the amount of white space'),
    ),
    (
        b'B',
        b'ignore-blank-lines',
        None,
        _(b'ignore changes whose lines are all blank'),
    ),
    (
        b'Z',
        b'ignore-space-at-eol',
        None,
        _(b'ignore changes in whitespace at EOL'),
    ),
]

# extended diff options (embeds diffwsopts in the middle)
diffopts2 = (
    [
        (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
        (
            b'p',
            b'show-function',
            None,
            _(
                b'show which function each change is in (DEFAULT: diff.showfunc)'
            ),
        ),
        (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
    ]
    + diffwsopts
    + [
        (
            b'U',
            b'unified',
            b'',
            _(b'number of lines of context to show'),
            _(b'NUM'),
        ),
        (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
        (
            b'',
            b'root',
            b'',
            _(b'produce diffs relative to subdirectory'),
            _(b'DIR'),
        ),
    ]
)

mergetoolopts = [
    (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
]

similarityopts = [
    (
        b's',
        b'similarity',
        b'',
        _(b'guess renamed files by similarity (0<=s<=100)'),
        _(b'SIMILARITY'),
    )
]

subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]

debugrevlogopts = [
    (b'c', b'changelog', False, _(b'open changelog')),
    (b'm', b'manifest', False, _(b'open manifest')),
    (b'', b'dir', b'', _(b'open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = b"^HG: ------------------------ >8 ------------------------$"
273 274
274 275
def check_at_most_one_arg(
    opts: Dict[AnyStr, Any],
    *args: AnyStr,
) -> Optional[AnyStr]:
    """abort if more than one of the arguments are in opts

    Returns the unique argument or None if none of them were specified.
    """

    def to_display(name: AnyStr) -> bytes:
        return pycompat.sysbytes(name).replace(b'_', b'-')

    seen = None
    for candidate in args:
        if not opts.get(candidate):
            continue
        if seen:
            raise error.InputError(
                _(b'cannot specify both --%s and --%s')
                % (to_display(seen), to_display(candidate))
            )
        seen = candidate
    return seen
297 298
298 299
def check_incompatible_arguments(
    opts: Dict[AnyStr, Any],
    first: AnyStr,
    others: Iterable[AnyStr],
) -> None:
    """abort if the first argument is given along with any of the others

    Unlike check_at_most_one_arg(), `others` are not mutually exclusive
    among themselves, and they're passed as a single collection.
    """
    for incompatible in others:
        check_at_most_one_arg(opts, first, incompatible)
311 312
312 313
def resolve_commit_options(ui: "uimod.ui", opts: Dict[str, Any]) -> bool:
    """modify commit options dict to handle related options

    The return value indicates that ``rewrite.update-timestamp`` is the reason
    the ``date`` option is set.
    """
    check_at_most_one_arg(opts, 'date', 'currentdate')
    check_at_most_one_arg(opts, 'user', 'currentuser')

    # did rewrite.update-timestamp (rather than the user) set the date?
    datemaydiffer = False

    if opts.get('currentdate'):
        opts['date'] = b'%d %d' % dateutil.makedate()
    elif opts.get('currentdate') is None and not opts.get('date'):
        if ui.configbool(b'rewrite', b'update-timestamp'):
            opts['date'] = b'%d %d' % dateutil.makedate()
            datemaydiffer = True

    if opts.get('currentuser'):
        opts['user'] = ui.username()

    return datemaydiffer
338 339
339 340
def check_note_size(opts: Dict[str, Any]) -> None:
    """make sure note is of valid format"""

    note = opts.get('note')
    if note:
        if len(note) > 255:
            raise error.InputError(
                _(b"cannot store a note of more than 255 bytes")
            )
        if b'\n' in note:
            raise error.InputError(_(b"note cannot contain a newline"))
351 352
352 353
def ishunk(x):
    """Return True if *x* is a (curses or plain) patch hunk instance."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
356 357
357 358
def isheader(x):
    """Return True if *x* is a (curses or plain) patch header instance."""
    return isinstance(x, (crecordmod.uiheader, patch.header))
361 362
362 363
def newandmodified(chunks):
    """Return (files newly added and modified, rename sources to restore)."""
    added_or_modified = set()
    alsorestore = set()
    for piece in chunks:
        if isheader(piece) and piece.isnewfile():
            fname = piece.filename()
            added_or_modified.add(fname)
            # other files mentioned by the header are rename/copy sources
            alsorestore.update(set(piece.files()) - {fname})
    return added_or_modified, alsorestore
371 372
372 373
def parsealiases(cmd):
    """Expand a b"name|alias1|alias2" command spec into a list of aliases.

    Aliases containing dashes additionally get a dash-free variant appended,
    unless that variant already appears in the list.
    """
    aliases = cmd.split(b"|")
    known = set(aliases)
    folded = []
    for name in aliases:
        if b'-' not in name:
            continue
        squashed = name.replace(b'-', b'')
        if squashed not in known:
            known.add(squashed)
            folded.append(squashed)
    aliases.extend(folded)
    return aliases
385 386
386 387
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original write method so the caller can restore it.
    """
    oldwrite = ui.write

    def labeledwrite(orig, *args, **kw):
        label = kw.pop('label', b'')
        for chunk, chunklabel in patch.difflabel(lambda: args):
            orig(chunk, label=label + chunklabel)

    def wrap(*args, **kwargs):
        return labeledwrite(oldwrite, *args, **kwargs)

    setattr(ui, 'write', wrap)
    return oldwrite
401 402
402 403
def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
    """Filter hunks through the curses UI when enabled, else in text mode."""
    if usecurses:
        try:
            if testfile:
                chunkselector = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector
                )
            else:
                chunkselector = crecordmod.chunkselector
            return crecordmod.filterpatch(
                ui, originalhunks, chunkselector, operation
            )
        except crecordmod.fallbackerror as e:
            # curses UI unavailable/unusable: warn and use the text flow
            ui.warn(b'%s\n' % e)
            ui.warn(_(b'falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)
421 422
422 423
def recordfilter(ui, originalhunks, match, operation=None):
    """Interactively let the user select hunks; return (chunks, opts).

    *operation* names what the filtering is for (reverting, committing,
    shelving, ...) so prompts can mention it (see patch.filterpatch).
    """
    use_curses = crecordmod.checkcurses(ui)
    testfile = ui.config(b'experimental', b'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(
            ui, originalhunks, use_curses, testfile, match, operation
        )
    finally:
        # always restore the unwrapped write method
        ui.write = oldwrite
    return newchunks, newopts
440 441
441 442
def _record(
    ui,
    repo,
    message,
    match,
    opts,
    commitfunc,
    backupall,
    filterfn,
    pats,
):
    """This is generic record driver.

    Its job is to interactively filter local changes, and
    accordingly prepare working directory into a state in which the
    job can be delegated to a non-interactive commit command such as
    'commit' or 'qrefresh'.

    After the actual job is done by non-interactive command, the
    working directory is restored to its original state.

    In the end we'll record interesting changes, and everything else
    will be left in place, so the user can continue working.

    ``commitfunc`` performs the actual commit; ``filterfn`` (e.g.
    recordfilter) selects hunks; ``backupall`` forces backing up every
    changed file rather than only the ones being partially committed.
    The caller must already hold the wlock.
    """
    assert repo.currentwlock() is not None
    if not opts.get(b'interactive-unshelve'):
        checkunfinished(repo, commit=True)
    wctx = repo[None]
    merge = len(wctx.parents()) > 1
    if merge:
        raise error.InputError(
            _(b'cannot partially commit a merge ' b'(use "hg commit" instead)')
        )

    def fail(f, msg):
        raise error.InputError(b'%s: %s' % (f, msg))

    force = opts.get(b'force')
    if not force:
        match = matchmod.badmatch(match, fail)

    status = repo.status(match=match)

    overrides = {(b'ui', b'commitsubrepos'): True}

    with repo.ui.configoverride(overrides, b'record'):
        # subrepoutil.precommit() modifies the status
        tmpstatus = scmutil.status(
            copymod.copy(status.modified),
            copymod.copy(status.added),
            copymod.copy(status.removed),
            copymod.copy(status.deleted),
            copymod.copy(status.unknown),
            copymod.copy(status.ignored),
            copymod.copy(status.clean),  # pytype: disable=wrong-arg-count
        )

        # Force allows -X subrepo to skip the subrepo.
        subs, commitsubs, newstate = subrepoutil.precommit(
            repo.ui, wctx, tmpstatus, match, force=True
        )
        for s in subs:
            if s in commitsubs:
                dirtyreason = wctx.sub(s).dirtyreason(True)
                raise error.Abort(dirtyreason)

    if not force:
        repo.checkcommitpatterns(wctx, match, status, fail)
    diffopts = patch.difffeatureopts(
        ui,
        opts=opts,
        whitespace=True,
        section=b'commands',
        configprefix=b'commit.interactive.',
    )
    diffopts.nodates = True
    diffopts.git = True
    diffopts.showfunc = True
    originaldiff = patch.diff(repo, changes=status, opts=diffopts)
    original_headers = patch.parsepatch(originaldiff)
    match = scmutil.match(repo[None], pats)

    # 1. filter patch, since we are intending to apply subset of it
    try:
        chunks, newopts = filterfn(ui, original_headers, match)
    except error.PatchParseError as err:
        raise error.InputError(_(b'error parsing patch: %s') % err)
    except error.PatchApplicationError as err:
        raise error.StateError(_(b'error applying patch: %s') % err)
    opts.update(newopts)

    # We need to keep a backup of files that have been newly added and
    # modified during the recording process because there is a previous
    # version without the edit in the workdir. We also will need to restore
    # files that were the sources of renames so that the patch application
    # works.
    newlyaddedandmodifiedfiles, alsorestore = newandmodified(chunks)
    contenders = set()
    for h in chunks:
        if isheader(h):
            contenders.update(set(h.files()))

    changed = status.modified + status.added + status.removed
    newfiles = [f for f in changed if f in contenders]
    if not newfiles:
        ui.status(_(b'no changes to record\n'))
        return 0

    modified = set(status.modified)

    # 2. backup changed files, so we can restore them in the end

    if backupall:
        tobackup = changed
    else:
        tobackup = [
            f
            for f in newfiles
            if f in modified or f in newlyaddedandmodifiedfiles
        ]
    backups = {}
    if tobackup:
        backupdir = repo.vfs.join(b'record-backups')
        try:
            os.mkdir(backupdir)
        except FileExistsError:
            pass
    try:
        # backup continues
        for f in tobackup:
            fd, tmpname = pycompat.mkstemp(
                prefix=os.path.basename(f) + b'.', dir=backupdir
            )
            os.close(fd)
            ui.debug(b'backup %r as %r\n' % (f, tmpname))
            util.copyfile(repo.wjoin(f), tmpname, copystat=True)
            backups[f] = tmpname

        fp = stringio()
        for c in chunks:
            fname = c.filename()
            if fname in backups:
                c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)

        # 2.5 optionally review / modify patch in text editor
        if opts.get(b'review', False):
            patchtext = (
                crecordmod.diffhelptext + crecordmod.patchhelptext + fp.read()
            )
            reviewedpatch = ui.edit(
                patchtext, b"", action=b"diff", repopath=repo.path
            )
            fp.truncate(0)
            fp.write(reviewedpatch)
            fp.seek(0)

        [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
        # 3a. apply filtered patch to clean repo  (clean)
        if backups:
            m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
            mergemod.revert_to(repo[b'.'], matcher=m)

        # 3b. (apply)
        if dopatch:
            try:
                ui.debug(b'applying patch\n')
                ui.debug(fp.getvalue())
                patch.internalpatch(ui, repo, fp, 1, eolmode=None)
            except error.PatchParseError as err:
                raise error.InputError(pycompat.bytestr(err))
            except error.PatchApplicationError as err:
                raise error.StateError(pycompat.bytestr(err))
        del fp

        # 4. We prepared working directory according to filtered
        # patch. Now is the time to delegate the job to
        # commit/qrefresh or the like!

        # Make all of the pathnames absolute.
        newfiles = [repo.wjoin(nf) for nf in newfiles]
        return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
    finally:
        # 5. finally restore backed-up files
        try:
            dirstate = repo.dirstate
            for realname, tmpname in backups.items():
                ui.debug(b'restoring %r to %r\n' % (tmpname, realname))

                if dirstate.get_entry(realname).maybe_clean:
                    # without normallookup, restoring timestamp
                    # may cause partially committed files
                    # to be treated as unmodified

                    # XXX-PENDINGCHANGE: We should clarify the context in
                    # which this function is called to make sure it
                    # already called within a `pendingchange`, However we
                    # are taking a shortcut here in order to be able to
                    # quickly deprecated the older API.
                    with dirstate.changing_parents(repo):
                        dirstate.update_file(
                            realname,
                            p1_tracked=True,
                            wc_tracked=True,
                            possibly_dirty=True,
                        )

                # copystat=True here and above are a hack to trick any
                # editors that have f open that we haven't modified them.
                #
                # Also note that this racy as an editor could notice the
                # file's mtime before we've finished writing it.
                util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                os.unlink(tmpname)
            if tobackup:
                os.rmdir(backupdir)
        except OSError:
            pass
661 662
662 663
def dorecord(
    ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
):
    """Entry point for interactive, record-style commit commands."""
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            hint = _(b'running non-interactively, use %s instead') % cmdsuggest
        else:
            hint = _(b'running non-interactively')
        raise error.InputError(hint)

    # make sure username is set before going interactive
    if not opts.get(b'user'):
        ui.username()  # raise exception, username not provided

    record_driver = functools.partial(
        _record,
        commitfunc=commitfunc,
        backupall=backupall,
        filterfn=filterfn,
        pats=pats,
    )
    return commit(ui, repo, record_driver, pats, opts)
687 688
688 689
class dirnode:
    """
    A directory in the user working copy, carrying what is needed to decide
    whether its status output can be tersed.

    path is the path to the directory, without a trailing '/'

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct child of this directory

    subdirs maps a sub-directory name to its own dirnode object
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record *filename* as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not direct child of this directory, we traverse to the
        directory of which this file is a direct child of and add the file
        there.
        """
        if b'/' in filename:
            # not a direct child: descend into (or create) the subdirectory
            childname, remainder = filename.split(b'/', 1)
            node = self.subdirs.get(childname)
            if node is None:
                node = dirnode(pathutil.join(self.path, childname))
                self.subdirs[childname] = node
            node.addfile(remainder, status)
        else:
            self._addfileindir(filename, status)

        self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        When every file below this directory shares one status and that
        status is in terseargs, a single (status, 'dir/') pair is yielded.
        Otherwise the direct files are yielded followed by a recursive walk
        of the subdirectories.
        """
        if len(self.statuses) == 1:
            lone = self.statuses.pop()
            # terse only when the status abbreviation was requested
            if lone in terseargs:
                yield lone, self.path + b'/'
                return

        yield from self.iterfilepaths()

        for child in self.subdirs.values():
            yield from child.tersewalk(terseargs)
788 789
789 790
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory shares the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is string which is passed by the user as the argument to `--terse`
    flag.

    Builds a tree of dirnode objects and collapses any directory whose files
    all share one requested status into a single 'dir/' entry.
    """
    # the order matters here as that is used to produce final list
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # checking the argument validity
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.InputError(_(b"'%s' not recognized") % ch)

    # dirnode for the repository root
    rootobj = dirnode(b'')
    pstatus = (
        ('modified', b'm'),
        ('added', b'a'),
        ('deleted', b'd'),
        ('clean', b'c'),
        ('unknown', b'u'),
        ('ignored', b'i'),
        ('removed', b'r'),
    )

    tersedict = {}
    for attrname, statuschar in pstatus:
        tersedict[statuschar] = []
        for fname in getattr(statuslist, attrname):
            rootobj.addfile(fname, statuschar)

    # the root dir itself is never tersed, so list its direct files
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk each sub-directory, tersing where possible
    for subdir in rootobj.subdirs.values():
        for st, fpath in subdir.tersewalk(terseargs):
            tersedict[st].append(fpath)

    ordered = []
    for st in allst:
        bucket = tersedict[st]
        bucket.sort()
        ordered.append(bucket)

    return scmutil.status(*ordered)
844 845
845 846
846 847 def _commentlines(raw):
847 848 '''Surround lineswith a comment char and a new line'''
848 849 lines = raw.splitlines()
849 850 commentedlines = [b'# %s' % line for line in lines]
850 851 return b'\n'.join(commentedlines) + b'\n'
851 852
852 853
@attr.s(frozen=True)
class morestatus:
    """Extra status output for a repository in an unfinished state.

    Built by readmorestatus(); emits the unfinished-operation banner and
    unresolved merge-conflict details under the 'status.morestatus' label.
    """

    repo = attr.ib()
    unfinishedop = attr.ib()
    unfinishedmsg = attr.ib()
    activemerge = attr.ib()
    unresolvedpaths = attr.ib()
    # Use a per-instance factory: a plain ``default=set()`` would share a
    # single mutable set between every morestatus instance, so paths
    # formatted through one instance would wrongly appear "already output"
    # on another.
    _formattedpaths = attr.ib(init=False, default=attr.Factory(set))
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        """Record *path* as formatted; tag it unresolved when merging."""
        self._formattedpaths.add(path)
        if self.activemerge and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        """Emit the trailing morestatus item(s) on formatter *fm*."""
        if self.unfinishedop or self.unfinishedmsg:
            fm.startitem()
            fm.data(itemtype=b'morestatus')

            if self.unfinishedop:
                fm.data(unfinished=self.unfinishedop)
                statemsg = (
                    _(b'The repository is in an unfinished *%s* state.')
                    % self.unfinishedop
                )
                fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
            if self.unfinishedmsg:
                fm.data(unfinishedmsg=self.unfinishedmsg)

            # May also start new data items.
            self._formatconflicts(fm)

            if self.unfinishedmsg:
                fm.plain(
                    b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
                )

    def _formatconflicts(self, fm):
        """Emit unresolved-conflict details for an active merge."""
        if not self.activemerge:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b'    %s'
                    % util.pathto(self.repo.root, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            msg = (
                _(
                    b'''Unresolved merge conflicts:

%s

To mark files as resolved:  hg resolve --mark FILE'''
                )
                % mergeliststr
            )

            # If any paths with unresolved conflicts were not previously
            # formatted, output them now.
            for f in self.unresolvedpaths:
                if f in self._formattedpaths:
                    # Already output.
                    continue
                fm.startitem()
                fm.context(repo=self.repo)
                # We can't claim to know the status of the file - it may just
                # have been in one of the states that were not requested for
                # display, so it could be anything.
                fm.data(itemtype=b'file', path=f, unresolved=True)

        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
931 932
932 933
def readmorestatus(repo):
    """Return a morestatus object, or None when repo has no unfinished state."""
    state = statemod.getrepostate(repo)
    ms = mergestatemod.mergestate.read(repo)
    merging = ms.active()
    if not (state or merging):
        return None

    op = msg = conflicts = None
    if state:
        op, msg = state
    if merging:
        conflicts = sorted(ms.unresolved())
    return morestatus(repo, op, msg, merging, conflicts)
949 950
950 951
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "log|history"
        candidates = [cmd]
    else:
        candidates = table.keys()

    allcmds = []
    for entry in candidates:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        matched = None
        if cmd in aliases:
            matched = cmd
        elif not strict:
            matched = next((a for a in aliases if a.startswith(cmd)), None)
        if matched is None:
            continue

        isdebug = aliases[0].startswith(b"debug") or matched.startswith(
            b"debug"
        )
        bucket = debugchoice if isdebug else choice
        bucket[matched] = (aliases, table[entry])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
988 989
989 990
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return next(iter(choice.values()))

    raise error.UnknownCommand(cmd, allcmds)
1005 1006
1006 1007
def changebranch(ui, repo, revs, label, **opts):
    """Change the branch name of given revs to label

    The revisions must form a single linear run of topological heads.
    Each changeset is recreated on the new branch via memctx, the old
    nodes are replaced through scmutil.cleanupnodes (creating obsmarkers
    and moving bookmarks), and the working copy is updated when its
    parent was rewritten.  Raises error.InputError on precondition
    failures (empty set, non-linear revs, name collision, mid-stack).
    """

    with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = logcmdutil.revrange(repo, revs)
        if not revs:
            raise error.InputError(b"empty revision set")
        # more than one root means the selection is not a linear stack
        roots = repo.revs(b'roots(%ld)', revs)
        if len(roots) > 1:
            raise error.InputError(
                _(b"cannot change branch of non-linear revisions")
            )
        rewriteutil.precheck(repo, revs, b'change branch of')

        root = repo[roots.first()]
        # branches of the stack's parents: reusing one of those is allowed
        rpb = {parent.branch() for parent in root.parents()}
        if (
            not opts.get('force')
            and label not in rpb
            and label in repo.branchmap()
        ):
            raise error.InputError(
                _(b"a branch of the same name already exists")
            )

        # make sure only topological heads
        if repo.revs(b'heads(%ld) - head()', revs):
            raise error.InputError(
                _(b"cannot change branch in middle of a stack")
            )

        # old node -> tuple of replacement node(s)
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context

        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                # serve file contents from the original changeset; files
                # missing there are treated as removed
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug(
                b"changing branch of '%s' from '%s' to '%s'\n"
                % (hex(ctx.node()), oldbranch, label)
            )
            extra = ctx.extra()
            extra[b'branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(
                repo,
                (p1, p2),
                ctx.description(),
                ctx.files(),
                filectxfn,
                user=ctx.user(),
                date=ctx.date(),
                extra=extra,
                branch=label,
            )

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug(b'new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(
            repo, replacements, b'branch-change', fixphase=True
        )

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg

                hg.update(repo, newid[0], quietempty=True)

        ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1108 1109
1109 1110
def findrepo(p):
    """Return the nearest ancestor of ``p`` (including ``p`` itself) that
    contains a '.hg' directory, or None when no repository is found."""
    while True:
        if os.path.isdir(os.path.join(p, b".hg")):
            return p
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root without finding a repository
            return None
        p = parent
1117 1118
1118 1119
def bailifchanged(repo, merge=True, hint=None):
    """Abort unless the working directory is clean.

    When ``merge`` is False, a pending uncommitted merge is tolerated
    (such as when 'update --check' runs).  ``hint`` is forwarded to the
    StateError raised on failure.  Subrepositories are checked as well.
    """
    if merge and repo.dirstate.p2() != repo.nullid:
        raise error.StateError(_(b'outstanding uncommitted merge'), hint=hint)

    status = repo.status()
    dirty = (
        status.modified or status.added or status.removed or status.deleted
    )
    if dirty:
        raise error.StateError(_(b'uncommitted changes'), hint=hint)

    # recurse into subrepos in a stable order
    wctx = repo[None]
    for name in sorted(wctx.substate):
        wctx.sub(name).bailifchanged(hint=hint)
1136 1137
1137 1138
def logmessage(ui: "uimod.ui", opts: Dict[bytes, Any]) -> Optional[bytes]:
    """Return the commit message selected by the -m/-l options.

    ``--message`` wins when present; otherwise ``--logfile`` is read
    ('-' meaning stdin).  Returns None when neither option supplied a
    message.  Raises error.Abort when the logfile cannot be read.
    """
    check_at_most_one_arg(opts, b'message', b'logfile')

    message = cast(Optional[bytes], opts.get(b'message'))
    logfile = opts.get(b'logfile')

    if message or not logfile:
        return message

    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while reading the file
        return b'\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(
            _(b"can't read commit message '%s': %s")
            % (logfile, encoding.strtolocal(inst.strerror))
        )
1158 1159
1159 1160
def mergeeditform(ctxorbool, baseformname):
    """Return the editform name to use (referencing a committemplate).

    ``ctxorbool`` is either a changectx about to be committed or a bool
    saying whether the commit being made is a merge.  The result is
    ``baseformname`` with '.merge' appended for merges and '.normal'
    appended otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx with two parents is a merge commit
        ismerge = len(ctxorbool.parents()) > 1

    suffix = b".merge" if ismerge else b".normal"
    return baseformname + suffix
1176 1177
1177 1178
def getcommiteditor(
    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
):
    """Pick the commit-message editor implied by the '--edit' option.

    ``finishdesc`` is called with the edited commit message (the new
    changeset's description) right after editing, before the empty-ness
    check, and returns the text actually stored into history — allowing
    the description to be post-processed.

    ``extramsg`` is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line; the 'HG: ' prefix and
    EOL are added automatically.

    ``editform`` is a dot-separated list of names distinguishing the
    purpose of the commit text editing.

    The forcing editor is returned whenever ``finishdesc`` or
    ``extramsg`` is given, regardless of ``edit`` — MQ relies on this.
    """
    if edit or finishdesc or extramsg:

        def editor(r, c, s):
            return commitforceeditor(
                r,
                c,
                s,
                finishdesc=finishdesc,
                extramsg=extramsg,
                editform=editform,
            )

        return editor

    if editform:

        def editor(r, c, s):
            return commiteditor(r, c, s, editform=editform)

        return editor

    return commiteditor
1208 1209
1209 1210
def _escapecommandtemplate(tmpl):
    """Run the literal (string) pieces of ``tmpl`` through escapestr.

    Template syntax fragments are passed through untouched; only plain
    string segments reported by templater.scantemplate are escaped.
    """

    def _piece(typ, start, end):
        chunk = tmpl[start:end]
        if typ == b'string':
            return stringutil.escapestr(chunk)
        return chunk

    return b''.join(
        _piece(typ, start, end)
        for typ, start, end in templater.scantemplate(tmpl, raw=True)
    )
1218 1219
1219 1220
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    # an empty template expands to itself; no templater needed
    if not tmpl:
        return tmpl
    # escape backslashes in literal segments before compiling the template
    t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return t.renderdefault(props)
1237 1238
1238 1239
def rendertemplate(ctx, tmpl, props=None):
    """Render the literal template ``tmpl`` against the changeset ``ctx``.

    Each entry in ``props`` must be a stringify-able value or a callable
    returning such a value, i.e. no bare list nor dict should be passed.
    """
    repo = ctx.repo()
    ui = repo.ui
    resources = formatter.templateresources(ui, repo)
    templ = formatter.maketemplater(
        ui, tmpl, defaults=templatekw.keywords, resources=resources
    )
    # the changeset itself is always available as {ctx}; caller-supplied
    # props are layered on top
    mapping = {b'ctx': ctx}
    mapping.update(props or {})
    return templ.renderdefault(mapping)
1254 1255
1255 1256
def format_changeset_summary(ui, ctx, command=None, default_spec=None):
    """Format a changeset summary (one line).

    The template is resolved in order: the command-specific
    'oneline-summary.<command>' config (when ``command`` is given), the
    generic 'oneline-summary' config, ``default_spec``, and finally a
    built-in fallback template.
    """
    spec = None
    if command:
        spec = ui.config(
            b'command-templates', b'oneline-summary.%s' % command, None
        )
    if not spec:
        spec = ui.config(b'command-templates', b'oneline-summary')
    if not spec:
        spec = default_spec
    if not spec:
        # built-in fallback: "rev:shortnode [names] \"first desc line\""
        spec = (
            b'{separate(" ", '
            b'label("oneline-summary.changeset", "{rev}:{node|short}")'
            b', '
            b'join(filter(namespaces % "{ifeq(namespace, "branches", "", join(names % "{label("oneline-summary.{namespace}", name)}", " "))}"), " ")'
            b')} '
            b'"{label("oneline-summary.desc", desc|firstline)}"'
        )
    rendered = rendertemplate(ctx, spec)
    # only the first rendered line is the summary
    return rendered.split(b'\n', 1)[0]
1278 1279
1279 1280
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # %-escape -> equivalent template fragment
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # some escapes are only valid when the caller provided their value
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        # %-escapes only apply inside literal string segments; template
        # fragments are copied through unchanged
        if typ != b'string':
            newname.append(pat[start:end])
            continue
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                # no more escapes: emit the rest of this segment
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # a trailing lone '%' has no escape character after it
                raise error.Abort(
                    _(b"incomplete format spec in output filename")
                )
            c = pat[n + 1 : n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(
                    _(b"invalid format spec '%%%s' in output filename") % c
                )
    return b''.join(newname)
1349 1350
1350 1351
def makefilename(ctx, pat, **props):
    """Expand the old-style %-format ``pat`` into a file name for ``ctx``.

    An empty ``pat`` is returned unchanged.  ``props`` supplies the
    expansion values (seqno, total, pathname, ...) both while rewriting
    the %-format into a template and while rendering it.
    """
    if not pat:
        return pat
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    fntemplate = _buildfntemplate(pat, **props)
    return rendertemplate(ctx, fntemplate, pycompat.byteskwargs(props))
1359 1360
1360 1361
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        # an empty/absent pattern also means the standard streams
        return True
    return pat == b'-'
1364 1365
1365 1366
1366 1367 class _unclosablefile:
1367 1368 def __init__(self, fp):
1368 1369 self._fp = fp
1369 1370
1370 1371 def close(self):
1371 1372 pass
1372 1373
1373 1374 def __iter__(self):
1374 1375 return iter(self._fp)
1375 1376
1376 1377 def __getattr__(self, attr):
1377 1378 return getattr(self._fp, attr)
1378 1379
1379 1380 def __enter__(self):
1380 1381 return self
1381 1382
1382 1383 def __exit__(self, exc_type, exc_value, exc_tb):
1383 1384 pass
1384 1385
1385 1386
def makefileobj(ctx, pat, mode=b'wb', **props):
    """Open the file named by the %-format ``pat`` for changeset ``ctx``.

    '-' (or an empty pattern) maps to the ui's stdout for write modes or
    stdin for read modes, wrapped so that closing it is a no-op.  Any
    other pattern is expanded through makefilename() and opened with
    ``mode``.
    """
    if isstdiofilename(pat):
        ui = ctx.repo().ui
        # only the plain read modes map to stdin; everything else writes
        if mode in (b'r', b'rb'):
            stream = ui.fin
        else:
            stream = ui.fout
        return _unclosablefile(stream)

    filename = makefilename(ctx, pat, **props)
    return open(filename, mode)
1398 1399
1399 1400
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog

    Exactly one of --changelog/--manifest/--dir/<file> selects the
    storage; conflicting combinations raise error.InputError.  With
    ``returnrevlog`` set, the low-level revlog is returned (unwrapping
    storage objects that expose ``_revlog``), and a bare path may be
    opened as a free-form revlog file; otherwise the storage object
    itself is returned.
    """
    cl = opts[b'changelog']
    mf = opts[b'manifest']
    dir = opts[b'dir']
    # validate the option combination before touching any storage
    msg = None
    if cl and mf:
        msg = _(b'cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _(b'cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _(b'cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _(
                b'cannot specify --changelog or --manifest or --dir '
                b'without a repository'
            )
    if msg:
        raise error.InputError(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if not scmutil.istreemanifest(repo):
                raise error.InputError(
                    _(
                        b"--dir can only be used on repos with "
                        b"treemanifest enabled"
                    )
                )
            if not dir.endswith(b'/'):
                dir = dir + b'/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif hasattr(r, '_revlog'):
            r = r._revlog  # pytype: disable=attribute-error
        elif r is not None:
            raise error.InputError(
                _(b'%r does not appear to be a revlog') % r
            )

    if not r:
        # nothing resolved inside the repo: fall back to opening a plain
        # revlog file from the filesystem (only valid in revlog mode)
        if not returnrevlog:
            raise error.InputError(_(b'cannot give path to non-revlog'))

        if not file_:
            raise error.CommandError(cmd, _(b'invalid arguments'))
        if not os.path.isfile(file_):
            raise error.InputError(_(b"revlog '%s' not found") % file_)

        # strip the '.i' extension to obtain the revlog radix
        target = (revlog_constants.KIND_OTHER, b'free-form:%s' % file_)
        r = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False),
            target=target,
            radix=file_[:-2],
        )
    return r
1473 1474
1474 1475
def openrevlog(repo, cmd, file_, opts):
    """Return the revlog backing the storage selected by ``opts``.

    Thin wrapper around ``openstorage()`` that forces ``returnrevlog``:
    the caller gets the low-level revlog rather than the higher-level
    storage object.  In most cases a caller cares about the main storage
    object, not the revlog backing it, so this should only be used by
    code that needs to examine low-level revlog implementation details,
    e.g. debug commands.
    """
    storage = openstorage(repo, cmd, file_, opts, returnrevlog=True)
    return storage
1486 1487
1487 1488
def copy(ui, repo, pats, opts: Dict[bytes, Any], rename=False):
    """Copy or rename (when ``rename`` is True) files between revisions.

    ``pats`` lists the sources followed by the destination.  Recognized
    ``opts`` keys include forget/after/dry_run/at_rev/force.  Returns a
    truthy value when at least one copy/rename failed.
    """
    check_incompatible_arguments(opts, b'forget', [b'dry_run'])

    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    # abstarget -> abssrc, to detect colliding destinations
    targets = {}
    forget = opts.get(b"forget")
    after = opts.get(b"after")
    dryrun = opts.get(b"dry_run")
    rev = opts.get(b'at_rev')
    if rev:
        if not forget and not after:
            # TODO: Remove this restriction and make it also create the copy
            # targets (and remove the rename source if rename==True).
            raise error.InputError(_(b'--at-rev requires --after'))
        ctx = logcmdutil.revsingle(repo, rev)
        if len(ctx.parents()) > 1:
            raise error.InputError(
                _(b'cannot mark/unmark copy in merge commit')
            )
    else:
        ctx = repo[None]

    pctx = ctx.p1()

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    if forget:
        # --forget: unmark existing copy records instead of creating new ones
        if ctx.rev() is None:
            new_ctx = ctx
        else:
            if len(ctx.parents()) > 1:
                raise error.InputError(_(b'cannot unmark copy in merge commit'))
            # avoid cycle context -> subrepo -> cmdutil
            from . import context

            rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
            new_ctx = context.overlayworkingctx(repo)
            new_ctx.setbase(ctx.p1())
            mergemod.graft(repo, ctx, wctx=new_ctx)

        match = scmutil.match(ctx, pats, opts)

        current_copies = ctx.p1copies()
        current_copies.update(ctx.p2copies())

        uipathfn = scmutil.getuipathfn(repo)
        for f in ctx.walk(match):
            if f in current_copies:
                new_ctx[f].markcopied(None)
            elif match.exact(f):
                ui.warn(
                    _(
                        b'%s: not unmarking as copy - file is not marked as copied\n'
                    )
                    % uipathfn(f)
                )

        if ctx.rev() is not None:
            # amend the existing changeset with the copy records removed
            with repo.lock():
                mem_ctx = new_ctx.tomemctx_for_amend(ctx)
                new_node = mem_ctx.commit()

                if repo.dirstate.p1() == ctx.node():
                    with repo.dirstate.changing_parents(repo):
                        scmutil.movedirstate(repo, repo[new_node])
                replacements = {ctx.node(): [new_node]}
                scmutil.cleanupnodes(
                    repo, replacements, b'uncopy', fixphase=True
                )

        return

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.InputError(_(b'no source or destination specified'))
    if len(pats) == 1:
        raise error.InputError(_(b'no destination specified'))
    # the last pattern is the destination; everything before it is a source
    dest = pats.pop()

    def walkpat(pat):
        # expand one source pattern into a list of (abs, rel, exact) tuples
        srcs = []
        # TODO: Inline and simplify the non-working-copy version of this code
        # since it shares very little with the working-copy version of it.
        ctx_to_walk = ctx if ctx.rev() is None else pctx
        m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
        for abs in ctx_to_walk.walk(m):
            rel = uipathfn(abs)
            exact = m.exact(abs)
            if abs not in ctx:
                if abs in pctx:
                    if not after:
                        if exact:
                            ui.warn(
                                _(
                                    b'%s: not copying - file has been marked '
                                    b'for remove\n'
                                )
                                % rel
                            )
                        continue
                else:
                    if exact:
                        ui.warn(
                            _(b'%s: not copying - file is not managed\n') % rel
                        )
                    continue

            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    if ctx.rev() is not None:
        # --at-rev: record the copy in an existing changeset by amending it
        rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
        absdest = pathutil.canonpath(repo.root, cwd, dest)
        if ctx.hasdir(absdest):
            raise error.InputError(
                _(b'%s: --at-rev does not support a directory as destination')
                % uipathfn(absdest)
            )
        if absdest not in ctx:
            raise error.InputError(
                _(b'%s: copy destination does not exist in %s')
                % (uipathfn(absdest), ctx)
            )

        # avoid cycle context -> subrepo -> cmdutil
        from . import context

        copylist = []
        for pat in pats:
            srcs = walkpat(pat)
            if not srcs:
                continue
            for abs, rel, exact in srcs:
                copylist.append(abs)

        if not copylist:
            raise error.InputError(_(b'no files to copy'))
        # TODO: Add support for `hg cp --at-rev . foo bar dir` and
        # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
        # existing functions below.
        if len(copylist) != 1:
            raise error.InputError(_(b'--at-rev requires a single source'))

        new_ctx = context.overlayworkingctx(repo)
        new_ctx.setbase(ctx.p1())
        mergemod.graft(repo, ctx, wctx=new_ctx)

        new_ctx.markcopied(absdest, copylist[0])

        with repo.lock():
            mem_ctx = new_ctx.tomemctx_for_amend(ctx)
            new_node = mem_ctx.commit()

            if repo.dirstate.p1() == ctx.node():
                with repo.dirstate.changing_parents(repo):
                    scmutil.movedirstate(repo, repo[new_node])
            replacements = {ctx.node(): [new_node]}
            scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)

        return

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy/rename one file; returns True on failure so the caller can
        # count errors
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if b'/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit(b'/', 1)
            abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        entry = repo.dirstate.get_entry(abstarget)

        already_commited = entry.tracked and not entry.added

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(
                _(b'%s: not overwriting - %s collides with %s\n')
                % (
                    reltarget,
                    repo.pathto(abssrc, cwd),
                    repo.pathto(prevsrc, cwd),
                )
            )
            return True  # report a failure

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
                abstarget
            ):
                if not rename:
                    ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
                    return True  # report a failure
                exists = False
                samefile = True

        if not after and exists or after and already_commited:
            if not opts[b'force']:
                if already_commited:
                    msg = _(b'%s: not overwriting - file already committed\n')
                    # Check if the target was added in the parent and the
                    # source already existed in the grandparent.
                    looks_like_copy_in_pctx = abstarget in pctx and any(
                        abssrc in gpctx and abstarget not in gpctx
                        for gpctx in pctx.parents()
                    )
                    if looks_like_copy_in_pctx:
                        if rename:
                            hint = _(
                                b"('hg rename --at-rev .' to record the rename "
                                b"in the parent of the working copy)\n"
                            )
                        else:
                            hint = _(
                                b"('hg copy --at-rev .' to record the copy in "
                                b"the parent of the working copy)\n"
                            )
                    else:
                        if after:
                            flags = b'--after --force'
                        else:
                            flags = b'--force'
                        if rename:
                            hint = (
                                _(
                                    b"('hg rename %s' to replace the file by "
                                    b'recording a rename)\n'
                                )
                                % flags
                            )
                        else:
                            hint = (
                                _(
                                    b"('hg copy %s' to replace the file by "
                                    b'recording a copy)\n'
                                )
                                % flags
                            )
                else:
                    msg = _(b'%s: not overwriting - file exists\n')
                    if rename:
                        hint = _(
                            b"('hg rename --after' to record the rename)\n"
                        )
                    else:
                        hint = _(b"('hg copy --after' to record the copy)\n")
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return True  # report a failure

        if after:
            # --after only records an already-performed copy/move
            if not exists:
                if rename:
                    ui.warn(
                        _(b'%s: not recording move - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                else:
                    ui.warn(
                        _(b'%s: not recording copy - %s does not exist\n')
                        % (relsrc, reltarget)
                    )
                return True  # report a failure
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or b'.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temporary name
                    tmp = target + b"~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(
                        _(b'%s: cannot copy - %s\n')
                        % (relsrc, encoding.strtolocal(inst.strerror))
                    )
                    return True  # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(
            ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
        )
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
                repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
            ctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(
                dest, os.path.basename(util.localpath(p))
            )
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many candidate targets already exist when
                    # stripping 'striplen' characters from the source path
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(
                        dest, os.path.basename(util.localpath(p))
                    )
                else:
                    res = lambda p: dest
        return res

    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.InputError(
                _(
                    b'with multiple sources, destination must be an '
                    b'existing directory'
                )
            )
        if util.endswithsep(dest):
            raise error.InputError(
                _(b'destination %s is not a directory') % dest
            )

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        hint = None
        if rename:
            hint = _(b'maybe you meant to use --after --at-rev=.')
        raise error.InputError(_(b'no files to copy'), hint=hint)

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    return errors != 0
1916 1917
1917 1918
## facility to let extension process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1938 1939
1939 1940
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` triple: a status message (or None
    when the patch carried no file to apply), the node of the created
    changeset (or None when no commit was made), and a boolean telling
    whether some hunks were rejected by a ``--partial`` application.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # pull the patch metadata out; command-line options override the
    # user/date recorded in the patch header
    tmpname = patchdata.get(b'filename')
    message = patchdata.get(b'message')
    user = opts.get(b'user') or patchdata.get(b'user')
    date = opts.get(b'date') or patchdata.get(b'date')
    branch = patchdata.get(b'branch')
    nodeid = patchdata.get(b'nodeid')
    p1 = patchdata.get(b'p1')
    p2 = patchdata.get(b'p2')

    nocommit = opts.get(b'no_commit')
    importbranch = opts.get(b'import_branch')
    update = not opts.get(b'bypass')
    strip = opts[b"strip"]
    prefix = opts[b"prefix"]
    sim = float(opts.get(b'similarity') or 0)

    # nothing to apply (e.g. the "patch" was only a description)
    if not tmpname:
        return None, None, False

    rejects = False

    # pick the commit message: explicit command line wins, then the patch
    # header; otherwise leave None so an editor is launched at commit time
    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug(b'message:\n%s\n' % (message or b''))

    # resolve the parents the new changeset will be committed on
    if len(parents) == 1:
        parents.append(repo[nullrev])
    if opts.get(b'exact'):
        if not nodeid or not p1:
            raise error.InputError(_(b'not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullrev]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullrev]
        except error.RepoError:
            p1, p2 = parents
        if p2.rev() == nullrev:
            ui.warn(
                _(
                    b"warning: import the patch as a normal revision\n"
                    b"(use --exact to import the patch as a merge)\n"
                )
            )
    else:
        p1, p2 = parents

    n = None
    if update:
        # normal path: apply the patch to the working directory
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get(b'exact') or importbranch:
            repo.dirstate.setbranch(
                branch or b'default', repo.currenttransaction()
            )

        partial = opts.get(b'partial', False)
        files = set()
        try:
            patch.patch(
                ui,
                repo,
                tmpname,
                strip=strip,
                prefix=prefix,
                files=files,
                eolmode=None,
                similarity=sim / 100.0,
            )
        except error.PatchParseError as e:
            raise error.InputError(
                pycompat.bytestr(e),
                hint=_(
                    b'check that whitespace in the patch has not been mangled'
                ),
            )
        except error.PatchApplicationError as e:
            # with --partial, a failed application leaves .rej files and we
            # continue to commit whatever applied cleanly
            if not partial:
                raise error.StateError(pycompat.bytestr(e))
            if partial:
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get(b'exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], b'import.normal')
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(
                    editform=editform, **pycompat.strkwargs(opts)
                )
            extra = {}
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                overrides[(b'ui', b'allowemptycommit')] = True
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = repo.commit(
                    message, user, date, match=m, editor=editor, extra=extra
                )
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
    else:
        # --bypass path: build the changeset in memory without touching the
        # working directory
        if opts.get(b'exact') or importbranch:
            branch = branch or b'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(
                    ui,
                    repo,
                    p1,
                    store,
                    tmpname,
                    strip,
                    prefix,
                    files,
                    eolmode=None,
                )
            except error.PatchParseError as e:
                raise error.InputError(
                    stringutil.forcebytestr(e),
                    hint=_(
                        b'check that whitespace in the patch has not been mangled'
                    ),
                )
            except error.PatchApplicationError as e:
                raise error.StateError(stringutil.forcebytestr(e))
            if opts.get(b'exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=b'import.bypass')
            memctx = context.memctx(
                repo,
                (p1.node(), p2.node()),
                message,
                files=files,
                filectxfn=store,
                user=user,
                date=date,
                branch=branch,
                editor=editor,
            )

            overrides = {}
            if opts.get(b'secret'):
                overrides[(b'phases', b'new-commit')] = b'secret'
            with repo.ui.configoverride(overrides, b'import'):
                n = memctx.commit()
        finally:
            store.close()
    if opts.get(b'exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
    elif opts.get(b'exact') and (not n or hex(n) != nodeid):
        raise error.Abort(_(b'patch is damaged or loses information'))
    msg = _(b'applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _(b'created %s') % short(n)
    return msg, n, rejects
2157 2158
2158 2159
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to the actual export function
# the function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
2166 2167
2167 2168
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Write one changeset as a "# HG changeset patch" to formatter ``fm``.

    ``seqno`` is the 1-based position of ``ctx`` in the exported series and
    is forwarded to the ``extraexport`` hooks.  When ``switch_parent`` is
    true the diff is taken against the second parent (when one exists)
    instead of the first.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = repo.nullid

    # emit the patch header
    fm.context(ctx=ctx)
    fm.plain(b'# HG changeset patch\n')
    fm.write(b'user', b'# User %s\n', ctx.user())
    fm.plain(b'# Date %d %d\n' % ctx.date())
    fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
    fm.condwrite(
        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
    )
    fm.write(b'node', b'# Node ID %s\n', hex(node))
    fm.plain(b'# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain(b'# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain(b'# %s\n' % header)

    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
    fm.plain(b'\n')

    # emit the diff body; plain output keeps labels, structured output gets
    # the raw diff bytes in a 'diff' field
    if fm.isplain():
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
2211 2212
2212 2213
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Write the patches for ``revs`` to a single destination.

    ``dest`` values starting with ``<`` (e.g. ``<unnamed>``) denote a
    pseudo destination and are not echoed to the user.
    """
    announce = not dest.startswith(b'<')
    seqno = 0
    for rev in revs:
        seqno += 1
        if announce:
            repo.ui.note(b"%s\n" % dest)
        fm.startitem()
        _exportsingle(
            repo, repo[rev], fm, match, switch_parent, seqno, diffopts
        )
2221 2222
2222 2223
def _exportfntemplate(
    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
):
    """Export changesets, grouped into files named via ``fntemplate``."""
    nrevs = len(revs)
    revwidth = max(len(str(r)) for r in revs)
    # map each generated filename to the [(seqno, rev), ...] destined for
    # it; sortdict keeps filenames in first-seen order
    per_file = util.sortdict()
    for seqno, rev in enumerate(revs, 1):
        name = makefilename(
            repo[rev], fntemplate, total=nrevs, seqno=seqno, revwidth=revwidth
        )
        per_file.setdefault(name, []).append((seqno, rev))

    for name, entries in per_file.items():
        with formatter.maybereopen(basefm, name) as fm:
            repo.ui.note(b"%s\n" % name)
            for seqno, rev in entries:
                fm.startitem()
                _exportsingle(
                    repo, repo[rev], fm, match, switch_parent, seqno, diffopts
                )
2247 2248
2248 2249
def _prefetchchangedfiles(repo, revs, match):
    """Prefetch every file touched by ``revs`` that ``match`` accepts.

    A falsy ``match`` accepts everything.  Used to warm the store before
    export/cat walk the revisions one by one.
    """
    touched = {
        fname
        for rev in revs
        for fname in repo[rev].files()
        if not match or match(fname)
    }
    filematcher = scmutil.matchfiles(repo, touched)
    scmutil.prefetchfiles(repo, [(rev, filematcher) for rev in revs])
2258 2259
2259 2260
def export(
    repo,
    revs,
    basefm,
    fntemplate=b'hg-%h.patch',
    switch_parent=False,
    opts=None,
    match=None,
):
    """export changesets as hg patches

    :repo: repository to export revisions from
    :revs: revision numbers to export
    :basefm: formatter to which patches should be written
    :fntemplate: optional template for per-revision patch file names;
        when empty, everything is written to ``basefm`` directly
    :switch_parent: when True, diff against the second parent when it is
        not nullid (default diffs against p1)
    :opts: diff options used to generate the patch
    :match: when given, restrict the export to matching files

    Returns nothing; emits "HG Changeset Patch" data either to one file
    per revision (``fntemplate`` given) or to ``basefm``.
    """
    _prefetchchangedfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(
            repo, revs, basefm, fntemplate, switch_parent, opts, match
        )
    else:
        # no template: everything goes to basefm under a pseudo filename
        _exportfile(
            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
        )
2301 2302
2302 2303
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Write the patches for ``revs`` to the open file object ``fp``."""
    _prefetchchangedfiles(repo, revs, match)

    destname = getattr(fp, 'name', b'<unnamed>')
    fm = formatter.formatter(repo.ui, fp, b'export', {})
    with fm:
        _exportfile(repo, revs, fm, destname, switch_parent, opts, match)
2310 2311
2311 2312
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    ``fm`` is the formatter to emit to, ``marker`` the obsolescence marker
    to render and ``index`` an optional integer prefix for the line."""
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    fm.condwrite(
        succs,
        b'succnodes',
        b'%s ',
        fm.formatlist(map(hex, succs), name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    # the date is rendered separately above, so drop it from the metadata
    meta = marker.metadata().copy()
    meta.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')
2342 2343
2343 2344
def finddate(ui, repo, date):
    """Return (as bytes) the tipmost revision matching the date spec ``date``.

    Raises ``error.InputError`` when no revision matches.
    """
    matching = repo.revs(b'date(%s)', date)
    try:
        tiprev = matching.max()
    except ValueError:
        raise error.InputError(_(b"revision matching date not found"))

    datestr = dateutil.datestr(repo[tiprev].date())
    ui.status(_(b"found revision %d from %s\n") % (tiprev, datestr))
    return b'%d' % tiprev
2357 2358
2358 2359
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    """Implement the ``hg add`` logic: schedule matched files for addition.

    ``uipathfn`` converts repo-relative paths for display; ``prefix`` is the
    path of this repo relative to the outermost one when recursing into
    subrepositories.  When ``explicitonly`` is true, only exact (explicitly
    listed) matches are added.  ``opts`` uses str keys (``subrepos``,
    ``dry_run``).

    Returns the list of file names that could not be added.
    """
    bad = []

    # record paths the matcher rejects while still delegating to match.bad
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        entry = dirstate.get_entry(f)
        # We don't want to even attempt to add back files that have been
        # removed. It would lead to a misleading message saying we're adding
        # the path, and can also lead to file/dir conflicts when attempting
        # to add it.
        removed = entry and entry.removed
        exact = match.exact(f)
        if (
            exact
            or not explicitonly
            and f not in wctx
            and repo.wvfs.lexists(f)
            and not removed
        ):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    # recurse into subrepositories; non-exact subrepo matches become
    # non-explicit (True) adds inside the subrepo unless --subrepos was given
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            if opts.get('subrepos'):
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2428 2429
2429 2430
def addwebdirpath(repo, serverpath, webconf):
    """Register ``repo`` and, recursively, its subrepositories under
    ``serverpath`` in the hgweb config mapping ``webconf``."""
    root = repo.root
    webconf[serverpath] = root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, root))

    # every revision touching .hgsub may reference additional subrepos
    for rev in repo.revs(b'filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2438 2439
2439 2440
def forget(
    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    """Implement ``hg forget``: stop tracking the matched files.

    Returns a ``(bad, forgot)`` pair: files that could not be forgotten and
    files that were (or, with ``dryrun``, would have been) forgotten.
    """
    if dryrun and interactive:
        raise error.InputError(
            _(b"cannot specify both --dry-run and --interactive")
        )
    bad = []
    # record paths the matcher rejects while still delegating to match.bad
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    # recurse into subrepositories first, collecting their results with the
    # subrepo path prepended
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        try:
            subbad, subforgot = sub.forget(
                submatch,
                subprefix,
                subuipathfn,
                dryrun=dryrun,
                interactive=interactive,
            )
            bad.extend([subpath + b'/' + f for f in subbad])
            forgot.extend([subpath + b'/' + f for f in subforgot])
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    # warn about explicitly named files that are not tracked
    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(
                            _(
                                b'not removing %s: '
                                b'file is already untracked\n'
                            )
                            % uipathfn(f)
                        )
                    bad.append(f)

    # with --interactive, let the user filter the list file by file
    if interactive:
        responses = _(
            b'[Ynsa?]'
            b'$$ &Yes, forget this file'
            b'$$ &No, skip this file'
            b'$$ &Skip remaining files'
            b'$$ Include &all remaining files'
            b'$$ &? (display help)'
        )
        # iterate over a copy since 'forget' is mutated during the loop
        for filename in forget[:]:
            r = ui.promptchoice(
                _(b'forget %s %s') % (uipathfn(filename), responses)
            )
            if r == 4:  # ?
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(
                        _(b'forget %s %s') % (uipathfn(filename), responses)
                    )
            if r == 0:  # yes
                continue
            elif r == 1:  # no
                forget.remove(filename)
            elif r == 2:  # Skip
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3:  # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2539 2540
2540 2541
def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
    """List the files of ``ctx`` matched by ``m``, each rendered via ``fmt``.

    Returns 0 when at least one file was printed, 1 otherwise.
    """
    ret = 1

    # file size/flags require a filectx lookup; skip it unless requested
    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
    if fm.isplain() and not needsfctx:
        # Fast path. The speed-up comes from skipping the formatter, and batching
        # calls to ui.write.
        buf = []
        for f in ctx.matches(m):
            buf.append(fmt % uipathfn(f))
            if len(buf) > 100:
                ui.write(b''.join(buf))
                del buf[:]
            ret = 0
        if buf:
            ui.write(b''.join(buf))
    else:
        for f in ctx.matches(m):
            fm.startitem()
            fm.context(ctx=ctx)
            if needsfctx:
                fc = ctx[f]
                fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
            fm.data(path=f)
            fm.plain(fmt % uipathfn(f))
            ret = 0

    # recurse into subrepositories that were explicitly matched or when
    # --subrepos was given
    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if (
                    sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
                    == 0
                ):
                    ret = 0
            except error.LookupError:
                ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    return ret
2587 2588
2588 2589
def remove(
    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
):
    """Implement the file-removal logic of ``hg remove``/``hg rm``.

    ``after`` corresponds to ``--after`` (record already-deleted files) and
    ``force`` to ``--force``.  When ``warnings`` is supplied by a recursive
    (subrepo) call, warning messages are appended to it instead of being
    printed here; the outermost call prints them all at the end.

    Returns 0 on success, 1 when at least one file was warned about.
    """
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean

    wctx = repo[None]

    # only the outermost invocation (warnings is None) prints the warnings
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(
        _(b'searching'), total=len(subs), unit=_(b'subrepos')
    )
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(
                    submatch,
                    subprefix,
                    subuipathfn,
                    after,
                    force,
                    subrepos,
                    dryrun,
                    warnings,
                ):
                    ret = 1
            except error.LookupError:
                warnings.append(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = pathutil.dirs(deleted)
    files = m.files()
    progress = ui.makeprogress(
        _(b'deleting'), total=len(files), unit=_(b'files')
    )
    for f in files:

        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + b'/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(
                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                )
            else:
                warnings.append(
                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                )
        # missing files will generate a warning elsewhere
        ret = 1
    progress.complete()

    # select which files actually get removed, depending on --force/--after
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=len(remaining), unit=_(b'files')
        )
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in files):
                warnings.append(
                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                )
            ret = 1
        progress.complete()
    else:
        list = deleted + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
        )
        for f in modified:
            progress.increment()
            warnings.append(
                _(
                    b'not removing %s: file is modified (use -f'
                    b' to force removal)\n'
                )
                % uipathfn(f)
            )
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(
                _(
                    b"not removing %s: file has been marked for add"
                    b" (use 'hg forget' to undo add)\n"
                )
                % uipathfn(f)
            )
            ret = 1
        progress.complete()

    list = sorted(list)
    progress = ui.makeprogress(
        _(b'deleting'), total=len(list), unit=_(b'files')
    )
    for f in list:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in list:
                    if f in added:
                        continue  # we never unlink added files on remove
                    rmdir = repo.ui.configbool(
                        b'experimental', b'removeemptydirs'
                    )
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2738 2739
2739 2740
2740 2741 def _catfmtneedsdata(fm):
2741 2742 return not fm.datahint() or b'data' in fm.datahint()
2742 2743
2743 2744
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must
    call this method first."""

    # fetching the contents can be expensive (e.g. lfs), so only do it when
    # the formatter actually asked for the 'data' field
    if _catfmtneedsdata(fm):
        contents = ctx[path].data()
        if decode:
            contents = ctx.repo().wwritedata(path, contents)
    else:
        contents = b''
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write(b'data', b'%s', contents)
    fm.data(path=path)
2761 2762
2762 2763
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Implement ``hg cat``: write the contents of matched files in ``ctx``.

    Output goes to ``basefm`` unless ``fntemplate`` is given, in which case
    each file is written to a file named from the template.  ``opts`` uses
    str keys (e.g. ``decode``).

    Returns 0 when at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # emit one file, either into basefm or into a template-named file
        filename = None
        if fntemplate:
            filename = makefilename(
                ctx, fntemplate, pathname=os.path.join(prefix, path)
            )
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                if _catfmtneedsdata(basefm):
                    scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
                write(file)
                return 0
        except KeyError:
            pass

    # prefetch contents only when the formatter will actually emit them
    if _catfmtneedsdata(basefm):
        scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, subpath)
            if not sub.cat(
                submatch,
                basefm,
                fntemplate,
                subprefix,
                **opts,
            ):
                err = 0
        except error.RepoLookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    return err
2822 2823
2823 2824
2824 2825 class _AddRemoveContext:
2825 2826 """a small (hacky) context to deal with lazy opening of context
2826 2827
2827 2828 This is to be used in the `commit` function right below. This deals with
2828 2829 lazily open a `changing_files` context inside a `transaction` that span the
2829 2830 full commit operation.
2830 2831
2831 2832 We need :
2832 2833 - a `changing_files` context to wrap the dirstate change within the
2833 2834 "addremove" operation,
2834 2835 - a transaction to make sure these change are not written right after the
2835 2836 addremove, but when the commit operation succeed.
2836 2837
2837 2838 However it get complicated because:
2838 2839 - opening a transaction "this early" shuffle hooks order, especially the
2839 2840 `precommit` one happening after the `pretxtopen` one which I am not too
2840 2841 enthusiastic about.
2841 2842 - the `mq` extensions + the `record` extension stacks many layers of call
2842 2843 to implement `qrefresh --interactive` and this result with `mq` calling a
2843 2844 `strip` in the middle of this function. Which prevent the existence of
2844 2845 transaction wrapping all of its function code. (however, `qrefresh` never
2845 2846 call the `addremove` bits.
2846 2847 - the largefile extensions (and maybe other extensions?) wraps `addremove`
2847 2848 so slicing `addremove` in smaller bits is a complex endeavour.
2848 2849
2849 2850 So I eventually took a this shortcut that open the transaction if we
2850 2851 actually needs it, not disturbing much of the rest of the code.
2851 2852
2852 2853 It will result in some hooks order change for `hg commit --addremove`,
2853 2854 however it seems a corner case enough to ignore that for now (hopefully).
2854 2855
2855 2856 Notes that None of the above problems seems insurmountable, however I have
2856 2857 been fighting with this specific piece of code for a couple of day already
2857 2858 and I need a solution to keep moving forward on the bigger work around
2858 2859 `changing_files` context that is being introduced at the same time as this
2859 2860 hack.
2860 2861
2861 2862 Each problem seems to have a solution:
2862 2863 - the hook order issue could be solved by refactoring the many-layer stack
2863 2864 that currently composes a commit and calling them earlier,
2864 2865 - the mq issue could be solved by refactoring `mq` so that the final strip
2865 2866 is done after transaction closure. Be warned that the mq code is quite
2866 2867 antic however.
2867 2868 - large-file could be reworked in parallel of the `addremove` to be
2868 2869 friendlier to this.
2869 2870
2870 2871 However each of these tasks are too much a diversion right now. In addition
2871 2872 they will be much easier to undertake when the `changing_files` dust has
2872 2873 settled."""
2873 2874
2874 2875 def __init__(self, repo):
2875 2876 self._repo = repo
2876 2877 self._transaction = None
2877 2878 self._dirstate_context = None
2878 2879 self._state = None
2879 2880
2880 2881 def __enter__(self):
2881 2882 assert self._state is None
2882 2883 self._state = True
2883 2884 return self
2884 2885
2885 2886 def open_transaction(self):
2886 2887 """open a `transaction` and `changing_files` context
2887 2888
2888 2889 Call this when you know that change to the dirstate will be needed and
2889 2890 we need to open the transaction early
2890 2891
2891 2892 This will also open the dirstate `changing_files` context, so you should
2892 2893 call `close_dirstate_context` when the distate changes are done.
2893 2894 """
2894 2895 assert self._state is not None
2895 2896 if self._transaction is None:
2896 2897 self._transaction = self._repo.transaction(b'commit')
2897 2898 self._transaction.__enter__()
2898 2899 if self._dirstate_context is None:
2899 2900 self._dirstate_context = self._repo.dirstate.changing_files(
2900 2901 self._repo
2901 2902 )
2902 2903 self._dirstate_context.__enter__()
2903 2904
2904 2905 def close_dirstate_context(self):
2905 2906 """close the change_files if any
2906 2907
2907 2908 Call this after the (potential) `open_transaction` call to close the
2908 2909 (potential) changing_files context.
2909 2910 """
2910 2911 if self._dirstate_context is not None:
2911 2912 self._dirstate_context.__exit__(None, None, None)
2912 2913 self._dirstate_context = None
2913 2914
2914 2915 def __exit__(self, *args):
2915 2916 if self._dirstate_context is not None:
2916 2917 self._dirstate_context.__exit__(*args)
2917 2918 if self._transaction is not None:
2918 2919 self._transaction.__exit__(*args)
2919 2920
2920 2921
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    if opts.get(b'date'):
        opts[b'date'] = dateutil.parsedate(opts.get(b'date'))

    with repo.wlock(), repo.lock():
        message = logmessage(ui, opts)
        matcher = scmutil.match(repo[None], pats, opts)

        with _AddRemoveContext(repo) as arctx:
            # extract addremove carefully -- this function can be called
            # from a command that doesn't support addremove
            if opts.get(b'addremove'):
                relative = scmutil.anypats(pats, opts)
                uipathfn = scmutil.getuipathfn(
                    repo,
                    legacyrelativevalue=relative,
                )
                ret = scmutil.addremove(
                    repo,
                    matcher,
                    b"",
                    uipathfn,
                    opts,
                    open_tr=arctx.open_transaction,
                )
                if ret:
                    msg = _(
                        b"failed to mark all new/missing files as added/removed"
                    )
                    raise error.Abort(msg)
            arctx.close_dirstate_context()
            return commitfunc(ui, repo, message, matcher, opts)
2953 2954
2954 2955
def samefile(f, ctx1, ctx2):
    """Return True if file *f* is identical in *ctx1* and *ctx2*.

    "Identical" means same content and same flags; a file absent from both
    manifests also counts as identical, while a file present in only one of
    them does not.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if in1 != in2:
        # present on exactly one side
        return False
    if not in1:
        # absent on both sides
        return True
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2965 2966
2966 2967
def amend(ui, repo, old, extra, pats, opts: Dict[str, Any]):
    """Rewrite changeset ``old`` in place, folding in working-copy changes.

    ``extra`` is copied (never mutated) and merged with the old changeset's
    and the working context's extra dicts. ``pats``/``opts`` select which
    working-copy files participate in the amend.

    Returns the node of the new (amended) changeset; if nothing would
    actually change (same files, description, user, date and extras), the
    original ``old.node()`` is returned unchanged instead of creating a
    no-op successor.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username()  # raise exception if username not set

    ui.note(_(b'amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        # date-only change should be ignored?
        datemaydiffer = resolve_commit_options(ui, opts)
        opts = pycompat.byteskwargs(opts)

        date = old.date()
        if opts.get(b'date'):
            date = dateutil.parsedate(opts.get(b'date'))
        user = opts.get(b'user') or old.user()

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            st = base.status(old)
            files = set(st.modified) | set(st.added) | set(st.removed)
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        relative = scmutil.anypats(pats, opts)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
        if opts.get(b'addremove'):
            with repo.dirstate.changing_files(repo):
                if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
                    m = _(
                        b"failed to mark all new/missing files as added/removed"
                    )
                    raise error.Abort(m)

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if b'.hgsub' in wctx or b'.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher
            )
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        # refuse to amend over unresolved merge conflicts
        ms = mergestatemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

        filestoamend = {f for f in wctx.files() if matcher(f)}

        changes = len(filestoamend) > 0
        changeset_copies = (
            repo.ui.config(b'experimental', b'copies.read-from')
            != b'filelog-only'
        )
        # If there are changes to amend or if copy information needs to be read
        # from the changeset extras, we cannot take the fast path of using
        # filectxs from the old commit.
        if changes or changeset_copies:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx)
            if old.p2():
                copied.update(copies.pathcopies(old.p2(), wctx))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [
                f
                for f in files
                if (f not in filestoamend or not samefile(f, wctx, base))
            ]

            def filectxfn(repo, ctx_, path):
                # memctx callback: supply the file content for *path* in the
                # amended commit, or None when the file should be dropped.
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should use the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path in filestoamend:
                        # Return None for removed files.
                        if path in wctx.removed():
                            return None
                        fctx = wctx[path]
                    else:
                        fctx = old.filectx(path)
                    flags = fctx.flags()
                    mctx = context.memfilectx(
                        repo,
                        ctx_,
                        fctx.path(),
                        fctx.data(),
                        islink=b'l' in flags,
                        isexec=b'x' in flags,
                        copysource=copied.get(path),
                    )
                    return mctx
                except KeyError:
                    return None

        else:
            ui.note(_(b'copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, b'commit.amend')

        if not message:
            message = old.description()
            # Default if message isn't provided and --edit is not passed is to
            # invoke editor, but allow --no-edit. If somehow we don't have any
            # description, let's always start the editor.
            doedit = not message or opts.get(b'edit') in [True, None]
        else:
            # Default if message is provided is to not invoke editor, but allow
            # --edit.
            doedit = opts.get(b'edit') is True
        editor = getcommiteditor(edit=doedit, editform=editform)

        # keep a pristine copy of extras for the no-op detection below,
        # before amend_source pollutes it
        pureextra = extra.copy()
        extra[b'amend_source'] = old.hex()

        new = context.memctx(
            repo,
            parents=[base.node(), old.p2().node()],
            text=message,
            files=files,
            filectxfn=filectxfn,
            user=user,
            date=date,
            extra=extra,
            editor=editor,
        )

        newdesc = changelog.stripdesc(new.description())
        if (
            (not changes)
            and newdesc == old.description()
            and user == old.user()
            and (date == old.date() or datemaydiffer)
            and pureextra == old.extra()
        ):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This not what we expect from amend.
            return old.node()

        commitphase = None
        if opts.get(b'secret'):
            commitphase = phases.secret
        elif opts.get(b'draft'):
            commitphase = phases.draft
        newid = repo.commitctx(new)
        ms.reset()

        with repo.dirstate.changing_parents(repo):
            # Reroute the working copy parent to the new changeset
            repo.setparents(newid, repo.nullid)

            # Fixing the dirstate because localrepo.commitctx does not update
            # it. This is rather convenient because we did not need to update
            # the dirstate for all the files in the new commit which commitctx
            # could have done if it updated the dirstate. Now, we can
            # selectively update the dirstate only for the amended files.
            dirstate = repo.dirstate

            # Update the state of the files which were added and modified in the
            # amend to "normal" in the dirstate. We need to use "normallookup" since
            # the files may have changed since the command started; using "normal"
            # would mark them as clean but with uncommitted contents.
            normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
            for f in normalfiles:
                dirstate.update_file(
                    f, p1_tracked=True, wc_tracked=True, possibly_dirty=True
                )

            # Update the state of files which were removed in the amend
            # to "removed" in the dirstate.
            removedfiles = set(wctx.removed()) & filestoamend
            for f in removedfiles:
                dirstate.update_file(f, p1_tracked=False, wc_tracked=False)

        # mark `old` as obsoleted by (or stripped and replaced with) `newid`
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get(b'note'):
            obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
        backup = ui.configbool(b'rewrite', b'backup-bundle')
        scmutil.cleanupnodes(
            repo,
            mapping,
            b'amend',
            metadata=obsmetadata,
            fixphase=True,
            targetphase=commitphase,
            backup=backup,
        )

    return newid
3209 3210
3210 3211
def commiteditor(repo, ctx, subs, editform=b''):
    """Return ctx's description, launching an editor only when it is empty.

    When the editor is used, an unchanged templated message aborts the
    commit (``unchangedmessagedetection``).
    """
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(
        repo, ctx, subs, editform=editform, unchangedmessagedetection=True
    )
3217 3218
3218 3219
def commitforceeditor(
    repo,
    ctx,
    subs,
    finishdesc=None,
    extramsg=None,
    editform=b'',
    unchangedmessagedetection=False,
):
    """Open the user's editor to obtain a commit message for *ctx*.

    The editor is pre-filled with the most specific matching
    ``committemplate`` config template (tried from the full dotted
    ``editform`` prefixed with ``changeset`` down to plain ``changeset``),
    falling back to the built-in text from ``buildcommittext``.

    ``finishdesc`` is an optional post-processing callable applied to the
    edited text; ``extramsg`` replaces the default "leave empty to abort"
    hint; ``subs`` lists subrepos to mention in the skeleton.

    Returns the edited text with ``HG:`` lines removed and anything below
    the special ``_linebelow`` marker stripped. Raises ``error.InputError``
    if the message ends up empty, or -- with ``unchangedmessagedetection``
    -- if a templated message was left untouched.
    """
    if not extramsg:
        extramsg = _(b"Leave message empty to abort commit.")

    # try the most specific committemplate override first, dropping one
    # trailing editform component per iteration (e.g. changeset.commit.amend,
    # changeset.commit, changeset)
    forms = [e for e in editform.split(b'.') if e]
    forms.insert(0, b'changeset')
    templatetext = None
    while forms:
        ref = b'.'.join(forms)
        if repo.ui.config(b'committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref
            )
            break
        forms.pop()
    else:
        # no template configured: use the classic built-in skeleton
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = encoding.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(
        committext,
        ctx.user(),
        ctx.extra(),
        editform=editform,
        pending=pending,
        repopath=repo.path,
        action=b'commit',
    )
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[: stripbelow.start()]

    text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.InputError(_(b"empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.InputError(_(b"commit message unchanged"))

    return text
3282 3283
3283 3284
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the ``committemplate`` template *ref* for changeset *ctx*.

    Every ``[committemplate]`` config entry is made available to the
    templater (unquoted) so templates can reference each other.
    """
    ui = repo.ui
    spec = formatter.reference_templatespec(ref)
    changeset_templater = logcmdutil.changesettemplater(ui, repo, spec)
    unquoted = {
        key: templater.unquotestring(value)
        for key, value in repo.ui.configitems(b'committemplate')
    }
    changeset_templater.t.cache.update(unquoted)

    if not extramsg:
        extramsg = b''  # ensure that extramsg is string

    ui.pushbuffer()
    changeset_templater.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3299 3300
3300 3301
def hgprefix(msg):
    """Prefix every non-empty line of *msg* (bytes) with ``HG: ``.

    Empty lines are dropped entirely rather than prefixed.
    """
    kept = (line for line in msg.split(b"\n") if line)
    return b"\n".join(b"HG: " + line for line in kept)
3303 3304
3304 3305
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default plain-text skeleton shown in the commit editor.

    The skeleton contains the existing description (if any), instructional
    ``HG:`` lines, metadata (user, branch/merge/bookmark state, subrepos)
    and the per-file change summary for *ctx*.
    """
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append(b"")
    lines.append(b"")  # Empty line between message and comments.
    lines.append(
        hgprefix(
            _(
                b"Enter commit message."
                b" Lines beginning with 'HG:' are removed."
            )
        )
    )
    lines.append(hgprefix(extramsg))
    lines.append(b"HG: --")
    lines.append(hgprefix(_(b"user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_(b"branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
    for s in subs:
        lines.append(hgprefix(_(b"subrepo %s") % s))
    for f in added:
        lines.append(hgprefix(_(b"added %s") % f))
    for f in modified:
        lines.append(hgprefix(_(b"changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_(b"removed %s") % f))
    if not (added or modified or removed):
        lines.append(hgprefix(_(b"no files changed")))
    lines.append(b"")

    return b"\n".join(lines)
3338 3339
3339 3340
def commitstatus(repo, node, branch, bheads=None, tip=None, **opts):
    """Give the user feedback after a commit of *node* on *branch*.

    Warns when the commit already existed (the tip did not move), prints
    "created new head" when a new branch head appeared, reports reopened
    closed heads, and echoes the committed changeset in verbose/debug mode.
    ``bheads`` are the branch heads before the commit; ``tip`` the previous
    repository tip.
    """
    ctx = repo[node]
    parents = ctx.parents()

    if tip is not None and repo.changelog.tip() == tip:
        # avoid reporting something like "committed new head" when
        # recommitting old changesets, and issue a helpful warning
        # for most instances
        repo.ui.warn(_(b"warning: commit already existed in the repository!\n"))
    elif not opts.get('amend') and bheads and node not in bheads:
        # A "created new head" message is printed unless one of the parents
        # was already a head of this branch. Summary of when the message
        # appears, for the two parents being:
        #   N: null or no parent
        #   B: parent on another named branch
        #   C: parent is a regular non-head changeset
        #   H: parent was a branch head of the current branch
        #
        #   Par Msg Comment
        #   N N  y  additional topo root
        #   B N  y  additional branch root
        #   C N  y  additional topo head
        #   H N  n  usual case
        #   B B  y  weird additional branch root
        #   C B  y  branch merge
        #   H B  n  merge with named branch
        #   C C  y  additional head from merge
        #   C H  n  merge with a head
        #   H H  n  head merge: head count decreases
        #
        # (nothing is printed for initial roots, since bheads is empty then)
        parent_was_head = any(
            p.node() in bheads and p.branch() == branch for p in parents
        )
        if not parent_was_head:
            repo.ui.status(_(b'created new head\n'))

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(
                    _(b'reopening closed branch head %d\n') % p.rev()
                )

    if repo.ui.debugflag:
        repo.ui.write(
            _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
        )
    elif repo.ui.verbose:
        repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3400 3401
3401 3402
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status restricted to *pats*/*opts*."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3404 3405
3405 3406
def revert(ui, repo, ctx, *pats, **opts):
    """Restore files selected by *pats*/*opts* to their state in *ctx*.

    Classifies every matched file by comparing its state in the target
    revision, the working copy and the dirstate, then dispatches each file
    to one revert action (revert/add/remove/drop/forget/undelete/noop),
    taking ``.orig`` backups according to the ``no_backup``/``interactive``
    options. The actual file operations are delegated to
    ``_performrevert``; matching subrepos are reverted recursively.
    """
    opts = pycompat.byteskwargs(opts)
    parent, p2 = repo.dirstate.parents()
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        # reverting against the second parent: compare dirstate against it
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    with repo.wlock(), repo.dirstate.changing_files(repo):
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get(b'interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # only warn about paths that are in neither `names`, the
                # target's substate, nor a directory prefix of a known name
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + b'/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn(b"%s: %s\n" % (uipathfn(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(
                node1=node, match=m, unknown=True, ignored=True, clean=True
            )
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded  # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != repo.nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if (
                src
                and src not in names
                and repo.dirstate.get_entry(src).removed
            ):
                dsremoved.add(src)
                names[src] = True

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if not repo.dirstate.get_entry(abs).added:
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate.get_entry(abs).added:
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {
            b'revert': ([], _(b'reverting %s\n')),
            b'add': ([], _(b'adding %s\n')),
            b'remove': ([], _(b'removing %s\n')),
            b'drop': ([], _(b'removing %s\n')),
            b'forget': ([], _(b'forgetting %s\n')),
            b'undelete': ([], _(b'undeleting %s\n')),
            b'noop': (None, _(b'no changes needed to %s\n')),
            b'unknown': (None, _(b'file not managed: %s\n')),
        }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3  # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1  # check if the existing file differs from target
        discard = 0  # never do backup
        if opts.get(b'no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions[b'remove']
        if not opts.get(b'no_backup'):
            backupanddel = actions[b'drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup
            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions[b'revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions[b'revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions[b'revert'], dsmodifiedbackup),
            # Added since target
            (added, actions[b'remove'], discard),
            # Added in working directory
            (dsadded, actions[b'forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions[b'drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions[b'add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions[b'add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions[b'undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions[b'undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions[b'noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions[b'unknown'], discard),
        )

        for abs, exact in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
                            absbakname = scmutil.backuppath(ui, repo, abs)
                            bakname = os.path.relpath(
                                absbakname, start=repo.root
                            )
                            ui.note(
                                _(b'saving current version of %s as %s\n')
                                % (uipathfn(abs), uipathfn(bakname))
                            )
                            if not opts.get(b'dry_run'):
                                if interactive:
                                    util.copyfile(target, absbakname)
                                else:
                                    util.rename(target, absbakname)
                if opts.get(b'dry_run'):
                    if ui.verbose or not exact:
                        ui.status(msg % uipathfn(abs))
                elif exact:
                    ui.warn(msg % uipathfn(abs))
                break

        if not opts.get(b'dry_run'):
            # prefetch file content for the actions that will read it
            needdata = (b'revert', b'add', b'undelete')
            oplist = [actions[name][0] for name in needdata]
            prefetch = scmutil.prefetchfiles
            matchfiles = scmutil.matchfiles(
                repo, [f for sublist in oplist for f in sublist]
            )
            prefetch(
                repo,
                [(ctx.rev(), matchfiles)],
            )
            match = scmutil.match(repo[None], pats)
            _performrevert(
                repo,
                ctx,
                names,
                uipathfn,
                actions,
                match,
                interactive,
                tobackup,
            )

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(
                        ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
                    )
                except KeyError:
                    raise error.Abort(
                        b"subrepository '%s' does not exist in %s!"
                        % (sub, short(ctx.node()))
                    )
3720 3721
3721 3722
def _performrevert(
    repo,
    ctx,
    names,
    uipathfn,
    actions,
    match,
    interactive=False,
    tobackup=None,
):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    ``names`` maps a file path to whether it was named exactly on the command
    line (used to decide when to print status messages); ``actions`` maps an
    action name (b'forget', b'remove', b'drop', b'revert', b'add',
    b'undelete', ...) to a ([files], message) pair computed by the caller.
    """
    parent, p2 = repo.dirstate.parents()
    node = ctx.node()
    # files the user declined to touch in interactive mode
    excluded_files = []

    def checkout(f):
        # write the target revision's version of f into the working copy
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # delete f from disk (best effort) and stop tracking it
        try:
            rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
            repo.wvfs.unlinkpath(f, rmdir=rmdir)
        except OSError:
            # file may already be gone; untrack it regardless
            pass
        repo.dirstate.set_untracked(f)

    def prntstatusmsg(action, f):
        # only mention files the user did not name explicitly (or be verbose)
        exact = names[f]
        if repo.ui.verbose or not exact:
            repo.ui.status(actions[action][1] % uipathfn(f))

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions[b'forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'forget', f)
                repo.dirstate.set_untracked(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'forget', f)
            repo.dirstate.set_untracked(f)
    for f in actions[b'remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice == 0:
                prntstatusmsg(b'remove', f)
                doremove(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'remove', f)
            doremove(f)
    for f in actions[b'drop'][0]:
        audit_path(f)
        prntstatusmsg(b'drop', f)
        repo.dirstate.set_untracked(f)

    # We are reverting to our parent. If possible, we had like `hg status`
    # to report the file as clean. We have to be less agressive for
    # merges to avoid losing information about copy introduced by the merge.
    # This might comes with bugs ?
    reset_copy = p2 == repo.nullid

    def normal(filename):
        return repo.dirstate.set_tracked(filename, reset_copy=reset_copy)

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
        m = scmutil.matchfiles(repo, torevert)
        diffopts = patch.difffeatureopts(
            repo.ui,
            whitespace=True,
            section=b'commands',
            configprefix=b'revert.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        operation = b'apply'
        if node == parent:
            if repo.ui.configbool(
                b'experimental', b'revert.interactive.select-to-keep'
            ):
                operation = b'keep'
            else:
                operation = b'discard'

        # the diff direction depends on whether selected hunks are applied
        # (forward diff) or discarded/kept (reverse diff)
        if operation == b'apply':
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        else:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        original_headers = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(
                repo.ui, original_headers, match, operation=operation
            )
            if operation == b'discard':
                chunks = patch.reversehunks(chunks)

        except error.PatchParseError as err:
            raise error.InputError(_(b'error parsing patch: %s') % err)
        except error.PatchApplicationError as err:
            raise error.StateError(_(b'error applying patch: %s') % err)

        # FIXME: when doing an interactive revert of a copy, there's no way of
        # performing a partial revert of the added file, the only option is
        # "remove added file <name> (Yn)?", so we don't need to worry about the
        # alsorestore value. Ideally we'd be able to partially revert
        # copied/renamed files.
        newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(chunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
            prntstatusmsg(b'revert', f)
        files = set()
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.backuppath(repo.ui, repo, abs)
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
                if abs not in files:
                    files.add(abs)
                    if operation == b'keep':
                        checkout(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchParseError as err:
                raise error.InputError(pycompat.bytestr(err))
            except error.PatchApplicationError as err:
                raise error.StateError(pycompat.bytestr(err))
        del fp
    else:
        for f in actions[b'revert'][0]:
            prntstatusmsg(b'revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions[b'add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f in newlyaddedandmodifiedfiles:
            continue

        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add new file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
            )
            if choice != 0:
                continue
        prntstatusmsg(b'add', f)
        checkout(f)
        repo.dirstate.set_tracked(f)

    for f in actions[b'undelete'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
            )
            if choice == 0:
                prntstatusmsg(b'undelete', f)
                checkout(f)
                normal(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'undelete', f)
            checkout(f)
            normal(f)

    # restore copy records for everything we recreated
    copied = copies.pathcopies(repo[parent], ctx)

    for f in (
        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
    ):
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3927 3928
# Extension hook points; extensions append callables to these hook lists.
#
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
3945 3946
3946 3947
def checkunfinished(repo, commit=False, skipmerge=False):
    """Abort if an unfinished multistep operation, like graft, is found.

    It's probably good to check this right before bailifchanged().
    """
    # Non-clearable states are checked first, so things like rebase take
    # precedence over update.
    for state in statemod._unfinishedstates:
        exempt = (
            state._clearable
            or (commit and state._allowcommit)
            or state._reportonly
        )
        if not exempt and state.isunfinished(repo):
            raise error.StateError(state.msg(), hint=state.hint())

    # Then the clearable ones.
    for state in statemod._unfinishedstates:
        exempt = (
            not state._clearable
            or (commit and state._allowcommit)
            or (state._opname == b'merge' and skipmerge)
            or state._reportonly
        )
        if not exempt and state.isunfinished(repo):
            raise error.StateError(state.msg(), hint=state.hint())
3974 3975
3975 3976
def clearunfinished(repo):
    """Check for unfinished operations (as above), and clear the ones
    that are clearable.
    """
    states = statemod._unfinishedstates
    # a non-clearable unfinished state blocks the whole operation
    for state in states:
        if state._reportonly or state._clearable:
            continue
        if state.isunfinished(repo):
            raise error.StateError(state.msg(), hint=state.hint())

    # now drop the state files of the clearable ones
    for state in states:
        if state._opname == b'merge' or state._reportonly:
            continue
        if state._clearable and state.isunfinished(repo):
            util.unlink(repo.vfs.join(state._fname))
3991 3992
3992 3993
def getunfinishedstate(repo):
    """Checks for unfinished operations and returns statecheck object
    for it"""
    # first matching unfinished state wins; None when the repo is clean
    return next(
        (s for s in statemod._unfinishedstates if s.isunfinished(repo)),
        None,
    )
4000 4001
4001 4002
def howtocontinue(repo):
    """Check for an unfinished operation and return the command to finish
    it.

    statemod._unfinishedstates is scanned for an unfinished operation; if
    the operation supports continuation, the message telling the user how
    to continue it is returned.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    """
    contmsg = _(b"continue: %s")
    continuable = (s for s in statemod._unfinishedstates if s._continueflag)
    for state in continuable:
        if state.isunfinished(repo):
            return contmsg % state.continuemsg(), True
    # no unfinished state: suggest committing when the working copy is dirty
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return contmsg % _(b"hg commit"), False
    return None, None
4022 4023
4023 4024
def checkafterresolved(repo):
    """Inform the user about the next action after completing hg resolve

    If there's an unfinished operation that supports the continue flag,
    howtocontinue will yield repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    """
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter(b"%s\n" % msg)
4038 4039
4039 4040
def wrongtooltocontinue(repo, task):
    """Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    """
    msg, warning = howtocontinue(repo)
    # only a warning-level continuation message is worth a hint
    hint = msg if warning else None
    raise error.StateError(_(b'no %s in progress') % task, hint=hint)
4054 4055
4055 4056
def abortgraft(ui, repo, graftstate):
    """abort the interrupted graft and rollbacks to the state before interrupted
    graft"""
    if not graftstate.exists():
        raise error.StateError(_(b"no interrupted graft to abort"))
    statedata = readgraftstate(repo, graftstate)
    newnodes = statedata.get(b'newnodes')
    if newnodes is None:
        # and old graft state which does not have all the data required to abort
        # the graft
        raise error.Abort(_(b"cannot abort using an old graftstate"))

    # changeset from which graft operation was started
    if len(newnodes) > 0:
        startctx = repo[newnodes[0]].p1()
    else:
        startctx = repo[b'.']
    # whether to strip or not
    cleanup = False

    if newnodes:
        newnodes = [repo[r].rev() for r in newnodes]
        cleanup = True
        # checking that none of the newnodes turned public or is public
        immutable = [c for c in newnodes if not repo[c].mutable()]
        if immutable:
            repo.ui.warn(
                _(b"cannot clean up public changesets %s\n")
                % b', '.join(bytes(repo[r]) for r in immutable),
                hint=_(b"see 'hg help phases' for details"),
            )
            cleanup = False

        # checking that no new nodes are created on top of grafted revs
        desc = set(repo.changelog.descendants(newnodes))
        if desc - set(newnodes):
            repo.ui.warn(
                _(
                    b"new changesets detected on destination "
                    b"branch, can't strip\n"
                )
            )
            cleanup = False

        if cleanup:
            # stripping needs both the working-copy lock and the store lock
            with repo.wlock(), repo.lock():
                mergemod.clean_update(startctx)
                # stripping the new nodes created
                strippoints = [
                    c.node() for c in repo.set(b"roots(%ld)", newnodes)
                ]
                repair.strip(repo.ui, repo, strippoints, backup=False)

    if not cleanup:
        # we don't update to the startnode if we can't strip
        startctx = repo[b'.']
        mergemod.clean_update(startctx)

    ui.status(_(b"graft aborted\n"))
    ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
    graftstate.delete()
    return 0
4118 4119
4119 4120
def readgraftstate(repo, graftstate):
    # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
    """read the graft state file and return a dict of the data stored in it"""
    try:
        return graftstate.read()
    except error.CorruptedState:
        # fall back to the old plain-text format: one node hash per line
        nodes = repo.vfs.read(b'graftstate').splitlines()
        return {b'nodes': nodes}
4128 4129
4129 4130
def hgabortgraft(ui, repo):
    """abort logic for aborting graft using 'hg abort'"""
    with repo.wlock():
        state = statemod.cmdstate(repo, b'graftstate')
        return abortgraft(ui, repo, state)
@@ -1,738 +1,738 b''
1 1 # encoding.py - character transcoding support for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import locale
10 10 import os
11 11 import re
12 import typing
12 13 import unicodedata
13 14
15 from typing import (
16 Any,
17 Callable,
18 List,
19 Text,
20 Type,
21 TypeVar,
22 Union,
23 )
24
14 25 from . import (
15 26 error,
16 27 policy,
17 28 pycompat,
18 29 )
19 30
20 31 from .pure import charencode as charencodepure
21 32
22 if pycompat.TYPE_CHECKING:
23 from typing import (
24 Any,
25 Callable,
26 List,
27 Text,
28 Type,
29 TypeVar,
30 Union,
31 )
33 # keep pyflakes happy
34 for t in (Any, Callable, List, Text, Type, Union):
35 assert t
32 36
33 # keep pyflakes happy
34 for t in (Any, Callable, List, Text, Type, Union):
35 assert t
36
37 _Tlocalstr = TypeVar('_Tlocalstr', bound='localstr')
37 _Tlocalstr = TypeVar('_Tlocalstr', bound='localstr')
38 38
# C or pure-Python implementation, selected by the policy module
charencode = policy.importmod('charencode')

isasciistr = charencode.isasciistr
asciilower = charencode.asciilower
asciiupper = charencode.asciiupper
_jsonescapeu8fast = charencode.jsonescapeu8fast

_sysstr = pycompat.sysstr
47 47
unichr = chr

# These unicode characters are ignored by HFS+ (Apple Technote 1150,
# "Unicode Subtleties"), so we need to ignore them in some places for
# sanity.
_ignore = [
    unichr(int(x, 16)).encode("utf-8")
    for x in b"200c 200d 200e 200f 202a 202b 202c 202d 202e "
    b"206a 206b 206c 206d 206e 206f feff".split()
]
# verify the next function will work
assert all(i.startswith((b"\xe2", b"\xef")) for i in _ignore)


def hfsignoreclean(s):
    # type: (bytes) -> bytes
    """Remove codepoints ignored by HFS+ from s.

    >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8'))
    '.hg'
    >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8'))
    '.hg'
    """
    # every ignorable codepoint encodes to UTF-8 starting with 0xe2 or 0xef,
    # so anything without those bytes can be returned untouched
    if b"\xe2" not in s and b"\xef" not in s:
        return s
    for junk in _ignore:
        s = s.replace(junk, b'')
    return s
75 75
76 76
# encoding.environ is provided read-only, which may not be used to modify
# the process environment
_nativeenviron = os.supports_bytes_environ
if _nativeenviron:
    environ = os.environb  # re-exports
    if pycompat.sysplatform == b'OpenVMS':
        # workaround for a bug in VSI 3.10 port
        # os.environb is only populated with a few Predefined symbols
        def newget(self, key, default=None):
            # pytype on linux does not understand OpenVMS special modules
            import _decc  # pytype: disable=import-error

            v = _decc.getenv(key, None)
            if isinstance(key, bytes):
                return default if v is None else v.encode('latin-1')
            else:
                return default if v is None else v

        environ.__class__.get = newget
else:
    # preferred encoding isn't known yet; use utf-8 to avoid unicode error
    # and recreate it once encoding is settled
    environ = {
        k.encode('utf-8'): v.encode('utf-8')
        for k, v in os.environ.items()  # re-exports
    }

# map nonstandard locale names to canonical Python codec names
_encodingrewrites = {
    b'646': b'ascii',
    b'ANSI_X3.4-1968': b'ascii',
}
# cp65001 is a Windows variant of utf-8, which isn't supported on Python 2.
# No idea if it should be rewritten to the canonical name 'utf-8' on Python 3.
# https://bugs.python.org/issue13216
if pycompat.iswindows:
    _encodingrewrites[b'cp65001'] = b'utf-8'

try:
    # HGENCODING overrides the locale's preferred encoding
    encoding = environ.get(b"HGENCODING")
    if not encoding:
        encoding = locale.getpreferredencoding().encode('ascii') or b'ascii'
    encoding = _encodingrewrites.get(encoding, encoding)
except locale.Error:
    encoding = b'ascii'
encodingmode = environ.get(b"HGENCODINGMODE", b"strict")
fallbackencoding = b'ISO-8859-1'
124 124
class localstr(bytes):
    """This class allows strings that are unmodified to be
    round-tripped to the local encoding and back"""

    def __new__(cls, u, l):
        # type: (bytes, bytes) -> localstr
        s = bytes.__new__(cls, l)
        # cache the UTF-8 form so fromlocal() can round-trip losslessly
        s._utf8 = u
        return s

    if typing.TYPE_CHECKING:
        # pseudo implementation to help pytype see localstr() constructor
        def __init__(self, u, l):
            # type: (bytes, bytes) -> None
            super(localstr, self).__init__(l)
            self._utf8 = u

    def __hash__(self):
        return hash(self._utf8)  # avoid collisions in local string space
143 143
144 144
class safelocalstr(bytes):
    """Tagged string denoting it was previously an internal UTF-8 string,
    and can be converted back to UTF-8 losslessly

    >>> assert safelocalstr(b'\\xc3') == b'\\xc3'
    >>> assert b'\\xc3' == safelocalstr(b'\\xc3')
    >>> assert b'\\xc3' in {safelocalstr(b'\\xc3'): 0}
    >>> assert safelocalstr(b'\\xc3') in {b'\\xc3': 0}
    """

    # intentionally empty: the subclass only tags provenance; equality and
    # hashing stay identical to plain bytes (see the doctests above)
154 154
155 155
def tolocal(s):
    # type: (bytes) -> bytes
    """
    Convert a string from internal UTF-8 to local encoding

    All internal strings should be UTF-8 but some repos before the
    implementation of locale support may contain latin1 or possibly
    other character sets. We attempt to decode everything strictly
    using UTF-8, then Latin-1, and failing that, we use UTF-8 and
    replace unknown characters.

    The localstr class is used to cache the known UTF-8 encoding of
    strings next to their local representation to allow lossless
    round-trip conversion back to UTF-8.

    >>> u = b'foo: \\xc3\\xa4' # utf-8
    >>> l = tolocal(u)
    >>> l
    'foo: ?'
    >>> fromlocal(l)
    'foo: \\xc3\\xa4'
    >>> u2 = b'foo: \\xc3\\xa1'
    >>> d = { l: 1, tolocal(u2): 2 }
    >>> len(d) # no collision
    2
    >>> b'foo: ?' in d
    False
    >>> l1 = b'foo: \\xe4' # historical latin1 fallback
    >>> l = tolocal(l1)
    >>> l
    'foo: ?'
    >>> fromlocal(l) # magically in utf-8
    'foo: \\xc3\\xa4'
    """

    if isasciistr(s):
        # pure ASCII is valid in any encoding; nothing to convert
        return s

    try:
        try:
            # make sure string is actually stored in UTF-8
            u = s.decode('UTF-8')
            if encoding == b'UTF-8':
                # fast path
                return s
            r = u.encode(_sysstr(encoding), "replace")
            if u == r.decode(_sysstr(encoding)):
                # r is a safe, non-lossy encoding of s
                return safelocalstr(r)
            return localstr(s, r)
        except UnicodeDecodeError:
            # we should only get here if we're looking at an ancient changeset
            try:
                u = s.decode(_sysstr(fallbackencoding))
                r = u.encode(_sysstr(encoding), "replace")
                if u == r.decode(_sysstr(encoding)):
                    # r is a safe, non-lossy encoding of s
                    return safelocalstr(r)
                return localstr(u.encode('UTF-8'), r)
            except UnicodeDecodeError:
                u = s.decode("utf-8", "replace")  # last ditch
                # can't round-trip
                return u.encode(_sysstr(encoding), "replace")
    except LookupError as k:
        # the configured encoding name is unknown to Python
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
223 223
224 224
def fromlocal(s):
    # type: (bytes) -> bytes
    """
    Convert a string from the local character encoding to UTF-8

    We attempt to decode strings using the encoding mode set by
    HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
    characters will cause an error message. Other modes include
    'replace', which replaces unknown characters with a special
    Unicode character, and 'ignore', which drops the character.
    """

    # can we do a lossless round-trip?
    if isinstance(s, localstr):
        # tolocal() cached the original UTF-8 form on the instance
        return s._utf8
    if isasciistr(s):
        # pure ASCII is already valid UTF-8
        return s

    try:
        u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
        return u.encode("utf-8")
    except UnicodeDecodeError as inst:
        # show a short context window around the offending byte
        sub = s[max(0, inst.start - 10) : inst.start + 10]
        raise error.Abort(
            b"decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
        )
    except LookupError as k:
        # the configured encoding name is unknown to Python
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
255 255
256 256
def unitolocal(u):
    # type: (Text) -> bytes
    """Convert a unicode string to a byte string of local encoding"""
    utf8 = u.encode('utf-8')
    return tolocal(utf8)
261 261
262 262
def unifromlocal(s):
    # type: (bytes) -> Text
    """Convert a byte string of local encoding to a unicode string"""
    utf8 = fromlocal(s)
    return utf8.decode('utf-8')
267 267
268 268
def unimethod(bytesfunc):
    # type: (Callable[[Any], bytes]) -> Callable[[Any], Text]
    """Create a proxy method that forwards __unicode__() and __str__() of
    Python 3 to __bytes__()"""

    def unifunc(obj):
        # delegate to the bytes producer, then convert to native str
        raw = bytesfunc(obj)
        return unifromlocal(raw)

    return unifunc
278 278
279 279
# converter functions between native str and byte string. use these if the
# character encoding is not aware (e.g. exception message) or is known to
# be locale dependent (e.g. date formatting.)
# (plain aliases: on Python 3 the native str is unicode)
strtolocal = unitolocal
strfromlocal = unifromlocal
strmethod = unimethod
286 286
287 287
def lower(s):
    # type: (bytes) -> bytes
    """best-effort encoding-aware case-folding of local string s"""
    try:
        # fast path: pure-ASCII strings are folded in C
        return asciilower(s)
    except UnicodeDecodeError:
        pass
    try:
        if isinstance(s, localstr):
            # use the cached UTF-8 form rather than the lossy local one
            u = s._utf8.decode("utf-8")
        else:
            u = s.decode(_sysstr(encoding), _sysstr(encodingmode))

        lu = u.lower()
        if u == lu:
            return s  # preserve localstring
        return lu.encode(_sysstr(encoding))
    except UnicodeError:
        return s.lower()  # we don't know how to fold this except in ASCII
    except LookupError as k:
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
311 311
312 312
def upper(s):
    # type: (bytes) -> bytes
    """best-effort encoding-aware case-folding of local string s"""
    try:
        # fast path: pure-ASCII strings are folded in C
        return asciiupper(s)
    except UnicodeDecodeError:
        return upperfallback(s)
320 320
321 321
def upperfallback(s):
    # type: (Any) -> Any
    # slow path of upper(): decode to unicode, fold, re-encode
    try:
        if isinstance(s, localstr):
            # use the cached UTF-8 form rather than the lossy local one
            u = s._utf8.decode("utf-8")
        else:
            u = s.decode(_sysstr(encoding), _sysstr(encodingmode))

        uu = u.upper()
        if u == uu:
            return s  # preserve localstring
        return uu.encode(_sysstr(encoding))
    except UnicodeError:
        return s.upper()  # we don't know how to fold this except in ASCII
    except LookupError as k:
        raise error.Abort(
            pycompat.bytestr(k), hint=b"please check your locale settings"
        )
340 340
341 341
if not _nativeenviron:
    # now encoding and helper functions are available, recreate the environ
    # dict to be exported to other modules
    if pycompat.iswindows:

        class WindowsEnviron(dict):
            """`os.environ` normalizes environment variables to uppercase on windows"""

            def get(self, key, default=None):
                # mimic os.environ's case-insensitive lookup
                return super().get(upper(key), default)

        environ = WindowsEnviron()

    for k, v in os.environ.items():  # re-exports
        environ[tolocal(k.encode('utf-8'))] = tolocal(v.encode('utf-8'))
357 357
358 358
# matches a lower-case Windows drive letter prefix like b'c:'
DRIVE_RE = re.compile(b'^[a-z]:')

# os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
# returns bytes.
if pycompat.iswindows:
    # Python 3 on Windows issues a DeprecationWarning about using the bytes
    # API when os.getcwdb() is called.
    #
    # Additionally, py3.8+ uppercases the drive letter when calling
    # os.path.realpath(), which is used on ``repo.root``. Since those
    # strings are compared in various places as simple strings, also call
    # realpath here. See https://bugs.python.org/issue40368
    #
    # However this is not reliable, so lets explicitly make this drive
    # letter upper case.
    #
    # note: we should consider dropping realpath here since it seems to
    # change the semantic of `getcwd`.

    def getcwd():
        # type: () -> bytes
        cwd = os.getcwd()  # re-exports
        cwd = os.path.realpath(cwd)
        cwd = strtolocal(cwd)
        if DRIVE_RE.match(cwd):
            # normalize the drive letter to upper case (see above)
            cwd = cwd[0:1].upper() + cwd[1:]
        return cwd


else:
    getcwd = os.getcwdb  # re-exports

# How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
_wide = _sysstr(
    environ.get(b"HGENCODINGAMBIGUOUS", b"narrow") == b"wide"
    and b"WFA"
    or b"WF"
)
396 396
397 397
def colwidth(s):
    # type: (bytes) -> int
    """Find the column width of a string for display in the local encoding"""
    decoded = s.decode(_sysstr(encoding), 'replace')
    return ucolwidth(decoded)
402 402
403 403
def ucolwidth(d):
    # type: (Text) -> int
    """Find the column width of a Unicode string for display.

    East-Asian characters whose east_asian_width class is listed in the
    module-level ``_wide`` policy string count as two columns; everything
    else counts as one. Falls back to len() if unicodedata lacks
    east_asian_width.
    """
    eaw = getattr(unicodedata, 'east_asian_width', None)
    if eaw is not None:
        # generator instead of a throwaway list, and a real conditional
        # expression instead of the legacy `and/or` ternary trick
        return sum(2 if eaw(c) in _wide else 1 for c in d)
    return len(d)
411 411
412 412
def getcols(s, start, c):
    # type: (bytes, int, int) -> bytes
    """Use colwidth to find a c-column substring of s starting at byte
    index start"""
    for end in range(start + c, len(s)):
        candidate = s[start:end]
        if colwidth(candidate) == c:
            return candidate
    raise ValueError('substring not found')
422 422
423 423
def trim(s, width, ellipsis=b'', leftside=False):
    # type: (bytes, int, bytes, bool) -> bytes
    """Trim string 's' to at most 'width' columns (including 'ellipsis').

    If 'leftside' is True, left side of string 's' is trimmed.
    'ellipsis' is always placed at trimmed side.

    >>> from .node import bin
    >>> def bprint(s):
    ...     print(pycompat.sysstr(s))
    >>> ellipsis = b'+++'
    >>> from . import encoding
    >>> encoding.encoding = b'utf-8'
    >>> t = b'1234567890'
    >>> bprint(trim(t, 12, ellipsis=ellipsis))
    1234567890
    >>> bprint(trim(t, 10, ellipsis=ellipsis))
    1234567890
    >>> bprint(trim(t, 8, ellipsis=ellipsis))
    12345+++
    >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
    +++67890
    >>> bprint(trim(t, 8))
    12345678
    >>> bprint(trim(t, 8, leftside=True))
    34567890
    >>> bprint(trim(t, 3, ellipsis=ellipsis))
    +++
    >>> bprint(trim(t, 1, ellipsis=ellipsis))
    +
    >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
    >>> t = u.encode(pycompat.sysstr(encoding.encoding))
    >>> bprint(trim(t, 12, ellipsis=ellipsis))
    \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 10, ellipsis=ellipsis))
    \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 8, ellipsis=ellipsis))
    \xe3\x81\x82\xe3\x81\x84+++
    >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
    +++\xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 5))
    \xe3\x81\x82\xe3\x81\x84
    >>> bprint(trim(t, 5, leftside=True))
    \xe3\x81\x88\xe3\x81\x8a
    >>> bprint(trim(t, 4, ellipsis=ellipsis))
    +++
    >>> bprint(trim(t, 4, ellipsis=ellipsis, leftside=True))
    +++
    >>> t = bin(b'112233445566778899aa') # invalid byte sequence
    >>> bprint(trim(t, 12, ellipsis=ellipsis))
    \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 10, ellipsis=ellipsis))
    \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 8, ellipsis=ellipsis))
    \x11\x22\x33\x44\x55+++
    >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
    +++\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 8))
    \x11\x22\x33\x44\x55\x66\x77\x88
    >>> bprint(trim(t, 8, leftside=True))
    \x33\x44\x55\x66\x77\x88\x99\xaa
    >>> bprint(trim(t, 3, ellipsis=ellipsis))
    +++
    >>> bprint(trim(t, 1, ellipsis=ellipsis))
    +
    """
    try:
        u = s.decode(_sysstr(encoding))
    except UnicodeDecodeError:
        # undecodable input: fall back to treating each byte as one
        # column and trim purely on byte length
        if len(s) <= width:  # trimming is not needed
            return s
        width -= len(ellipsis)
        if width <= 0:  # no enough room even for ellipsis
            return ellipsis[: width + len(ellipsis)]
        if leftside:
            return ellipsis + s[-width:]
        return s[:width] + ellipsis

    if ucolwidth(u) <= width:  # trimming is not needed
        return s

    # reserve display columns for the ellipsis itself
    width -= len(ellipsis)
    if width <= 0:  # no enough room even for ellipsis
        return ellipsis[: width + len(ellipsis)]

    # accumulate characters (from the kept side) until the column budget
    # is exceeded; reversing lets the same loop serve both sides
    chars = list(u)
    if leftside:
        chars.reverse()
    width_so_far = 0
    for i, c in enumerate(chars):
        width_so_far += ucolwidth(c)
        if width_so_far > width:
            break
    chars = chars[:i]
    if leftside:
        chars.reverse()
    u = u''.join(chars).encode(_sysstr(encoding))
    if leftside:
        return ellipsis + u
    return u + ellipsis
524 524
525 525
class normcasespecs:
    """what a platform's normcase does to ASCII strings

    This is specified per platform, and should be consistent with what normcase
    on that platform actually does.

    lower: normcase lowercases ASCII strings
    upper: normcase uppercases ASCII strings
    other: the fallback function should always be called

    This should be kept in sync with normcase_spec in util.h."""

    # normcase lowercases ASCII strings
    lower = -1
    # normcase uppercases ASCII strings
    upper = 1
    # neither: always consult the fallback function
    other = 0
541 541
542 542
def jsonescape(s, paranoid=False):
    # type: (Any, Any) -> Any
    """returns a string suitable for JSON

    JSON is problematic for us because it doesn't support non-Unicode
    bytes. To deal with this, we take the following approach:

    - localstr/safelocalstr objects are converted back to UTF-8
    - valid UTF-8/ASCII strings are passed as-is
    - other strings are converted to UTF-8b surrogate encoding
    - apply JSON-specified string escaping

    (escapes are doubled in these tests)

    >>> jsonescape(b'this is a test')
    'this is a test'
    >>> jsonescape(b'escape characters: \\0 \\x0b \\x7f')
    'escape characters: \\\\u0000 \\\\u000b \\\\u007f'
    >>> jsonescape(b'escape characters: \\b \\t \\n \\f \\r \\" \\\\')
    'escape characters: \\\\b \\\\t \\\\n \\\\f \\\\r \\\\" \\\\\\\\'
    >>> jsonescape(b'a weird byte: \\xdd')
    'a weird byte: \\xed\\xb3\\x9d'
    >>> jsonescape(b'utf-8: caf\\xc3\\xa9')
    'utf-8: caf\\xc3\\xa9'
    >>> jsonescape(b'')
    ''

    If paranoid, non-ascii and common troublesome characters are also escaped.
    This is suitable for web output.

    >>> s = b'escape characters: \\0 \\x0b \\x7f'
    >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
    >>> s = b'escape characters: \\b \\t \\n \\f \\r \\" \\\\'
    >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
    >>> jsonescape(b'escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True)
    'escape boundary: ~ \\\\u007f \\\\u0080'
    >>> jsonescape(b'a weird byte: \\xdd', paranoid=True)
    'a weird byte: \\\\udcdd'
    >>> jsonescape(b'utf-8: caf\\xc3\\xa9', paranoid=True)
    'utf-8: caf\\\\u00e9'
    >>> jsonescape(b'non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True)
    'non-BMP: \\\\ud834\\\\udd1e'
    >>> jsonescape(b'<foo@example.org>', paranoid=True)
    '\\\\u003cfoo@example.org\\\\u003e'
    """

    # normalize to UTF-8b first so arbitrary bytes survive the escaping
    u8chars = toutf8b(s)
    try:
        # accelerated escaper; signals unsupported input with ValueError
        return _jsonescapeu8fast(u8chars, paranoid)
    except ValueError:
        pass
    # pure-Python fallback handles whatever the fast path rejected
    return charencodepure.jsonescapeu8fallback(u8chars, paranoid)
595 595
596 596
# We need to decode/encode U+DCxx codes transparently since invalid UTF-8
# bytes are mapped to that range.
_utf8strict = r'surrogatepass'

# byte length of a UTF-8 sequence, indexed by the lead byte's high nibble
# (0 means plain ASCII; lone continuation bytes map to 1 and are rejected
# by the validation decode in getutf8char below)
_utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]


def getutf8char(s, pos):
    # type: (bytes, int) -> bytes
    """get the next full utf-8 character in the given string, starting at pos

    Raises a UnicodeError if the given location does not start a valid
    utf-8 character.
    """
    # the lead byte's high nibble determines the sequence length
    width = _utf8len[ord(s[pos : pos + 1]) >> 4]
    if not width:
        # single-byte (ASCII) character
        return s[pos : pos + 1]

    char = s[pos : pos + width]
    # round-trip decode purely to validate; raises on a bad sequence
    char.decode("utf-8", _utf8strict)
    return char
621 621
622 622
def toutf8b(s):
    # type: (bytes) -> bytes
    """convert a local, possibly-binary string into UTF-8b

    This is intended as a generic method to preserve data when working
    with schemes like JSON and XML that have no provision for
    arbitrary byte strings. As Mercurial often doesn't know
    what encoding data is in, we use so-called UTF-8b.

    If a string is already valid UTF-8 (or ASCII), it passes unmodified.
    Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
    uDC00-uDCFF.

    Principles of operation:

    - ASCII and UTF-8 data successfully round-trips and is understood
      by Unicode-oriented clients
    - filenames and file contents in arbitrary other encodings can have
      be round-tripped or recovered by clueful clients
    - local strings that have a cached known UTF-8 encoding (aka
      localstr) get sent as UTF-8 so Unicode-oriented clients get the
      Unicode data they want
    - non-lossy local strings (aka safelocalstr) get sent as UTF-8 as well
    - because we must preserve UTF-8 bytestring in places such as
      filenames, metadata can't be roundtripped without help

    (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
    arbitrary bytes into an internal Unicode format that can be
    re-encoded back into the original. Here we are exposing the
    internal surrogate encoding as a UTF-8 string.)
    """

    if isinstance(s, localstr):
        # assume that the original UTF-8 sequence would never contain
        # invalid characters in U+DCxx range
        return s._utf8
    elif isinstance(s, safelocalstr):
        # already verified that s is non-lossy in legacy encoding, which
        # shouldn't contain characters in U+DCxx range
        return fromlocal(s)
    elif isasciistr(s):
        return s
    # fast path: 0xED is the lead byte of every U+DCxx sequence, so its
    # absence means valid UTF-8 input needs no re-escaping
    if b"\xed" not in s:
        try:
            s.decode('utf-8', _utf8strict)
            return s
        except UnicodeDecodeError:
            pass

    # slow path: walk the string character by character, mapping every
    # invalid byte to the U+DCxx surrogate range
    s = pycompat.bytestr(s)
    r = bytearray()
    pos = 0
    l = len(s)
    while pos < l:
        try:
            c = getutf8char(s, pos)
            if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
                # have to re-escape existing U+DCxx characters
                c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
                pos += 1
            else:
                pos += len(c)
        except UnicodeDecodeError:
            # invalid byte: advance one byte and encode it as U+DCxx
            c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
            pos += 1
        r += c
    return bytes(r)
690 690
691 691
def fromutf8b(s):
    # type: (bytes) -> bytes
    """Given a UTF-8b string, return a local, possibly-binary string.

    return the original binary string. This
    is a round-trip process for strings like filenames, but metadata
    that's was passed through tolocal will remain in UTF-8.

    >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x
    >>> m = b"\\xc3\\xa9\\x99abcd"
    >>> toutf8b(m)
    '\\xc3\\xa9\\xed\\xb2\\x99abcd'
    >>> roundtrip(m)
    True
    >>> roundtrip(b"\\xc2\\xc2\\x80")
    True
    >>> roundtrip(b"\\xef\\xbf\\xbd")
    True
    >>> roundtrip(b"\\xef\\xef\\xbf\\xbd")
    True
    >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
    True
    """

    if isasciistr(s):
        return s
    # fast path - look for uDxxx prefixes in s
    if b"\xed" not in s:
        return s

    # We could do this with the unicode type but some Python builds
    # use UTF-16 internally (issue5031) which causes non-BMP code
    # points to be escaped. Instead, we use our handy getutf8char
    # helper again to walk the string without "decoding" it.

    s = pycompat.bytestr(s)
    r = bytearray()
    pos = 0
    l = len(s)
    while pos < l:
        c = getutf8char(s, pos)
        pos += len(c)
        # unescape U+DCxx characters back to the single byte they encode
        if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
            c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
        r += c
    return bytes(r)
@@ -1,681 +1,692 b''
1 1 # error.py - Mercurial exceptions
2 2 #
3 3 # Copyright 2005-2008 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Mercurial exceptions.
9 9
10 10 This allows us to catch exceptions at higher levels without forcing
11 11 imports.
12 12 """
13 13
14 14
15 15 import difflib
16 16
17 from typing import (
18 Any,
19 AnyStr,
20 Iterable,
21 List,
22 Optional,
23 Sequence,
24 Union,
25 )
26
17 27 # Do not import anything but pycompat here, please
18 28 from . import pycompat
19 29
20 if pycompat.TYPE_CHECKING:
21 from typing import (
22 Any,
23 AnyStr,
24 Iterable,
25 List,
26 Optional,
27 Sequence,
28 Union,
29 )
30
31 # keeps pyflakes happy
32 assert [
33 Any,
34 AnyStr,
35 Iterable,
36 List,
37 Optional,
38 Sequence,
39 Union,
40 ]
30 41
31 42
def _tobytes(exc):
    # type: (...) -> bytes
    """Byte-stringify exception in the same way as BaseException_str()"""
    args = exc.args
    if not args:
        # no arguments at all: empty message
        return b''
    if len(args) == 1:
        # single argument: stringify it bare, without parentheses
        return pycompat.bytestr(args[0])
    # several arguments: render as a quoted, parenthesized tuple
    quoted = [b"'%s'" % pycompat.bytestr(a) for a in args]
    return b'(%s)' % b', '.join(quoted)
40 51
41 52
class Hint:
    """Mix-in to provide a hint of an error

    This should come first in the inheritance list to consume a hint and
    pass remaining arguments to the exception class.
    """

    def __init__(self, *args, **kw):
        # pop 'hint' so it is not forwarded to the real exception class
        self.hint = kw.pop('hint', None)  # type: Optional[bytes]
        super(Hint, self).__init__(*args, **kw)
52 63
53 64
class Error(Hint, Exception):
    """Base class for Mercurial errors."""

    # optional exit codes associated with this error; None = unspecified.
    # Subclasses override these (see e.g. StorageError, HookAbort below).
    coarse_exit_code = None
    detailed_exit_code = None

    def __init__(self, message, hint=None):
        # type: (bytes, Optional[bytes]) -> None
        self.message = message
        self.hint = hint
        # Pass the message into the Exception constructor to help extensions
        # that look for exc.args[0].
        Exception.__init__(self, message)

    def __bytes__(self):
        return self.message

    def __str__(self):
        # type: () -> str
        # the output would be unreadable if the message was translated,
        # but do not replace it with encoding.strfromlocal(), which
        # may raise another exception.
        return pycompat.sysstr(self.__bytes__())

    def format(self):
        # type: () -> bytes
        """Render the error as a full "abort: ..." message plus hint."""
        from .i18n import _

        message = _(b"abort: %s\n") % self.message
        if self.hint:
            message += _(b"(%s)\n") % self.hint
        return message
86 97
87 98
class Abort(Error):
    """Raised if a command needs to print an error and exit."""


class StorageError(Error):
    """Raised when an error occurs in a storage layer.

    Usually subclassed by a storage-specific exception.
    """

    detailed_exit_code = 50


class RevlogError(StorageError):
    pass


class SidedataHashError(RevlogError):
    # carries the offending sidedata key plus the expected and actual
    # digests (presumably for later formatting by the caller -- verify)
    def __init__(self, key, expected, got):
        # type: (int, bytes, bytes) -> None
        self.hint = None
        self.sidedatakey = key
        self.expecteddigest = expected
        self.actualdigest = got


class FilteredIndexError(IndexError):
    __bytes__ = _tobytes
116 127
117 128
class LookupError(RevlogError, KeyError):
    def __init__(self, name, index, message):
        # type: (bytes, bytes, bytes) -> None
        self.name = name
        self.index = index
        # this can't be called 'message' because at least some installs of
        # Python 2.6+ complain about the 'message' property being deprecated
        self.lookupmessage = message
        # a 20-byte name is a binary node: render it as hex for display
        if isinstance(name, bytes) and len(name) == 20:
            from .node import hex

            name = hex(name)
        # if name is a binary node, it can be None
        RevlogError.__init__(
            self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message)
        )

    # explicitly delegate so RevlogError's formatting wins over KeyError's
    def __bytes__(self):
        return RevlogError.__bytes__(self)

    def __str__(self):
        return RevlogError.__str__(self)


class AmbiguousPrefixLookupError(LookupError):
    pass


class FilteredLookupError(LookupError):
    pass


class ManifestLookupError(LookupError):
    pass
152 163
153 164
class CommandError(Exception):
    """Exception raised on errors in parsing the command line."""

    def __init__(self, command, message):
        # type: (Optional[bytes], bytes) -> None
        self.command = command
        self.message = message
        super(CommandError, self).__init__()

    __bytes__ = _tobytes


class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""

    def __init__(self, command, all_commands=None):
        # type: (bytes, Optional[List[bytes]]) -> None
        self.command = command
        # optional full command list (presumably for suggestion hints by
        # the caller -- verify against usage)
        self.all_commands = all_commands
        super(UnknownCommand, self).__init__()

    __bytes__ = _tobytes


class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

    def __init__(self, prefix, matches):
        # type: (bytes, List[bytes]) -> None
        self.prefix = prefix
        self.matches = matches
        super(AmbiguousCommand, self).__init__()

    __bytes__ = _tobytes


class WorkerError(Exception):
    """Exception raised when a worker process dies."""

    def __init__(self, status_code):
        # type: (int) -> None
        self.status_code = status_code
        # Pass status code to superclass just so it becomes part of __bytes__
        super(WorkerError, self).__init__(status_code)

    __bytes__ = _tobytes
200 211
201 212
class InterventionRequired(Abort):
    """Exception raised when a command requires human intervention."""

    coarse_exit_code = 1
    detailed_exit_code = 240

    def format(self):
        # type: () -> bytes
        # unlike Error.format(), no "abort:" prefix -- this is guidance,
        # not a failure report
        from .i18n import _

        message = _(b"%s\n") % self.message
        if self.hint:
            message += _(b"(%s)\n") % self.hint
        return message


class ConflictResolutionRequired(InterventionRequired):
    """Exception raised when a continuable command required merge conflict resolution."""

    def __init__(self, opname):
        # type: (bytes) -> None
        from .i18n import _

        # opname is the command to resume (e.g. passed to '--continue')
        self.opname = opname
        InterventionRequired.__init__(
            self,
            _(
                b"unresolved conflicts (see 'hg resolve', then 'hg %s --continue')"
            )
            % opname,
        )
233 244
234 245
class InputError(Abort):
    """Indicates that the user made an error in their input.

    Examples: Invalid command, invalid flags, invalid revision.
    """

    detailed_exit_code = 10


class StateError(Abort):
    """Indicates that the operation might work if retried in a different state.

    Examples: Unresolved merge conflicts, unfinished operations.
    """

    detailed_exit_code = 20


class CanceledError(Abort):
    """Indicates that the user canceled the operation.

    Examples: Close commit editor with error status, quit chistedit.
    """

    detailed_exit_code = 250


class SecurityError(Abort):
    """Indicates that some aspect of security failed.

    Examples: Bad server credentials, expired local credentials for network
    filesystem, mismatched GPG signature, DoS protection.
    """

    detailed_exit_code = 150


class HookLoadError(Abort):
    """raised when loading a hook fails, aborting an operation

    Exists to allow more specialized catching."""


class HookAbort(Abort):
    """raised when a validation hook fails, aborting an operation

    Exists to allow more specialized catching."""

    detailed_exit_code = 40
284 295
285 296
class ConfigError(Abort):
    """Exception raised when parsing config files"""

    detailed_exit_code = 30

    def __init__(self, message, location=None, hint=None):
        # type: (bytes, Optional[bytes], Optional[bytes]) -> None
        super(ConfigError, self).__init__(message, hint=hint)
        # where the bad value was found; included in format() when set
        self.location = location

    def format(self):
        # type: () -> bytes
        from .i18n import _

        if self.location is not None:
            message = _(b"config error at %s: %s\n") % (
                pycompat.bytestr(self.location),
                self.message,
            )
        else:
            message = _(b"config error: %s\n") % self.message
        if self.hint:
            message += _(b"(%s)\n") % self.hint
        return message
310 321
311 322
class UpdateAbort(Abort):
    """Raised when an update is aborted for destination issue"""


class MergeDestAbort(Abort):
    """Raised when an update is aborted for destination issues"""


class NoMergeDestAbort(MergeDestAbort):
    """Raised when an update is aborted because there is nothing to merge"""


class ManyMergeDestAbort(MergeDestAbort):
    """Raised when an update is aborted because destination is ambiguous"""


class ResponseExpected(Abort):
    """Raised when an EOF is received for a prompt"""

    def __init__(self):
        from .i18n import _

        Abort.__init__(self, _(b'response expected'))


class RemoteError(Abort):
    """Exception raised when interacting with a remote repo fails"""

    detailed_exit_code = 100


class OutOfBandError(RemoteError):
    """Exception raised when a remote repo reports failure"""

    def __init__(self, message=None, hint=None):
        # type: (Optional[bytes], Optional[bytes]) -> None
        from .i18n import _

        if message:
            # Abort.format() adds a trailing newline
            message = _(b"remote error:\n%s") % message.rstrip(b'\n')
        else:
            message = _(b"remote error")
        super(OutOfBandError, self).__init__(message, hint=hint)
356 367
357 368
class ParseError(Abort):
    """Raised when parsing config files and {rev,file}sets (msg[, pos])"""

    detailed_exit_code = 10

    def __init__(self, message, location=None, hint=None):
        # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None
        super(ParseError, self).__init__(message, hint=hint)
        # either a byte-offset (int) or a textual location (bytes)
        self.location = location

    def format(self):
        # type: () -> bytes
        from .i18n import _

        if self.location is not None:
            message = _(b"hg: parse error at %s: %s\n") % (
                pycompat.bytestr(self.location),
                self.message,
            )
        else:
            message = _(b"hg: parse error: %s\n") % self.message
        if self.hint:
            message += _(b"(%s)\n") % self.hint
        return message


class PatchError(Exception):
    __bytes__ = _tobytes


class PatchParseError(PatchError):
    __bytes__ = _tobytes


class PatchApplicationError(PatchError):
    __bytes__ = _tobytes
394 405
395 406
def getsimilar(symbols, value):
    # type: (Iterable[bytes], bytes) -> List[bytes]
    """Return the symbols that look similar to *value*, in input order."""

    def score(candidate):
        return difflib.SequenceMatcher(None, value, candidate).ratio()

    # The cutoff for similarity here is pretty arbitrary. It should
    # probably be investigated and tweaked.
    return [symbol for symbol in symbols if score(symbol) > 0.6]
402 413
403 414
def similarity_hint(similar):
    # type: (List[bytes]) -> Optional[bytes]
    """Build a "did you mean ...?" hint from a list of similar symbols.

    Returns None when there is nothing to suggest.
    """
    from .i18n import _

    if not similar:
        return None
    if len(similar) == 1:
        return _(b"did you mean %s?") % similar[0]
    joined = b", ".join(sorted(similar))
    return _(b"did you mean one of %s?") % joined
415 426
416 427
class UnknownIdentifier(ParseError):
    """Exception raised when a {rev,file}set references an unknown identifier"""

    def __init__(self, function, symbols):
        # type: (bytes, Iterable[bytes]) -> None
        from .i18n import _

        # suggest close matches from the known symbol table as the hint
        similar = getsimilar(symbols, function)
        hint = similarity_hint(similar)

        ParseError.__init__(
            self, _(b"unknown identifier: %s") % function, hint=hint
        )


class RepoError(Hint, Exception):
    __bytes__ = _tobytes


class RepoLookupError(RepoError):
    pass


class FilteredRepoLookupError(RepoLookupError):
    pass


class CapabilityError(RepoError):
    pass


class RequirementError(RepoError):
    """Exception raised if .hg/requires has an unknown entry."""


class StdioError(IOError):
    """Raised if I/O to stdout or stderr fails"""

    def __init__(self, err):
        # type: (IOError) -> None
        # re-wrap so callers can catch stdio failures specifically
        IOError.__init__(self, err.errno, err.strerror)

    # no __bytes__() because error message is derived from the standard IOError
460 471
461 472
class UnsupportedMergeRecords(Abort):
    """Raised when the merge state contains record types we cannot handle."""

    def __init__(self, recordtypes):
        # type: (Iterable[bytes]) -> None
        from .i18n import _

        self.recordtypes = sorted(recordtypes)
        s = b' '.join(self.recordtypes)
        Abort.__init__(
            self,
            _(b'unsupported merge state records: %s') % s,
            hint=_(
                b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
                b'more information'
            ),
        )


class UnknownVersion(Abort):
    """generic exception for aborting from an encounter with an unknown version"""

    def __init__(self, msg, hint=None, version=None):
        # type: (bytes, Optional[bytes], Optional[bytes]) -> None
        # keep the offending version around for callers to inspect
        self.version = version
        super(UnknownVersion, self).__init__(msg, hint=hint)


class LockError(IOError):
    def __init__(self, errno, strerror, filename, desc):
        # TODO: figure out if this should be bytes or str
        # _type: (int, str, str, bytes) -> None
        IOError.__init__(self, errno, strerror, filename)
        self.desc = desc

    # no __bytes__() because error message is derived from the standard IOError


class LockHeld(LockError):
    def __init__(self, errno, filename, desc, locker):
        LockError.__init__(self, errno, b'Lock held', filename, desc)
        # identity of whoever currently holds the lock
        self.locker = locker


class LockUnavailable(LockError):
    pass
506 517
507 518
# LockError is for errors while acquiring the lock -- this is unrelated
class LockInheritanceContractViolation(RuntimeError):
    __bytes__ = _tobytes


class ResponseError(Exception):
    """Raised to print an error with part of output and exit."""

    __bytes__ = _tobytes


# derived from KeyboardInterrupt to simplify some breakout code
class SignalInterrupt(KeyboardInterrupt):
    """Exception raised on SIGTERM and SIGHUP."""


class SignatureError(Exception):
    __bytes__ = _tobytes


class PushRaced(RuntimeError):
    """An exception raised during unbundling that indicate a push race"""

    __bytes__ = _tobytes


class ProgrammingError(Hint, RuntimeError):
    """Raised if a mercurial (core or extension) developer made a mistake"""

    def __init__(self, msg, *args, **kwargs):
        # type: (AnyStr, Any, Any) -> None
        # On Python 3, turn the message back into a string since this is
        # an internal-only error that won't be printed except in a
        # stack trace.
        msg = pycompat.sysstr(msg)
        super(ProgrammingError, self).__init__(msg, *args, **kwargs)

    __bytes__ = _tobytes


class WdirUnsupported(Exception):
    """An exception which is raised when 'wdir()' is not supported"""

    __bytes__ = _tobytes
552 563
553 564
# bundle2 related errors
class BundleValueError(ValueError):
    """error raised when bundle2 cannot be processed"""

    __bytes__ = _tobytes


class BundleUnknownFeatureError(BundleValueError):
    def __init__(self, parttype=None, params=(), values=()):
        self.parttype = parttype
        self.params = params
        self.values = values
        if self.parttype is None:
            msg = b'Stream Parameter'
        else:
            msg = parttype
        # pair each parameter with its value when both lists were given;
        # otherwise just list the parameter names
        entries = self.params
        if self.params and self.values:
            assert len(self.params) == len(self.values)
            entries = []
            for idx, par in enumerate(self.params):
                val = self.values[idx]
                if val is None:
                    # NOTE(review): None is appended as-is here; the
                    # b', '.join below would TypeError on it -- confirm
                    # callers never pass a None value
                    entries.append(val)
                else:
                    entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
        if entries:
            msg = b'%s - %s' % (msg, b', '.join(entries))
        ValueError.__init__(self, msg)  # TODO: convert to str?


class ReadOnlyPartError(RuntimeError):
    """error raised when code tries to alter a part being generated"""

    __bytes__ = _tobytes


class PushkeyFailed(Abort):
    """error raised when a pushkey part failed to update a value"""

    def __init__(
        self, partid, namespace=None, key=None, new=None, old=None, ret=None
    ):
        self.partid = partid
        self.namespace = namespace
        self.key = key
        self.new = new
        self.old = old
        self.ret = ret
        # no i18n expected to be processed into a better message
        Abort.__init__(
            self, b'failed to update value for "%s/%s"' % (namespace, key)
        )
607 618
608 619
class CensoredNodeError(StorageError):
    """error raised when content verification fails on a censored node

    Also contains the tombstone data substituted for the uncensored data.
    """

    def __init__(self, filename, node, tombstone):
        # type: (bytes, bytes, bytes) -> None
        from .node import short

        StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
        self.tombstone = tombstone


class CensoredBaseError(StorageError):
    """error raised when a delta is rejected because its base is censored

    A delta based on a censored revision must be formed as single patch
    operation which replaces the entire base with new content. This ensures
    the delta may be applied by clones which have not censored the base.
    """


class InvalidBundleSpecification(Exception):
    """error raised when a bundle specification is invalid.

    This is used for syntax errors as opposed to support errors.
    """

    __bytes__ = _tobytes


class UnsupportedBundleSpecification(Exception):
    """error raised when a bundle specification is not supported."""

    __bytes__ = _tobytes


class CorruptedState(Exception):
    """error raised when a command is not able to read its state from file"""

    __bytes__ = _tobytes


class CorruptedDirstate(Exception):
    """error raised the dirstate appears corrupted on-disk. It may be due to
    a dirstate version mismatch (i.e. expecting v2 and finding v1 on disk)."""

    __bytes__ = _tobytes


class PeerTransportError(Abort):
    """Transport-level I/O error when communicating with a peer repo."""


class InMemoryMergeConflictsError(Exception):
    """Exception raised when merge conflicts arose during an in-memory merge."""

    __bytes__ = _tobytes
668 679
669 680
class WireprotoCommandError(Exception):
    """Represents an error during execution of a wire protocol command.

    Should only be thrown by wire protocol version 2 commands.

    The error is a formatter string and an optional iterable of arguments.
    """

    def __init__(self, message, args=None):
        # type: (bytes, Optional[Sequence[bytes]]) -> None
        # message is a %-style format string; args fills it in at
        # rendering time (done by the caller, not here)
        self.message = message
        self.messageargs = args
@@ -1,124 +1,128 b''
1 1 # i18n.py - internationalization support for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import gettext as gettextmod
10 10 import locale
11 11 import os
12 12 import sys
13 13
14 from typing import (
15 Callable,
16 List,
17 )
18
14 19 from .utils import resourceutil
15 20 from . import (
16 21 encoding,
17 22 pycompat,
18 23 )
19 24
20 if pycompat.TYPE_CHECKING:
21 from typing import (
22 Callable,
23 List,
24 )
25
25 # keeps pyflakes happy
26 assert [
27 Callable,
28 List,
29 ]
26 30
27 31 # modelled after templater.templatepath:
28 32 if getattr(sys, 'frozen', None) is not None:
29 33 module = pycompat.sysexecutable
30 34 else:
31 35 module = pycompat.fsencode(__file__)
32 36
33 37 _languages = None
34 38 if (
35 39 pycompat.iswindows
36 40 and b'LANGUAGE' not in encoding.environ
37 41 and b'LC_ALL' not in encoding.environ
38 42 and b'LC_MESSAGES' not in encoding.environ
39 43 and b'LANG' not in encoding.environ
40 44 ):
41 45 # Try to detect UI language by "User Interface Language Management" API
42 46 # if no locale variables are set. Note that locale.getdefaultlocale()
43 47 # uses GetLocaleInfo(), which may be different from UI language.
44 48 # (See http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx )
45 49 try:
46 50 import ctypes
47 51
48 52 # pytype: disable=module-attr
49 53 langid = ctypes.windll.kernel32.GetUserDefaultUILanguage()
50 54 # pytype: enable=module-attr
51 55
52 56 _languages = [locale.windows_locale[langid]]
53 57 except (ImportError, AttributeError, KeyError):
54 58 # ctypes not found or unknown langid
55 59 pass
56 60
57 61
58 62 datapath = pycompat.fsdecode(resourceutil.datapath)
59 63 localedir = os.path.join(datapath, 'locale')
60 64 t = gettextmod.translation('hg', localedir, _languages, fallback=True)
61 65 try:
62 66 _ugettext = t.ugettext # pytype: disable=attribute-error
63 67 except AttributeError:
64 68 _ugettext = t.gettext
65 69
66 70
67 71 _msgcache = {} # encoding: {message: translation}
68 72
69 73
70 74 def gettext(message):
71 75 # type: (bytes) -> bytes
72 76 """Translate message.
73 77
74 78 The message is looked up in the catalog to get a Unicode string,
75 79 which is encoded in the local encoding before being returned.
76 80
77 81 Important: message is restricted to characters in the encoding
78 82 given by sys.getdefaultencoding() which is most likely 'ascii'.
79 83 """
80 84 # If message is None, t.ugettext will return u'None' as the
81 85 # translation whereas our callers expect us to return None.
82 86 if message is None or not _ugettext:
83 87 return message
84 88
85 89 cache = _msgcache.setdefault(encoding.encoding, {})
86 90 if message not in cache:
87 91 if type(message) is str:
88 92 # goofy unicode docstrings in test
89 93 paragraphs = message.split(u'\n\n') # type: List[str]
90 94 else:
91 95 # should be ascii, but we have unicode docstrings in test, which
92 96 # are converted to utf-8 bytes on Python 3.
93 97 paragraphs = [p.decode("utf-8") for p in message.split(b'\n\n')]
94 98 # Be careful not to translate the empty string -- it holds the
95 99 # meta data of the .po file.
96 100 u = u'\n\n'.join([p and _ugettext(p) or u'' for p in paragraphs])
97 101 try:
98 102 # encoding.tolocal cannot be used since it will first try to
99 103 # decode the Unicode string. Calling u.decode(enc) really
100 104 # means u.encode(sys.getdefaultencoding()).decode(enc). Since
101 105 # the Python encoding defaults to 'ascii', this fails if the
102 106 # translated string use non-ASCII characters.
103 107 encodingstr = pycompat.sysstr(encoding.encoding)
104 108 cache[message] = u.encode(encodingstr, "replace")
105 109 except LookupError:
106 110 # An unknown encoding results in a LookupError.
107 111 cache[message] = message
108 112 return cache[message]
109 113
110 114
111 115 def _plain():
112 116 if (
113 117 b'HGPLAIN' not in encoding.environ
114 118 and b'HGPLAINEXCEPT' not in encoding.environ
115 119 ):
116 120 return False
117 121 exceptions = encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
118 122 return b'i18n' not in exceptions
119 123
120 124
121 125 if _plain():
122 126 _ = lambda message: message # type: Callable[[bytes], bytes]
123 127 else:
124 128 _ = gettext
@@ -1,1324 +1,1333 b''
1 1 # logcmdutil.py - utility for log-like commands
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import itertools
10 10 import os
11 11 import posixpath
12 12
13 from typing import (
14 Any,
15 Callable,
16 Dict,
17 Optional,
18 Sequence,
19 Tuple,
20 )
21
13 22 from .i18n import _
14 23 from .node import wdirrev
15 24
16 25 from .thirdparty import attr
17 26
18 27 from . import (
19 28 dagop,
20 29 diffutil,
21 30 error,
22 31 formatter,
23 32 graphmod,
24 33 match as matchmod,
25 34 mdiff,
26 35 patch,
27 36 pathutil,
28 37 pycompat,
29 38 revset,
30 39 revsetlang,
31 40 scmutil,
32 41 smartset,
33 42 templatekw,
34 43 templater,
35 44 util,
36 45 )
37 46 from .utils import (
38 47 dateutil,
39 48 stringutil,
40 49 )
41 50
51 # keeps pyflakes happy
52 assert [
53 Any,
54 Callable,
55 Dict,
56 Optional,
57 Sequence,
58 Tuple,
59 ]
42 60
43 if pycompat.TYPE_CHECKING:
44 from typing import (
45 Any,
46 Callable,
47 Dict,
48 Optional,
49 Sequence,
50 Tuple,
51 )
52
53 for t in (Any, Callable, Dict, Optional, Tuple):
54 assert t
61 # keep pyflakes happy
62 for t in (Any, Callable, Dict, Optional, Tuple):
63 assert t
55 64
56 65
57 66 def getlimit(opts):
58 67 """get the log limit according to option -l/--limit"""
59 68 limit = opts.get(b'limit')
60 69 if limit:
61 70 try:
62 71 limit = int(limit)
63 72 except ValueError:
64 73 raise error.InputError(_(b'limit must be a positive integer'))
65 74 if limit <= 0:
66 75 raise error.InputError(_(b'limit must be positive'))
67 76 else:
68 77 limit = None
69 78 return limit
70 79
71 80
72 81 def get_diff_chunks(
73 82 ui,
74 83 repo,
75 84 diffopts,
76 85 ctx1,
77 86 ctx2,
78 87 match,
79 88 changes=None,
80 89 stat=False,
81 90 prefix=b'',
82 91 root=b'',
83 92 hunksfilterfn=None,
84 93 ):
85 94 if root:
86 95 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
87 96 else:
88 97 relroot = b''
89 98 copysourcematch = None
90 99
91 100 def compose(f, g):
92 101 return lambda x: f(g(x))
93 102
94 103 def pathfn(f):
95 104 return posixpath.join(prefix, f)
96 105
97 106 if relroot != b'':
98 107 # XXX relative roots currently don't work if the root is within a
99 108 # subrepo
100 109 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
101 110 uirelroot = uipathfn(pathfn(relroot))
102 111 relroot += b'/'
103 112 for matchroot in match.files():
104 113 if not matchroot.startswith(relroot):
105 114 ui.warn(
106 115 _(b'warning: %s not inside relative root %s\n')
107 116 % (uipathfn(pathfn(matchroot)), uirelroot)
108 117 )
109 118
110 119 relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
111 120 match = matchmod.intersectmatchers(match, relrootmatch)
112 121 copysourcematch = relrootmatch
113 122
114 123 checkroot = repo.ui.configbool(
115 124 b'devel', b'all-warnings'
116 125 ) or repo.ui.configbool(b'devel', b'check-relroot')
117 126
118 127 def relrootpathfn(f):
119 128 if checkroot and not f.startswith(relroot):
120 129 raise AssertionError(
121 130 b"file %s doesn't start with relroot %s" % (f, relroot)
122 131 )
123 132 return f[len(relroot) :]
124 133
125 134 pathfn = compose(relrootpathfn, pathfn)
126 135
127 136 if stat:
128 137 diffopts = diffopts.copy(context=0, noprefix=False)
129 138 # If an explicit --root was given, don't respect ui.relative-paths
130 139 if not relroot:
131 140 pathfn = compose(scmutil.getuipathfn(repo), pathfn)
132 141
133 142 return ctx2.diff(
134 143 ctx1,
135 144 match,
136 145 changes,
137 146 opts=diffopts,
138 147 pathfn=pathfn,
139 148 copysourcematch=copysourcematch,
140 149 hunksfilterfn=hunksfilterfn,
141 150 )
142 151
143 152
144 153 def diffordiffstat(
145 154 ui,
146 155 repo,
147 156 diffopts,
148 157 ctx1,
149 158 ctx2,
150 159 match,
151 160 changes=None,
152 161 stat=False,
153 162 fp=None,
154 163 graphwidth=0,
155 164 prefix=b'',
156 165 root=b'',
157 166 listsubrepos=False,
158 167 hunksfilterfn=None,
159 168 ):
160 169 '''show diff or diffstat.'''
161 170
162 171 chunks = get_diff_chunks(
163 172 ui,
164 173 repo,
165 174 diffopts,
166 175 ctx1,
167 176 ctx2,
168 177 match,
169 178 changes=changes,
170 179 stat=stat,
171 180 prefix=prefix,
172 181 root=root,
173 182 hunksfilterfn=hunksfilterfn,
174 183 )
175 184
176 185 if stat:
177 186 diffopts = diffopts.copy(context=0, noprefix=False)
178 187 width = 80
179 188 if not ui.plain():
180 189 width = ui.termwidth() - graphwidth
181 190
182 191 if fp is not None or ui.canwritewithoutlabels():
183 192 out = fp or ui
184 193 if stat:
185 194 chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
186 195 for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
187 196 out.write(chunk)
188 197 else:
189 198 if stat:
190 199 chunks = patch.diffstatui(util.iterlines(chunks), width=width)
191 200 else:
192 201 chunks = patch.difflabel(
193 202 lambda chunks, **kwargs: chunks, chunks, opts=diffopts
194 203 )
195 204 if ui.canbatchlabeledwrites():
196 205
197 206 def gen():
198 207 for chunk, label in chunks:
199 208 yield ui.label(chunk, label=label)
200 209
201 210 for chunk in util.filechunkiter(util.chunkbuffer(gen())):
202 211 ui.write(chunk)
203 212 else:
204 213 for chunk, label in chunks:
205 214 ui.write(chunk, label=label)
206 215
207 216 node2 = ctx2.node()
208 217 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
209 218 tempnode2 = node2
210 219 try:
211 220 if node2 is not None:
212 221 tempnode2 = ctx2.substate[subpath][1]
213 222 except KeyError:
214 223 # A subrepo that existed in node1 was deleted between node1 and
215 224 # node2 (inclusive). Thus, ctx2's substate won't contain that
216 225 # subpath. The best we can do is to ignore it.
217 226 tempnode2 = None
218 227 submatch = matchmod.subdirmatcher(subpath, match)
219 228 subprefix = repo.wvfs.reljoin(prefix, subpath)
220 229 if listsubrepos or match.exact(subpath) or any(submatch.files()):
221 230 sub.diff(
222 231 ui,
223 232 diffopts,
224 233 tempnode2,
225 234 submatch,
226 235 changes=changes,
227 236 stat=stat,
228 237 fp=fp,
229 238 prefix=subprefix,
230 239 )
231 240
232 241
233 242 class changesetdiffer:
234 243 """Generate diff of changeset with pre-configured filtering functions"""
235 244
236 245 def _makefilematcher(self, ctx):
237 246 return scmutil.matchall(ctx.repo())
238 247
239 248 def _makehunksfilter(self, ctx):
240 249 return None
241 250
242 251 def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False):
243 252 diffordiffstat(
244 253 ui,
245 254 ctx.repo(),
246 255 diffopts,
247 256 diffutil.diff_parent(ctx),
248 257 ctx,
249 258 match=self._makefilematcher(ctx),
250 259 stat=stat,
251 260 graphwidth=graphwidth,
252 261 hunksfilterfn=self._makehunksfilter(ctx),
253 262 )
254 263
255 264 def getdiffstats(self, ui, ctx, diffopts, stat=False):
256 265 chunks = get_diff_chunks(
257 266 ui,
258 267 ctx.repo(),
259 268 diffopts,
260 269 diffutil.diff_parent(ctx),
261 270 ctx,
262 271 match=self._makefilematcher(ctx),
263 272 stat=stat,
264 273 hunksfilterfn=self._makehunksfilter(ctx),
265 274 )
266 275
267 276 diffdata = []
268 277 for filename, additions, removals, binary in patch.diffstatdata(
269 278 util.iterlines(chunks)
270 279 ):
271 280 diffdata.append(
272 281 {
273 282 b"name": filename,
274 283 b"additions": additions,
275 284 b"removals": removals,
276 285 b"binary": binary,
277 286 }
278 287 )
279 288
280 289 return diffdata
281 290
282 291
283 292 def changesetlabels(ctx):
284 293 labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
285 294 if ctx.obsolete():
286 295 labels.append(b'changeset.obsolete')
287 296 if ctx.isunstable():
288 297 labels.append(b'changeset.unstable')
289 298 for instability in ctx.instabilities():
290 299 labels.append(b'instability.%s' % instability)
291 300 return b' '.join(labels)
292 301
293 302
294 303 class changesetprinter:
295 304 '''show changeset information when templating not requested.'''
296 305
297 306 def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
298 307 self.ui = ui
299 308 self.repo = repo
300 309 self.buffered = buffered
301 310 self._differ = differ or changesetdiffer()
302 311 self._diffopts = patch.diffallopts(ui, diffopts)
303 312 self._includestat = diffopts and diffopts.get(b'stat')
304 313 self._includediff = diffopts and diffopts.get(b'patch')
305 314 self.header = {}
306 315 self.hunk = {}
307 316 self.lastheader = None
308 317 self.footer = None
309 318 self._columns = templatekw.getlogcolumns()
310 319
311 320 def flush(self, ctx):
312 321 rev = ctx.rev()
313 322 if rev in self.header:
314 323 h = self.header[rev]
315 324 if h != self.lastheader:
316 325 self.lastheader = h
317 326 self.ui.write(h)
318 327 del self.header[rev]
319 328 if rev in self.hunk:
320 329 self.ui.write(self.hunk[rev])
321 330 del self.hunk[rev]
322 331
323 332 def close(self):
324 333 if self.footer:
325 334 self.ui.write(self.footer)
326 335
327 336 def show(self, ctx, copies=None, **props):
328 337 props = pycompat.byteskwargs(props)
329 338 if self.buffered:
330 339 self.ui.pushbuffer(labeled=True)
331 340 self._show(ctx, copies, props)
332 341 self.hunk[ctx.rev()] = self.ui.popbuffer()
333 342 else:
334 343 self._show(ctx, copies, props)
335 344
336 345 def _show(self, ctx, copies, props):
337 346 '''show a single changeset or file revision'''
338 347 changenode = ctx.node()
339 348 graphwidth = props.get(b'graphwidth', 0)
340 349
341 350 if self.ui.quiet:
342 351 self.ui.write(
343 352 b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
344 353 )
345 354 return
346 355
347 356 columns = self._columns
348 357 self.ui.write(
349 358 columns[b'changeset'] % scmutil.formatchangeid(ctx),
350 359 label=changesetlabels(ctx),
351 360 )
352 361
353 362 # branches are shown first before any other names due to backwards
354 363 # compatibility
355 364 branch = ctx.branch()
356 365 # don't show the default branch name
357 366 if branch != b'default':
358 367 self.ui.write(columns[b'branch'] % branch, label=b'log.branch')
359 368
360 369 for nsname, ns in self.repo.names.items():
361 370 # branches has special logic already handled above, so here we just
362 371 # skip it
363 372 if nsname == b'branches':
364 373 continue
365 374 # we will use the templatename as the color name since those two
366 375 # should be the same
367 376 for name in ns.names(self.repo, changenode):
368 377 self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
369 378 if self.ui.debugflag:
370 379 self.ui.write(
371 380 columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
372 381 )
373 382 for pctx in scmutil.meaningfulparents(self.repo, ctx):
374 383 label = b'log.parent changeset.%s' % pctx.phasestr()
375 384 self.ui.write(
376 385 columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
377 386 )
378 387
379 388 if self.ui.debugflag:
380 389 mnode = ctx.manifestnode()
381 390 if mnode is None:
382 391 mnode = self.repo.nodeconstants.wdirid
383 392 mrev = wdirrev
384 393 else:
385 394 mrev = self.repo.manifestlog.rev(mnode)
386 395 self.ui.write(
387 396 columns[b'manifest']
388 397 % scmutil.formatrevnode(self.ui, mrev, mnode),
389 398 label=b'ui.debug log.manifest',
390 399 )
391 400 self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
392 401 self.ui.write(
393 402 columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
394 403 )
395 404
396 405 if ctx.isunstable():
397 406 instabilities = ctx.instabilities()
398 407 self.ui.write(
399 408 columns[b'instability'] % b', '.join(instabilities),
400 409 label=b'log.instability',
401 410 )
402 411
403 412 elif ctx.obsolete():
404 413 self._showobsfate(ctx)
405 414
406 415 self._exthook(ctx)
407 416
408 417 if self.ui.debugflag:
409 418 for key, value in zip(
410 419 [b'files', b'files+', b'files-'],
411 420 [ctx.filesmodified(), ctx.filesadded(), ctx.filesremoved()],
412 421 ):
413 422 if value:
414 423 self.ui.write(
415 424 columns[key] % b" ".join(value),
416 425 label=b'ui.debug log.files',
417 426 )
418 427 elif ctx.files() and self.ui.verbose:
419 428 self.ui.write(
420 429 columns[b'files'] % b" ".join(ctx.files()),
421 430 label=b'ui.note log.files',
422 431 )
423 432 if copies and self.ui.verbose:
424 433 copies = [b'%s (%s)' % c for c in copies]
425 434 self.ui.write(
426 435 columns[b'copies'] % b' '.join(copies),
427 436 label=b'ui.note log.copies',
428 437 )
429 438
430 439 extra = ctx.extra()
431 440 if extra and self.ui.debugflag:
432 441 for key, value in sorted(extra.items()):
433 442 self.ui.write(
434 443 columns[b'extra'] % (key, stringutil.escapestr(value)),
435 444 label=b'ui.debug log.extra',
436 445 )
437 446
438 447 description = ctx.description().strip()
439 448 if description:
440 449 if self.ui.verbose:
441 450 self.ui.write(
442 451 _(b"description:\n"), label=b'ui.note log.description'
443 452 )
444 453 self.ui.write(description, label=b'ui.note log.description')
445 454 self.ui.write(b"\n\n")
446 455 else:
447 456 self.ui.write(
448 457 columns[b'summary'] % stringutil.firstline(description),
449 458 label=b'log.summary',
450 459 )
451 460 self.ui.write(b"\n")
452 461
453 462 self._showpatch(ctx, graphwidth)
454 463
455 464 def _showobsfate(self, ctx):
456 465 # TODO: do not depend on templater
457 466 tres = formatter.templateresources(self.repo.ui, self.repo)
458 467 t = formatter.maketemplater(
459 468 self.repo.ui,
460 469 b'{join(obsfate, "\n")}',
461 470 defaults=templatekw.keywords,
462 471 resources=tres,
463 472 )
464 473 obsfate = t.renderdefault({b'ctx': ctx}).splitlines()
465 474
466 475 if obsfate:
467 476 for obsfateline in obsfate:
468 477 self.ui.write(
469 478 self._columns[b'obsolete'] % obsfateline,
470 479 label=b'log.obsfate',
471 480 )
472 481
473 482 def _exthook(self, ctx):
474 483 """empty method used by extension as a hook point"""
475 484
476 485 def _showpatch(self, ctx, graphwidth=0):
477 486 if self._includestat:
478 487 self._differ.showdiff(
479 488 self.ui, ctx, self._diffopts, graphwidth, stat=True
480 489 )
481 490 if self._includestat and self._includediff:
482 491 self.ui.write(b"\n")
483 492 if self._includediff:
484 493 self._differ.showdiff(
485 494 self.ui, ctx, self._diffopts, graphwidth, stat=False
486 495 )
487 496 if self._includestat or self._includediff:
488 497 self.ui.write(b"\n")
489 498
490 499
491 500 class changesetformatter(changesetprinter):
492 501 """Format changeset information by generic formatter"""
493 502
494 503 def __init__(
495 504 self, ui, repo, fm, differ=None, diffopts=None, buffered=False
496 505 ):
497 506 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
498 507 self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
499 508 self._fm = fm
500 509
501 510 def close(self):
502 511 self._fm.end()
503 512
504 513 def _show(self, ctx, copies, props):
505 514 '''show a single changeset or file revision'''
506 515 fm = self._fm
507 516 fm.startitem()
508 517 fm.context(ctx=ctx)
509 518 fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))
510 519
511 520 datahint = fm.datahint()
512 521 if self.ui.quiet and not datahint:
513 522 return
514 523
515 524 fm.data(
516 525 branch=ctx.branch(),
517 526 phase=ctx.phasestr(),
518 527 user=ctx.user(),
519 528 date=fm.formatdate(ctx.date()),
520 529 desc=ctx.description(),
521 530 bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
522 531 tags=fm.formatlist(ctx.tags(), name=b'tag'),
523 532 parents=fm.formatlist(
524 533 [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
525 534 ),
526 535 )
527 536
528 537 if self.ui.debugflag or b'manifest' in datahint:
529 538 fm.data(
530 539 manifest=fm.hexfunc(
531 540 ctx.manifestnode() or self.repo.nodeconstants.wdirid
532 541 )
533 542 )
534 543 if self.ui.debugflag or b'extra' in datahint:
535 544 fm.data(extra=fm.formatdict(ctx.extra()))
536 545
537 546 if (
538 547 self.ui.debugflag
539 548 or b'modified' in datahint
540 549 or b'added' in datahint
541 550 or b'removed' in datahint
542 551 ):
543 552 fm.data(
544 553 modified=fm.formatlist(ctx.filesmodified(), name=b'file'),
545 554 added=fm.formatlist(ctx.filesadded(), name=b'file'),
546 555 removed=fm.formatlist(ctx.filesremoved(), name=b'file'),
547 556 )
548 557
549 558 verbose = not self.ui.debugflag and self.ui.verbose
550 559 if verbose or b'files' in datahint:
551 560 fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
552 561 if verbose and copies or b'copies' in datahint:
553 562 fm.data(
554 563 copies=fm.formatdict(copies or {}, key=b'name', value=b'source')
555 564 )
556 565
557 566 if self._includestat or b'diffstat' in datahint:
558 567 data = self._differ.getdiffstats(
559 568 self.ui, ctx, self._diffopts, stat=True
560 569 )
561 570 fm.data(diffstat=fm.formatlist(data, name=b'diffstat'))
562 571 if self._includediff or b'diff' in datahint:
563 572 self.ui.pushbuffer()
564 573 self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
565 574 fm.data(diff=self.ui.popbuffer())
566 575
567 576
568 577 class changesettemplater(changesetprinter):
569 578 """format changeset information.
570 579
571 580 Note: there are a variety of convenience functions to build a
572 581 changesettemplater for common cases. See functions such as:
573 582 maketemplater, changesetdisplayer, buildcommittemplate, or other
574 583 functions that use changesest_templater.
575 584 """
576 585
577 586 # Arguments before "buffered" used to be positional. Consider not
578 587 # adding/removing arguments before "buffered" to not break callers.
579 588 def __init__(
580 589 self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
581 590 ):
582 591 changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
583 592 # tres is shared with _graphnodeformatter()
584 593 self._tresources = tres = formatter.templateresources(ui, repo)
585 594 self.t = formatter.loadtemplater(
586 595 ui,
587 596 tmplspec,
588 597 defaults=templatekw.keywords,
589 598 resources=tres,
590 599 cache=templatekw.defaulttempl,
591 600 )
592 601 self._counter = itertools.count()
593 602
594 603 self._tref = tmplspec.ref
595 604 self._parts = {
596 605 b'header': b'',
597 606 b'footer': b'',
598 607 tmplspec.ref: tmplspec.ref,
599 608 b'docheader': b'',
600 609 b'docfooter': b'',
601 610 b'separator': b'',
602 611 }
603 612 if tmplspec.mapfile:
604 613 # find correct templates for current mode, for backward
605 614 # compatibility with 'log -v/-q/--debug' using a mapfile
606 615 tmplmodes = [
607 616 (True, b''),
608 617 (self.ui.verbose, b'_verbose'),
609 618 (self.ui.quiet, b'_quiet'),
610 619 (self.ui.debugflag, b'_debug'),
611 620 ]
612 621 for mode, postfix in tmplmodes:
613 622 for t in self._parts:
614 623 cur = t + postfix
615 624 if mode and cur in self.t:
616 625 self._parts[t] = cur
617 626 else:
618 627 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
619 628 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
620 629 self._parts.update(m)
621 630
622 631 if self._parts[b'docheader']:
623 632 self.ui.write(self.t.render(self._parts[b'docheader'], {}))
624 633
625 634 def close(self):
626 635 if self._parts[b'docfooter']:
627 636 if not self.footer:
628 637 self.footer = b""
629 638 self.footer += self.t.render(self._parts[b'docfooter'], {})
630 639 return super(changesettemplater, self).close()
631 640
632 641 def _show(self, ctx, copies, props):
633 642 '''show a single changeset or file revision'''
634 643 props = props.copy()
635 644 props[b'ctx'] = ctx
636 645 props[b'index'] = index = next(self._counter)
637 646 props[b'revcache'] = {b'copies': copies}
638 647 graphwidth = props.get(b'graphwidth', 0)
639 648
640 649 # write separator, which wouldn't work well with the header part below
641 650 # since there's inherently a conflict between header (across items) and
642 651 # separator (per item)
643 652 if self._parts[b'separator'] and index > 0:
644 653 self.ui.write(self.t.render(self._parts[b'separator'], {}))
645 654
646 655 # write header
647 656 if self._parts[b'header']:
648 657 h = self.t.render(self._parts[b'header'], props)
649 658 if self.buffered:
650 659 self.header[ctx.rev()] = h
651 660 else:
652 661 if self.lastheader != h:
653 662 self.lastheader = h
654 663 self.ui.write(h)
655 664
656 665 # write changeset metadata, then patch if requested
657 666 key = self._parts[self._tref]
658 667 self.ui.write(self.t.render(key, props))
659 668 self._exthook(ctx)
660 669 self._showpatch(ctx, graphwidth)
661 670
662 671 if self._parts[b'footer']:
663 672 if not self.footer:
664 673 self.footer = self.t.render(self._parts[b'footer'], props)
665 674
666 675
667 676 def templatespec(tmpl, mapfile):
668 677 assert not (tmpl and mapfile)
669 678 if mapfile:
670 679 return formatter.mapfile_templatespec(b'changeset', mapfile)
671 680 else:
672 681 return formatter.literal_templatespec(tmpl)
673 682
674 683
675 684 def _lookuptemplate(ui, tmpl, style):
676 685 """Find the template matching the given template spec or style
677 686
678 687 See formatter.lookuptemplate() for details.
679 688 """
680 689
681 690 # ui settings
682 691 if not tmpl and not style: # template are stronger than style
683 692 tmpl = ui.config(b'command-templates', b'log')
684 693 if tmpl:
685 694 return formatter.literal_templatespec(templater.unquotestring(tmpl))
686 695 else:
687 696 style = util.expandpath(ui.config(b'ui', b'style'))
688 697
689 698 if not tmpl and style:
690 699 mapfile = style
691 700 fp = None
692 701 if not os.path.split(mapfile)[0]:
693 702 (mapname, fp) = templater.try_open_template(
694 703 b'map-cmdline.' + mapfile
695 704 ) or templater.try_open_template(mapfile)
696 705 if mapname:
697 706 mapfile = mapname
698 707 return formatter.mapfile_templatespec(b'changeset', mapfile, fp)
699 708
700 709 return formatter.lookuptemplate(ui, b'changeset', tmpl)
701 710
702 711
703 712 def maketemplater(ui, repo, tmpl, buffered=False):
704 713 """Create a changesettemplater from a literal template 'tmpl'
705 714 byte-string."""
706 715 spec = formatter.literal_templatespec(tmpl)
707 716 return changesettemplater(ui, repo, spec, buffered=buffered)
708 717
709 718
710 719 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
711 720 """show one changeset using template or regular display.
712 721
713 722 Display format will be the first non-empty hit of:
714 723 1. option 'template'
715 724 2. option 'style'
716 725 3. [command-templates] setting 'log'
717 726 4. [ui] setting 'style'
718 727 If all of these values are either the unset or the empty string,
719 728 regular display via changesetprinter() is done.
720 729 """
721 730 postargs = (differ, opts, buffered)
722 731 spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
723 732
724 733 # machine-readable formats have slightly different keyword set than
725 734 # plain templates, which are handled by changesetformatter.
726 735 # note that {b'pickle', b'debug'} can also be added to the list if needed.
727 736 if spec.ref in {b'cbor', b'json'}:
728 737 fm = ui.formatter(b'log', opts)
729 738 return changesetformatter(ui, repo, fm, *postargs)
730 739
731 740 if not spec.ref and not spec.tmpl and not spec.mapfile:
732 741 return changesetprinter(ui, repo, *postargs)
733 742
734 743 return changesettemplater(ui, repo, spec, *postargs)
735 744
736 745
737 746 @attr.s
738 747 class walkopts:
739 748 """Options to configure a set of revisions and file matcher factory
740 749 to scan revision/file history
741 750 """
742 751
743 752 # raw command-line parameters, which a matcher will be built from
744 753 pats = attr.ib()
745 754 opts = attr.ib()
746 755
747 756 # a list of revset expressions to be traversed; if follow, it specifies
748 757 # the start revisions
749 758 revspec = attr.ib()
750 759
751 760 # miscellaneous queries to filter revisions (see "hg help log" for details)
752 761 bookmarks = attr.ib(default=attr.Factory(list))
753 762 branches = attr.ib(default=attr.Factory(list))
754 763 date = attr.ib(default=None)
755 764 keywords = attr.ib(default=attr.Factory(list))
756 765 no_merges = attr.ib(default=False)
757 766 only_merges = attr.ib(default=False)
758 767 prune_ancestors = attr.ib(default=attr.Factory(list))
759 768 users = attr.ib(default=attr.Factory(list))
760 769
761 770 # miscellaneous matcher arguments
762 771 include_pats = attr.ib(default=attr.Factory(list))
763 772 exclude_pats = attr.ib(default=attr.Factory(list))
764 773
765 774 # 0: no follow, 1: follow first, 2: follow both parents
766 775 follow = attr.ib(default=0)
767 776
768 777 # do not attempt filelog-based traversal, which may be fast but cannot
769 778 # include revisions where files were removed
770 779 force_changelog_traversal = attr.ib(default=False)
771 780
772 781 # filter revisions by file patterns, which should be disabled only if
773 782 # you want to include revisions where files were unmodified
774 783 filter_revisions_by_pats = attr.ib(default=True)
775 784
776 785 # sort revisions prior to traversal: 'desc', 'topo', or None
777 786 sort_revisions = attr.ib(default=None)
778 787
779 788 # limit number of changes displayed; None means unlimited
780 789 limit = attr.ib(default=None)
781 790
782 791
783 792 def parseopts(ui, pats, opts):
784 793 # type: (Any, Sequence[bytes], Dict[bytes, Any]) -> walkopts
785 794 """Parse log command options into walkopts
786 795
787 796 The returned walkopts will be passed in to getrevs() or makewalker().
788 797 """
789 798 if opts.get(b'follow_first'):
790 799 follow = 1
791 800 elif opts.get(b'follow'):
792 801 follow = 2
793 802 else:
794 803 follow = 0
795 804
796 805 if opts.get(b'graph'):
797 806 if ui.configbool(b'experimental', b'log.topo'):
798 807 sort_revisions = b'topo'
799 808 else:
800 809 sort_revisions = b'desc'
801 810 else:
802 811 sort_revisions = None
803 812
804 813 return walkopts(
805 814 pats=pats,
806 815 opts=opts,
807 816 revspec=opts.get(b'rev', []),
808 817 bookmarks=opts.get(b'bookmark', []),
809 818 # branch and only_branch are really aliases and must be handled at
810 819 # the same time
811 820 branches=opts.get(b'branch', []) + opts.get(b'only_branch', []),
812 821 date=opts.get(b'date'),
813 822 keywords=opts.get(b'keyword', []),
814 823 no_merges=bool(opts.get(b'no_merges')),
815 824 only_merges=bool(opts.get(b'only_merges')),
816 825 prune_ancestors=opts.get(b'prune', []),
817 826 users=opts.get(b'user', []),
818 827 include_pats=opts.get(b'include', []),
819 828 exclude_pats=opts.get(b'exclude', []),
820 829 follow=follow,
821 830 force_changelog_traversal=bool(opts.get(b'removed')),
822 831 sort_revisions=sort_revisions,
823 832 limit=getlimit(opts),
824 833 )
825 834
826 835
def _makematcher(repo, revs, wopts):
    """Build matcher and expanded patterns from log options

    If --follow, revs are the revisions to follow from.

    Returns (match, pats, slowpath) where
    - match: a matcher built from the given pats and -I/-X opts
    - pats: patterns used (globs are expanded on Windows)
    - slowpath: True if patterns aren't as simple as scanning filelogs
    """
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset, but a log-like command should build its matcher
    # with scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, wopts.pats, wopts.opts)
    # anypats (e.g. globs/regexps) or --removed force walking the changelog
    slowpath = match.anypats() or (
        not match.always() and wopts.force_changelog_traversal
    )
    if not slowpath:
        if wopts.follow and wopts.revspec:
            # There may be the case that a path doesn't exist in some (but
            # not all) of the specified start revisions, but let's consider
            # the path is valid. Missing files will be warned by the matcher.
            all_files = list(match.files())
            missing_files = set(all_files)
            files = all_files
            for r in revs:
                if not files:
                    # We don't have any file to check anymore.
                    break
                ctx = repo[r]
                for f in files:
                    if f in ctx:
                        missing_files.discard(f)
                    elif ctx.hasdir(f):
                        # If a directory exists in any of the start revisions,
                        # take the slow path.
                        missing_files.discard(f)
                        slowpath = True
                        # we found on slow path, no need to search for more.
                files = missing_files
            # anything still in missing_files was not found in any start rev
            for f in all_files:
                if f in missing_files:
                    raise error.StateError(
                        _(
                            b'cannot follow file not in any of the specified '
                            b'revisions: "%s"'
                        )
                        % f
                    )
        elif wopts.follow:
            for f in match.files():
                if f not in wctx:
                    # If the file exists, it may be a directory, so let it
                    # take the slow path.
                    if os.path.exists(repo.wjoin(f)):
                        slowpath = True
                        continue
                    else:
                        raise error.StateError(
                            _(
                                b'cannot follow file not in parent '
                                b'revision: "%s"'
                            )
                            % f
                        )
                filelog = repo.file(f)
                if not filelog:
                    # A file exists in wdir but not in history, which means
                    # the file isn't committed yet.
                    raise error.StateError(
                        _(b'cannot follow nonexistent file: "%s"') % f
                    )
        else:
            for f in match.files():
                filelog = repo.file(f)
                if not filelog:
                    # A zero count may be a directory or deleted file, so
                    # try to find matching entries on the slow path.
                    slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if not path or path in repo.store:
                    break
            else:
                slowpath = False

    return match, pats, slowpath
921 930
922 931
def _fileancestors(repo, revs, match, followfirst):
    """Return (revs, filematcher) following file ancestry from *revs*.

    ``revs`` is a descending smartset of revisions introducing ancestors of
    the matched files; ``filematcher`` maps a ctx to a matcher covering the
    file names relevant in that particular revision.
    """
    fctxs = []
    for r in revs:
        ctx = repo[r]
        fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))

    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated as a side effect
    # of the graph traversal.
    fcache = {}

    def filematcher(ctx):
        return scmutil.matchfiles(repo, fcache.get(scmutil.intrev(ctx), []))

    def revgen():
        for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
            fcache[rev] = [c.path() for c in cs]
            yield rev

    return smartset.generatorset(revgen(), iterasc=False), filematcher
945 954
946 955
947 956 def _makenofollowfilematcher(repo, pats, opts):
948 957 '''hook for extensions to override the filematcher for non-follow cases'''
949 958 return None
950 959
951 960
def revsingle(repo, revspec, default=b'.', localalias=None):
    """Resolve a user-provided revset into a single revision.

    Thin wrapper around scmutil.revsingle() converting lookup failures into
    InputError so they are reported as user errors rather than internal ones.
    """
    try:
        return scmutil.revsingle(repo, revspec, default, localalias)
    except error.RepoLookupError as err:
        raise error.InputError(err.args[0], hint=err.hint)
962 971
963 972
def revpair(repo, revs):
    """Resolve user-provided revset(s) into a pair of revisions.

    Thin wrapper around scmutil.revpair() converting lookup failures into
    InputError so they are reported as user errors rather than internal ones.
    """
    try:
        return scmutil.revpair(repo, revs)
    except error.RepoLookupError as err:
        raise error.InputError(err.args[0], hint=err.hint)
974 983
975 984
def revrange(repo, specs, localalias=None):
    """Resolve user-provided revset(s) into a smartset of revisions.

    Thin wrapper around scmutil.revrange() converting lookup failures into
    InputError so they are reported as user errors rather than internal ones.
    """
    try:
        return scmutil.revrange(repo, specs, localalias)
    except error.RepoLookupError as err:
        raise error.InputError(err.args[0], hint=err.hint)
986 995
987 996
# Map log option name -> (item revset template, list revset template).
# A template without '%' is used verbatim; when the list template is None
# the option holds a single value, otherwise each value is formatted with
# the item template and combined with the list template.
_opt2logrevset = {
    b'no_merges': (b'not merge()', None),
    b'only_merges': (b'merge()', None),
    b'_matchfiles': (None, b'_matchfiles(%ps)'),
    b'date': (b'date(%s)', None),
    b'branch': (b'branch(%s)', b'%lr'),
    b'_patslog': (b'filelog(%s)', b'%lr'),
    b'keyword': (b'keyword(%s)', b'%lr'),
    b'prune': (b'ancestors(%s)', b'not %lr'),
    b'user': (b'user(%s)', b'%lr'),
}
999 1008
1000 1009
def _makerevset(repo, wopts, slowpath):
    """Return a revset string built from log options and file patterns

    Returns None when no filtering expression is needed.
    """
    opts = {
        b'branch': [b'literal:' + repo.lookupbranch(b) for b in wopts.branches],
        b'date': wopts.date,
        b'keyword': wopts.keywords,
        b'no_merges': wopts.no_merges,
        b'only_merges': wopts.only_merges,
        b'prune': wopts.prune_ancestors,
        b'user': [b'literal:' + v for v in wopts.users],
    }

    if wopts.filter_revisions_by_pats and slowpath:
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X b" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = [b'r:', b'd:relpath']
        for p in wopts.pats:
            matchargs.append(b'p:' + p)
        for p in wopts.include_pats:
            matchargs.append(b'i:' + p)
        for p in wopts.exclude_pats:
            matchargs.append(b'x:' + p)
        opts[b'_matchfiles'] = matchargs
    elif wopts.filter_revisions_by_pats and not wopts.follow:
        opts[b'_patslog'] = list(wopts.pats)

    # translate each populated option through the _opt2logrevset table
    expr = []
    for op, val in sorted(opts.items()):
        if not val:
            continue
        revop, listop = _opt2logrevset[op]
        if revop and b'%' not in revop:
            # fixed expression (e.g. "merge()"), used verbatim
            expr.append(revop)
        elif not listop:
            expr.append(revsetlang.formatspec(revop, val))
        else:
            if revop:
                val = [revsetlang.formatspec(revop, v) for v in val]
            expr.append(revsetlang.formatspec(listop, val))

    if wopts.bookmarks:
        expr.append(
            revsetlang.formatspec(
                b'%lr',
                [scmutil.format_bookmark_revspec(v) for v in wopts.bookmarks],
            )
        )

    if expr:
        expr = b'(' + b' and '.join(expr) + b')'
    else:
        expr = None
    return expr
1058 1067
1059 1068
def _initialrevs(repo, wopts):
    """Return the initial set of revisions to be filtered or followed."""
    if wopts.revspec:
        # explicit --rev takes precedence over everything else
        return revrange(repo, wopts.revspec)
    if wopts.follow:
        # following from an empty working directory yields nothing
        if repo.dirstate.p1() == repo.nullid:
            return smartset.baseset()
        return repo.revs(b'.')
    all_revs = smartset.spanset(repo)
    all_revs.reverse()
    return all_revs
1072 1081
1073 1082
def makewalker(repo, wopts):
    # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[Callable[[Any], matchmod.basematcher]]]
    """Build (revs, makefilematcher) to scan revision/file history

    - revs is the smartset to be traversed.
    - makefilematcher is a function to map ctx to a matcher for that revision
    """
    revs = _initialrevs(repo, wopts)
    if not revs:
        return smartset.baseset(), None
    # TODO: might want to merge slowpath with wopts.force_changelog_traversal
    match, pats, slowpath = _makematcher(repo, revs, wopts)
    # patterns may have been expanded (globbing on Windows)
    wopts = attr.evolve(wopts, pats=pats)

    filematcher = None
    if wopts.follow:
        if slowpath or match.always():
            revs = dagop.revancestors(repo, revs, followfirst=wopts.follow == 1)
        else:
            assert not wopts.force_changelog_traversal
            revs, filematcher = _fileancestors(
                repo, revs, match, followfirst=wopts.follow == 1
            )
        revs.reverse()
    if filematcher is None:
        # give extensions a chance to provide a matcher (hook)
        filematcher = _makenofollowfilematcher(repo, wopts.pats, wopts.opts)
    if filematcher is None:

        def filematcher(ctx):
            return match

    expr = _makerevset(repo, wopts, slowpath)
    if wopts.sort_revisions:
        assert wopts.sort_revisions in {b'topo', b'desc'}
        if wopts.sort_revisions == b'topo':
            if not revs.istopo():
                revs = dagop.toposort(revs, repo.changelog.parentrevs)
                # TODO: try to iterate the set lazily
                revs = revset.baseset(list(revs), istopo=True)
        elif not (revs.isdescending() or revs.istopo()):
            # User-specified revs might be unsorted
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(None, expr)
        revs = matcher(repo, revs)
    if wopts.limit is not None:
        revs = revs.slice(0, wopts.limit)

    return revs, filematcher
1123 1132
1124 1133
def getrevs(repo, wopts):
    # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
    """Compute the revisions to display and a pre-configured differ.

    Returns a (revs, differ) pair; differ is None when there is nothing to
    show, otherwise a changesetdiffer wired to the per-revision file matcher
    built by makewalker().
    """
    revisions, makefilematcher = makewalker(repo, wopts)
    if not revisions:
        return revisions, None
    differ = changesetdiffer()
    differ._makefilematcher = makefilematcher
    return revisions, differ
1137 1146
1138 1147
def _parselinerangeopt(repo, opts):
    """Parse --line-range log option and return a list of tuples (filename,
    (fromline, toline)).

    Raises InputError on malformed patterns or invalid line numbers.
    """
    linerangebyfname = []
    for pat in opts.get(b'line_range', []):
        try:
            # the pattern looks like "FILE,FROM:TO"; split off the range part
            pat, linerange = pat.rsplit(b',', 1)
        except ValueError:
            raise error.InputError(
                _(b'malformatted line-range pattern %s') % pat
            )
        try:
            fromline, toline = map(int, linerange.split(b':'))
        except ValueError:
            raise error.InputError(_(b"invalid line range for %s") % pat)
        msg = _(b"line range pattern '%s' must match exactly one file") % pat
        fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
        linerangebyfname.append(
            (fname, util.processlinerange(fromline, toline))
        )
    return linerangebyfname
1161 1170
1162 1171
def getlinerangerevs(repo, userrevs, opts):
    """Return (revs, differ).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "differ" is a changesetdiffer with pre-configured file matcher and hunks
    filter.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
        if fname not in wctx:
            raise error.StateError(
                _(b'cannot follow file not in parent revision: "%s"') % fname
            )
        fctx = wctx.filectx(fname)
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev is None:
                # working directory revision
                rev = wdirrev
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(rev, {}).setdefault(
                fctx.path(), []
            ).append(linerange)

    def nofilterhunksfn(fctx, hunks):
        return hunks

    def hunksfilter(ctx):
        fctxlineranges = linerangesbyrev.get(scmutil.intrev(ctx))
        if fctxlineranges is None:
            return nofilterhunksfn

        def filterfn(fctx, hunks):
            # keep only hunks intersecting one of the requested line ranges
            lineranges = fctxlineranges.get(fctx.path())
            if lineranges is not None:
                for hr, lines in hunks:
                    if hr is None:  # binary
                        yield hr, lines
                        continue
                    if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
                        yield hr, lines
            else:
                for hunk in hunks:
                    yield hunk

        return filterfn

    def filematcher(ctx):
        files = list(linerangesbyrev.get(scmutil.intrev(ctx), []))
        return scmutil.matchfiles(repo, files)

    revs = sorted(linerangesbyrev, reverse=True)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    differ._makehunksfilter = hunksfilter
    return smartset.baseset(revs), differ
1225 1234
1226 1235
def _graphnodeformatter(ui, displayer):
    """Return a formatnode(repo, ctx, cache) callable rendering the graph
    node symbol, honoring the command-templates.graphnode config."""
    spec = ui.config(b'command-templates', b'graphnode')
    if not spec:
        return templatekw.getgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    if isinstance(displayer, changesettemplater):
        # reuse cache of slow templates
        tres = displayer._tresources
    else:
        tres = formatter.templateresources(ui)
    templ = formatter.maketemplater(
        ui, spec, defaults=templatekw.keywords, resources=tres
    )

    def formatnode(repo, ctx, cache):
        props = {b'ctx': ctx, b'repo': repo}
        return templ.renderdefault(props)

    return formatnode
1247 1256
1248 1257
def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
    """Render *dag* as an ASCII revision graph through *displayer*.

    ``edgefn`` yields (type, char, width, coldata) tuples for each node;
    ``getcopies`` optionally maps a ctx to its copy information.
    """
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state.styles

    # only set graph styling if HGPLAIN is not set.
    if ui.plain(b'graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, b'|'))
    else:
        edgetypes = {
            b'parent': graphmod.PARENT,
            b'grandparent': graphmod.GRANDPARENT,
            b'missing': graphmod.MISSINGPARENT,
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config(
                b'experimental', b'graphstyle.%s' % name, styles[key]
            )
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state.graphshorten = ui.configbool(b'experimental', b'graphshorten')

    formatnode_cache = {}
    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx, formatnode_cache)
        copies = getcopies(ctx) if getcopies else None
        edges = edgefn(type, char, state, rev, parents)
        # the first edge carries the graph width used for text wrapping
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(
            ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
        )
        lines = displayer.hunk.pop(rev).split(b'\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
            lines = []
    displayer.close()
1294 1303
1295 1304
def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
    """Render *revs* as an ASCII graph using the standard DAG walker."""
    dag = graphmod.dagwalker(repo, revs)
    displaygraph(ui, repo, dag, displayer, graphmod.asciiedges, getrenamed)
1299 1308
1300 1309
def displayrevs(ui, repo, revs, displayer, getcopies):
    """Show each revision of *revs* through *displayer* (non-graph mode)."""
    for rev in revs:
        ctx = repo[rev]
        if getcopies:
            copies = getcopies(ctx)
        else:
            copies = None
        displayer.show(ctx, copies=copies)
        displayer.flush(ctx)
    displayer.close()
1308 1317
1309 1318
def checkunsupportedgraphflags(pats, opts):
    """Abort with InputError if an option incompatible with --graph is set."""
    for name in [b"newest_first"]:
        if opts.get(name):
            raise error.InputError(
                _(b"-G/--graph option is incompatible with --%s")
                % name.replace(b"_", b"-")
            )
1317 1326
1318 1327
def graphrevs(repo, nodes, opts):
    """Return a DAG generator over *nodes*, newest first, honoring --limit.

    Note: reverses the caller's list in place.
    """
    max_nodes = getlimit(opts)
    nodes.reverse()
    if max_nodes is not None:
        nodes = nodes[:max_nodes]
    return graphmod.nodes(repo, nodes)
@@ -1,517 +1,527 b''
1 1 # mail.py - mail sending bits for mercurial
2 2 #
3 3 # Copyright 2006 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import email
10 10 import email.charset
11 11 import email.generator
12 12 import email.header
13 13 import email.message
14 14 import email.parser
15 15 import io
16 16 import os
17 17 import smtplib
18 18 import socket
19 19 import time
20 20
21 from typing import (
22 Any,
23 List,
24 Tuple,
25 Union,
26 )
27
21 28 from .i18n import _
22 29 from .pycompat import (
23 30 open,
24 31 )
25 32 from . import (
26 33 encoding,
27 34 error,
28 35 pycompat,
29 36 sslutil,
30 37 util,
31 38 )
32 39 from .utils import (
33 40 procutil,
34 41 stringutil,
35 42 urlutil,
36 43 )
37 44
38 if pycompat.TYPE_CHECKING:
39 from typing import Any, List, Tuple, Union
40 45
41 # keep pyflakes happy
42 assert all((Any, List, Tuple, Union))
46 # keep pyflakes happy
47 assert [
48 Any,
49 List,
50 Tuple,
51 Union,
52 ]
43 53
44 54
class STARTTLS(smtplib.SMTP):
    """Derived class to verify the peer certificate for STARTTLS.

    This class allows to pass any keyword arguments to SSL socket creation.
    """

    def __init__(self, ui, host=None, **kwargs):
        smtplib.SMTP.__init__(self, **kwargs)
        self._ui = ui  # ui object passed through to sslutil.wrapsocket
        self._host = host  # server hostname, used for certificate validation

    def starttls(self, keyfile=None, certfile=None, context=None):
        # Signature mirrors smtplib.SMTP.starttls; ``context`` is accepted
        # for compatibility but the socket is wrapped via sslutil instead.
        if not self.has_extn("starttls"):
            msg = "STARTTLS extension not supported by server"
            raise smtplib.SMTPException(msg)
        (resp, reply) = self.docmd("STARTTLS")
        if resp == 220:
            self.sock = sslutil.wrapsocket(
                self.sock,
                keyfile,
                certfile,
                ui=self._ui,
                serverhostname=self._host,
            )
            self.file = self.sock.makefile("rb")
            # RFC 3207: discard all pre-TLS EHLO/HELO state after the upgrade
            self.helo_resp = None
            self.ehlo_resp = None
            self.esmtp_features = {}
            self.does_esmtp = 0
        return (resp, reply)
75 85
76 86
class SMTPS(smtplib.SMTP):
    """Derived class to verify the peer certificate for SMTPS.

    This class allows to pass any keyword arguments to SSL socket creation.
    """

    def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
        self.keyfile = keyfile
        self.certfile = certfile
        smtplib.SMTP.__init__(self, **kwargs)
        self._host = host  # server hostname, used for certificate validation
        self.default_port = smtplib.SMTP_SSL_PORT
        self._ui = ui  # ui object passed through to sslutil.wrapsocket

    def _get_socket(self, host, port, timeout):
        # Overrides smtplib's hook so the connection is TLS-wrapped with
        # Mercurial's certificate validation from the start (implicit TLS).
        if self.debuglevel > 0:
            self._ui.debug(b'connect: %r\n' % ((host, port),))
        new_socket = socket.create_connection((host, port), timeout)
        new_socket = sslutil.wrapsocket(
            new_socket,
            self.keyfile,
            self.certfile,
            ui=self._ui,
            serverhostname=self._host,
        )
        self.file = new_socket.makefile('rb')
        return new_socket
104 114
105 115
106 116 def _pyhastls():
107 117 # type: () -> bool
108 118 """Returns true iff Python has TLS support, false otherwise."""
109 119 try:
110 120 import ssl
111 121
112 122 getattr(ssl, 'HAS_TLS', False)
113 123 return True
114 124 except ImportError:
115 125 return False
116 126
117 127
def _smtp(ui):
    '''build an smtp connection and return a function to send mail'''
    local_hostname = ui.config(b'smtp', b'local_hostname')
    tls = ui.config(b'smtp', b'tls')
    # backward compatible: when tls = true, we use starttls.
    starttls = tls == b'starttls' or stringutil.parsebool(tls)
    smtps = tls == b'smtps'
    if (starttls or smtps) and not _pyhastls():
        raise error.Abort(_(b"can't use TLS: Python SSL support not installed"))
    mailhost = ui.config(b'smtp', b'host')
    if not mailhost:
        raise error.Abort(_(b'smtp.host not configured - cannot send mail'))
    # choose the transport class: implicit TLS, opportunistic TLS, or plain
    if smtps:
        ui.note(_(b'(using smtps)\n'))
        s = SMTPS(ui, local_hostname=local_hostname, host=mailhost)
    elif starttls:
        s = STARTTLS(ui, local_hostname=local_hostname, host=mailhost)
    else:
        s = smtplib.SMTP(local_hostname=local_hostname)
    if smtps:
        defaultport = 465
    else:
        defaultport = 25
    mailport = urlutil.getport(ui.config(b'smtp', b'port', defaultport))
    ui.note(_(b'sending mail: smtp host %s, port %d\n') % (mailhost, mailport))
    s.connect(host=mailhost, port=mailport)
    if starttls:
        ui.note(_(b'(using starttls)\n'))
        s.ehlo()
        s.starttls()
        # re-EHLO after the TLS upgrade, per RFC 3207
        s.ehlo()
    if starttls or smtps:
        ui.note(_(b'(verifying remote certificate)\n'))
        sslutil.validatesocket(s.sock)

    try:
        _smtp_login(ui, s, mailhost, mailport)
    except smtplib.SMTPException as inst:
        raise error.Abort(stringutil.forcebytestr(inst))

    def send(sender, recipients, msg):
        # closure over the configured connection; smtplib errors are
        # converted to user-facing aborts
        try:
            return s.sendmail(sender, recipients, msg)
        except smtplib.SMTPRecipientsRefused as inst:
            recipients = [r[1] for r in inst.recipients.values()]
            raise error.Abort(b'\n' + b'\n'.join(recipients))
        except smtplib.SMTPException as inst:
            raise error.Abort(stringutil.forcebytestr(inst))

    return send
168 178
169 179
def _smtp_login(ui, smtp, mailhost, mailport):
    """A hook for the keyring extension to perform the actual SMTP login.

    An already connected SMTP object of the proper type is provided, based on
    the current configuration. The host and port to which the connection was
    established are provided for accessibility, since the SMTP object doesn't
    provide an accessor. ``smtplib.SMTPException`` is raised on error.
    """
    username = ui.config(b'smtp', b'username')
    password = ui.config(b'smtp', b'password')
    if username:
        if password:
            password = encoding.strfromlocal(password)
        else:
            # no configured password: prompt the user interactively
            password = ui.getpass()
            if password is not None:
                password = encoding.strfromlocal(password)
    if username and password:
        ui.note(_(b'(authenticating to mail server as %s)\n') % username)
        username = encoding.strfromlocal(username)
        smtp.login(username, password)
191 201
192 202
def _sendmail(ui, sender, recipients, msg):
    '''send mail using sendmail.'''
    program = ui.config(b'email', b'method')

    def stremail(x):
        # normalize and shell-quote a single address for the command line
        return procutil.shellquote(stringutil.email(encoding.strtolocal(x)))

    cmdline = b'%s -f %s %s' % (
        program,
        stremail(sender),
        b' '.join(map(stremail, recipients)),
    )
    ui.note(_(b'sending mail: %s\n') % cmdline)
    fp = procutil.popen(cmdline, b'wb')
    fp.write(util.tonativeeol(msg))
    # close() returns the child's non-zero exit status, if any
    ret = fp.close()
    if ret:
        raise error.Abort(
            b'%s %s'
            % (
                os.path.basename(procutil.shellsplit(program)[0]),
                procutil.explainexit(ret),
            )
        )
217 227
218 228
def _mbox(mbox, sender, recipients, msg):
    """Append *msg* to an mbox file (*recipients* is unused here)."""
    # TODO: use python mbox library for proper locking
    with open(mbox, b'ab+') as fp:
        # Should be time.asctime(), but Windows prints 2-characters day
        # of month instead of one. Make them print the same thing.
        date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
        envelope = b'From %s %s\n' % (
            encoding.strtolocal(sender),
            encoding.strtolocal(date),
        )
        fp.write(envelope)
        fp.write(msg)
        fp.write(b'\n\n')
232 242
233 243
def connect(ui, mbox=None):
    """Build a mail transport and return a send function.

    The returned callable is invoked as send(sender, recipients, msg).
    Writing to an mbox takes precedence over SMTP and sendmail delivery.
    """
    if mbox:
        # truncate the mbox up front so each run starts from an empty file
        open(mbox, b'wb').close()

        def send_to_mbox(sender, recipients, msg):
            return _mbox(mbox, sender, recipients, msg)

        return send_to_mbox
    if ui.config(b'email', b'method') == b'smtp':
        return _smtp(ui)
    return lambda sender, recipients, msg: _sendmail(
        ui, sender, recipients, msg
    )
243 253
244 254
def sendmail(ui, sender, recipients, msg, mbox=None):
    """Deliver *msg* via the transport selected by the configuration."""
    transport = connect(ui, mbox=mbox)
    return transport(sender, recipients, msg)
248 258
249 259
def validateconfig(ui):
    '''determine if we have enough config data to try sending email.'''
    method = ui.config(b'email', b'method')
    if method == b'smtp':
        if not ui.config(b'smtp', b'host'):
            raise error.Abort(
                _(
                    b'smtp specified as email transport, '
                    b'but no smtp host configured'
                )
            )
    else:
        # anything other than "smtp" is treated as a sendmail-like command;
        # it must resolve to an executable on PATH
        command = procutil.shellsplit(method)
        command = command[0] if command else b''
        if not (command and procutil.findexe(command)):
            raise error.Abort(
                _(b'%r specified as email transport, but not in PATH') % command
            )
268 278
269 279
270 280 def codec2iana(cs):
271 281 # type: (str) -> str
272 282 ''' '''
273 283 cs = email.charset.Charset(cs).input_charset.lower()
274 284
275 285 # "latin1" normalizes to "iso8859-1", standard calls for "iso-8859-1"
276 286 if cs.startswith("iso") and not cs.startswith("iso-"):
277 287 return "iso-" + cs[3:]
278 288 return cs
279 289
280 290
def mimetextpatch(s, subtype='plain', display=False):
    # type: (bytes, str, bool) -> email.message.Message
    """Return MIME message suitable for a patch.
    Charset will be detected by first trying to decode as us-ascii, then utf-8,
    and finally the global encodings. If all those fail, fall back to
    ISO-8859-1, an encoding with that allows all byte sequences.
    Transfer encodings will be used if necessary."""

    cs = [
        'us-ascii',
        'utf-8',
        pycompat.sysstr(encoding.encoding),
        pycompat.sysstr(encoding.fallbackencoding),
    ]
    if display:
        # for on-screen display, only try plain ascii
        cs = ['us-ascii']
    for charset in cs:
        try:
            # decode() is used purely as a validity probe for this charset
            s.decode(charset)
            return mimetextqp(s, subtype, codec2iana(charset))
        except UnicodeDecodeError:
            pass

    return mimetextqp(s, subtype, "iso-8859-1")
305 315
306 316
def mimetextqp(body, subtype, charset):
    # type: (bytes, str, str) -> email.message.Message
    """Return a text/<subtype> MIME message for *body*.

    Quoted-printable transfer encoding is enabled when any line approaches
    the RFC line-length limit.
    """
    cs = email.charset.Charset(charset)
    msg = email.message.Message()
    msg.set_type('text/' + subtype)

    if any(len(line) > 950 for line in body.splitlines()):
        cs.body_encoding = email.charset.QP

    # Passing bytes together with a Charset object (rather than a charset
    # name) keeps the payload stored as-is instead of being re-encoded
    # through the registered codec.
    msg.set_payload(body, cs)

    return msg
331 341
332 342
def _charsets(ui):
    # type: (Any) -> List[str]
    """Return candidate charsets for mail parts not containing patches.

    User-configured charsets come first, then the fallback encoding, the
    local encoding and utf-8, deduplicated while preserving order; plain
    ascii variants are dropped since they need no declaration.
    """
    candidates = [
        pycompat.sysstr(cs.lower())
        for cs in ui.configlist(b'email', b'charsets')
    ]
    for fallback in (
        pycompat.sysstr(encoding.fallbackencoding.lower()),
        pycompat.sysstr(encoding.encoding.lower()),
        'utf-8',
    ):
        if fallback not in candidates:
            candidates.append(fallback)
    return [cs for cs in candidates if not cs.endswith('ascii')]
349 359
350 360
def _encode(ui, s, charsets):
    # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
    """Returns (converted) string, charset tuple.
    Finds out best charset by cycling through sendcharsets in descending
    order. Tries both encoding and fallbackencoding for input. Only as
    last resort send as is in fake ascii.
    Caveat: Do not use for mail parts containing patches!"""
    sendcharsets = charsets or _charsets(ui)
    if not isinstance(s, bytes):
        # We have unicode data, which we need to try and encode to
        # some reasonable-ish encoding. Try the encodings the user
        # wants, and fall back to garbage-in-ascii.
        for ocs in sendcharsets:
            try:
                return s.encode(ocs), ocs
            except UnicodeEncodeError:
                pass
            except LookupError:
                ui.warn(
                    _(b'ignoring invalid sendcharset: %s\n')
                    % pycompat.sysbytes(ocs)
                )
        else:
            # for/else: no charset succeeded.
            # Everything failed, ascii-armor what we've got and send it.
            return s.encode('ascii', 'backslashreplace'), 'us-ascii'
    # We have a bytes of unknown encoding. We'll try and guess a valid
    # encoding, falling back to pretending we had ascii even though we
    # know that's wrong.
    try:
        s.decode('ascii')
    except UnicodeDecodeError:
        for ics in (encoding.encoding, encoding.fallbackencoding):
            ics = pycompat.sysstr(ics)
            try:
                u = s.decode(ics)
            except UnicodeDecodeError:
                continue
            for ocs in sendcharsets:
                try:
                    return u.encode(ocs), ocs
                except UnicodeEncodeError:
                    pass
                except LookupError:
                    ui.warn(
                        _(b'ignoring invalid sendcharset: %s\n')
                        % pycompat.sysbytes(ocs)
                    )
    # if ascii, or all conversion attempts fail, send (broken) ascii
    return s, 'us-ascii'
400 410
401 411
def headencode(ui, s, charsets=None, display=False):
    # type: (Any, Union[bytes, str], List[str], bool) -> str
    """Return an RFC-2047 compliant header built from the given string."""
    if display:
        return encoding.strfromlocal(s)
    # split into words?
    encoded, cs = _encode(ui, s, charsets)
    return email.header.Header(encoded, cs).encode()
410 420
411 421
412 422 def _addressencode(ui, name, addr, charsets=None):
413 423 # type: (Any, str, str, List[str]) -> str
414 424 addr = encoding.strtolocal(addr)
415 425 name = headencode(ui, name, charsets)
416 426 try:
417 427 acc, dom = addr.split(b'@')
418 428 acc.decode('ascii')
419 429 dom = dom.decode(pycompat.sysstr(encoding.encoding)).encode('idna')
420 430 addr = b'%s@%s' % (acc, dom)
421 431 except UnicodeDecodeError:
422 432 raise error.Abort(_(b'invalid email address: %s') % addr)
423 433 except ValueError:
424 434 try:
425 435 # too strict?
426 436 addr.decode('ascii')
427 437 except UnicodeDecodeError:
428 438 raise error.Abort(_(b'invalid local address: %s') % addr)
429 439 return email.utils.formataddr((name, encoding.strfromlocal(addr)))
430 440
431 441
432 442 def addressencode(ui, address, charsets=None, display=False):
433 443 # type: (Any, bytes, List[str], bool) -> str
434 444 '''Turns address into RFC-2047 compliant header.'''
435 445 if display or not address:
436 446 return encoding.strfromlocal(address or b'')
437 447 name, addr = email.utils.parseaddr(encoding.strfromlocal(address))
438 448 return _addressencode(ui, name, addr, charsets)
439 449
440 450
441 451 def addrlistencode(ui, addrs, charsets=None, display=False):
442 452 # type: (Any, List[bytes], List[str], bool) -> List[str]
443 453 """Turns a list of addresses into a list of RFC-2047 compliant headers.
444 454 A single element of input list may contain multiple addresses, but output
445 455 always has one address per item"""
446 456 straddrs = []
447 457 for a in addrs:
448 458 assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
449 459 straddrs.append(encoding.strfromlocal(a))
450 460 if display:
451 461 return [a.strip() for a in straddrs if a.strip()]
452 462
453 463 result = []
454 464 for name, addr in email.utils.getaddresses(straddrs):
455 465 if name or addr:
456 466 r = _addressencode(ui, name, addr, charsets)
457 467 result.append(r)
458 468 return result
459 469
460 470
461 471 def mimeencode(ui, s, charsets=None, display=False):
462 472 # type: (Any, bytes, List[str], bool) -> email.message.Message
463 473 """creates mime text object, encodes it if needed, and sets
464 474 charset and transfer-encoding accordingly."""
465 475 cs = 'us-ascii'
466 476 if not display:
467 477 s, cs = _encode(ui, s, charsets)
468 478 return mimetextqp(s, 'plain', cs)
469 479
470 480
471 481 Generator = email.generator.BytesGenerator
472 482
473 483
474 484 def parse(fp):
475 485 # type: (Any) -> email.message.Message
476 486 ep = email.parser.Parser()
477 487 # disable the "universal newlines" mode, which isn't binary safe.
478 488 # I have no idea if ascii/surrogateescape is correct, but that's
479 489 # what the standard Python email parser does.
480 490 fp = io.TextIOWrapper(
481 491 fp, encoding='ascii', errors='surrogateescape', newline=chr(10)
482 492 )
483 493 try:
484 494 return ep.parse(fp)
485 495 finally:
486 496 fp.detach()
487 497
488 498
489 499 def parsebytes(data):
490 500 # type: (bytes) -> email.message.Message
491 501 ep = email.parser.BytesParser()
492 502 return ep.parsebytes(data)
493 503
494 504
495 505 def headdecode(s):
496 506 # type: (Union[email.header.Header, bytes]) -> bytes
497 507 '''Decodes RFC-2047 header'''
498 508 uparts = []
499 509 for part, charset in email.header.decode_header(s):
500 510 if charset is not None:
501 511 try:
502 512 uparts.append(part.decode(charset))
503 513 continue
504 514 except (UnicodeDecodeError, LookupError):
505 515 pass
506 516 # On Python 3, decode_header() may return either bytes or unicode
507 517 # depending on whether the header has =?<charset>? or not
508 518 if isinstance(part, type(u'')):
509 519 uparts.append(part)
510 520 continue
511 521 try:
512 522 uparts.append(part.decode('UTF-8'))
513 523 continue
514 524 except UnicodeDecodeError:
515 525 pass
516 526 uparts.append(part.decode('ISO-8859-1'))
517 527 return encoding.unitolocal(u' '.join(uparts))
@@ -1,391 +1,399 b''
1 1 import contextlib
2 2 import errno
3 3 import os
4 4 import posixpath
5 5 import stat
6 6
7 7 from typing import (
8 8 Any,
9 9 Callable,
10 10 Iterator,
11 11 Optional,
12 12 )
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 encoding,
17 17 error,
18 18 policy,
19 19 pycompat,
20 20 util,
21 21 )
22 22
23 23 rustdirs = policy.importrust('dirstate', 'Dirs')
24 24 parsers = policy.importmod('parsers')
25 25
26 # keeps pyflakes happy
27 assert [
28 Any,
29 Callable,
30 Iterator,
31 Optional,
32 ]
33
26 34
27 35 def _lowerclean(s):
28 36 # type: (bytes) -> bytes
29 37 return encoding.hfsignoreclean(s.lower())
30 38
31 39
32 40 class pathauditor:
33 41 """ensure that a filesystem path contains no banned components.
34 42 the following properties of a path are checked:
35 43
36 44 - ends with a directory separator
37 45 - under top-level .hg
38 46 - starts at the root of a windows drive
39 47 - contains ".."
40 48
41 49 More check are also done about the file system states:
42 50 - traverses a symlink (e.g. a/symlink_here/b)
43 51 - inside a nested repository (a callback can be used to approve
44 52 some nested repositories, e.g., subrepositories)
45 53
46 54 The file system checks are only done when 'realfs' is set to True (the
47 55 default). They should be disable then we are auditing path for operation on
48 56 stored history.
49 57
50 58 If 'cached' is set to True, audited paths and sub-directories are cached.
51 59 Be careful to not keep the cache of unmanaged directories for long because
52 60 audited paths may be replaced with symlinks.
53 61 """
54 62
55 63 def __init__(self, root, callback=None, realfs=True, cached=False):
56 64 self.audited = set()
57 65 self.auditeddir = dict()
58 66 self.root = root
59 67 self._realfs = realfs
60 68 self._cached = cached
61 69 self.callback = callback
62 70 if os.path.lexists(root) and not util.fscasesensitive(root):
63 71 self.normcase = util.normcase
64 72 else:
65 73 self.normcase = lambda x: x
66 74
67 75 def __call__(self, path, mode=None):
68 76 # type: (bytes, Optional[Any]) -> None
69 77 """Check the relative path.
70 78 path may contain a pattern (e.g. foodir/**.txt)"""
71 79
72 80 path = util.localpath(path)
73 81 if path in self.audited:
74 82 return
75 83 # AIX ignores "/" at end of path, others raise EISDIR.
76 84 if util.endswithsep(path):
77 85 raise error.InputError(
78 86 _(b"path ends in directory separator: %s") % path
79 87 )
80 88 parts = util.splitpath(path)
81 89 if (
82 90 os.path.splitdrive(path)[0]
83 91 or _lowerclean(parts[0]) in (b'.hg', b'.hg.', b'')
84 92 or pycompat.ospardir in parts
85 93 ):
86 94 raise error.InputError(
87 95 _(b"path contains illegal component: %s") % path
88 96 )
89 97 # Windows shortname aliases
90 98 if b"~" in path:
91 99 for p in parts:
92 100 if b"~" in p:
93 101 first, last = p.split(b"~", 1)
94 102 if last.isdigit() and first.upper() in [b"HG", b"HG8B6C"]:
95 103 raise error.InputError(
96 104 _(b"path contains illegal component: %s") % path
97 105 )
98 106 if b'.hg' in _lowerclean(path):
99 107 lparts = [_lowerclean(p) for p in parts]
100 108 for p in b'.hg', b'.hg.':
101 109 if p in lparts[1:]:
102 110 pos = lparts.index(p)
103 111 base = os.path.join(*parts[:pos])
104 112 raise error.InputError(
105 113 _(b"path '%s' is inside nested repo %r")
106 114 % (path, pycompat.bytestr(base))
107 115 )
108 116
109 117 if self._realfs:
110 118 # It's important that we check the path parts starting from the root.
111 119 # We don't want to add "foo/bar/baz" to auditeddir before checking if
112 120 # there's a "foo/.hg" directory. This also means we won't accidentally
113 121 # traverse a symlink into some other filesystem (which is potentially
114 122 # expensive to access).
115 123 for prefix in finddirs_rev_noroot(path):
116 124 if prefix in self.auditeddir:
117 125 res = self.auditeddir[prefix]
118 126 else:
119 127 res = pathauditor._checkfs_exists(
120 128 self.root, prefix, path, self.callback
121 129 )
122 130 if self._cached:
123 131 self.auditeddir[prefix] = res
124 132 if not res:
125 133 break
126 134
127 135 if self._cached:
128 136 self.audited.add(path)
129 137
130 138 @staticmethod
131 139 def _checkfs_exists(
132 140 root,
133 141 prefix: bytes,
134 142 path: bytes,
135 143 callback: Optional[Callable[[bytes], bool]] = None,
136 144 ):
137 145 """raise exception if a file system backed check fails.
138 146
139 147 Return a bool that indicates that the directory (or file) exists."""
140 148 curpath = os.path.join(root, prefix)
141 149 try:
142 150 st = os.lstat(curpath)
143 151 except OSError as err:
144 152 if err.errno == errno.ENOENT:
145 153 return False
146 154 # EINVAL can be raised as invalid path syntax under win32.
147 155 # They must be ignored for patterns can be checked too.
148 156 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
149 157 raise
150 158 else:
151 159 if stat.S_ISLNK(st.st_mode):
152 160 msg = _(b'path %r traverses symbolic link %r') % (
153 161 pycompat.bytestr(path),
154 162 pycompat.bytestr(prefix),
155 163 )
156 164 raise error.Abort(msg)
157 165 elif stat.S_ISDIR(st.st_mode) and os.path.isdir(
158 166 os.path.join(curpath, b'.hg')
159 167 ):
160 168 if not callback or not callback(curpath):
161 169 msg = _(b"path '%s' is inside nested repo %r")
162 170 raise error.Abort(msg % (path, pycompat.bytestr(prefix)))
163 171 return True
164 172
165 173 def check(self, path):
166 174 # type: (bytes) -> bool
167 175 try:
168 176 self(path)
169 177 return True
170 178 except (OSError, error.Abort):
171 179 return False
172 180
173 181 @contextlib.contextmanager
174 182 def cached(self):
175 183 if self._cached:
176 184 yield
177 185 else:
178 186 try:
179 187 self._cached = True
180 188 yield
181 189 finally:
182 190 self.audited.clear()
183 191 self.auditeddir.clear()
184 192 self._cached = False
185 193
186 194
187 195 def canonpath(root, cwd, myname, auditor=None):
188 196 # type: (bytes, bytes, bytes, Optional[pathauditor]) -> bytes
189 197 """return the canonical path of myname, given cwd and root
190 198
191 199 >>> def check(root, cwd, myname):
192 200 ... a = pathauditor(root, realfs=False)
193 201 ... try:
194 202 ... return canonpath(root, cwd, myname, a)
195 203 ... except error.Abort:
196 204 ... return 'aborted'
197 205 >>> def unixonly(root, cwd, myname, expected='aborted'):
198 206 ... if pycompat.iswindows:
199 207 ... return expected
200 208 ... return check(root, cwd, myname)
201 209 >>> def winonly(root, cwd, myname, expected='aborted'):
202 210 ... if not pycompat.iswindows:
203 211 ... return expected
204 212 ... return check(root, cwd, myname)
205 213 >>> winonly(b'd:\\\\repo', b'c:\\\\dir', b'filename')
206 214 'aborted'
207 215 >>> winonly(b'c:\\\\repo', b'c:\\\\dir', b'filename')
208 216 'aborted'
209 217 >>> winonly(b'c:\\\\repo', b'c:\\\\', b'filename')
210 218 'aborted'
211 219 >>> winonly(b'c:\\\\repo', b'c:\\\\', b'repo\\\\filename',
212 220 ... b'filename')
213 221 'filename'
214 222 >>> winonly(b'c:\\\\repo', b'c:\\\\repo', b'filename', b'filename')
215 223 'filename'
216 224 >>> winonly(b'c:\\\\repo', b'c:\\\\repo\\\\subdir', b'filename',
217 225 ... b'subdir/filename')
218 226 'subdir/filename'
219 227 >>> unixonly(b'/repo', b'/dir', b'filename')
220 228 'aborted'
221 229 >>> unixonly(b'/repo', b'/', b'filename')
222 230 'aborted'
223 231 >>> unixonly(b'/repo', b'/', b'repo/filename', b'filename')
224 232 'filename'
225 233 >>> unixonly(b'/repo', b'/repo', b'filename', b'filename')
226 234 'filename'
227 235 >>> unixonly(b'/repo', b'/repo/subdir', b'filename', b'subdir/filename')
228 236 'subdir/filename'
229 237 """
230 238 if util.endswithsep(root):
231 239 rootsep = root
232 240 else:
233 241 rootsep = root + pycompat.ossep
234 242 name = myname
235 243 if not os.path.isabs(name):
236 244 name = os.path.join(root, cwd, name)
237 245 name = os.path.normpath(name)
238 246 if auditor is None:
239 247 auditor = pathauditor(root)
240 248 if name != rootsep and name.startswith(rootsep):
241 249 name = name[len(rootsep) :]
242 250 auditor(name)
243 251 return util.pconvert(name)
244 252 elif name == root:
245 253 return b''
246 254 else:
247 255 # Determine whether `name' is in the hierarchy at or beneath `root',
248 256 # by iterating name=dirname(name) until that causes no change (can't
249 257 # check name == '/', because that doesn't work on windows). The list
250 258 # `rel' holds the reversed list of components making up the relative
251 259 # file name we want.
252 260 rel = []
253 261 while True:
254 262 try:
255 263 s = util.samefile(name, root)
256 264 except OSError:
257 265 s = False
258 266 if s:
259 267 if not rel:
260 268 # name was actually the same as root (maybe a symlink)
261 269 return b''
262 270 rel.reverse()
263 271 name = os.path.join(*rel)
264 272 auditor(name)
265 273 return util.pconvert(name)
266 274 dirname, basename = util.split(name)
267 275 rel.append(basename)
268 276 if dirname == name:
269 277 break
270 278 name = dirname
271 279
272 280 # A common mistake is to use -R, but specify a file relative to the repo
273 281 # instead of cwd. Detect that case, and provide a hint to the user.
274 282 hint = None
275 283 try:
276 284 if cwd != root:
277 285 canonpath(root, root, myname, auditor)
278 286 relpath = util.pathto(root, cwd, b'')
279 287 if relpath.endswith(pycompat.ossep):
280 288 relpath = relpath[:-1]
281 289 hint = _(b"consider using '--cwd %s'") % relpath
282 290 except error.Abort:
283 291 pass
284 292
285 293 raise error.Abort(
286 294 _(b"%s not under root '%s'") % (myname, root), hint=hint
287 295 )
288 296
289 297
290 298 def normasprefix(path):
291 299 # type: (bytes) -> bytes
292 300 """normalize the specified path as path prefix
293 301
294 302 Returned value can be used safely for "p.startswith(prefix)",
295 303 "p[len(prefix):]", and so on.
296 304
297 305 For efficiency, this expects "path" argument to be already
298 306 normalized by "os.path.normpath", "os.path.realpath", and so on.
299 307
300 308 See also issue3033 for detail about need of this function.
301 309
302 310 >>> normasprefix(b'/foo/bar').replace(pycompat.ossep, b'/')
303 311 '/foo/bar/'
304 312 >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
305 313 '/'
306 314 """
307 315 d, p = os.path.splitdrive(path)
308 316 if len(p) != len(pycompat.ossep):
309 317 return path + pycompat.ossep
310 318 else:
311 319 return path
312 320
313 321
314 322 def finddirs(path):
315 323 # type: (bytes) -> Iterator[bytes]
316 324 pos = path.rfind(b'/')
317 325 while pos != -1:
318 326 yield path[:pos]
319 327 pos = path.rfind(b'/', 0, pos)
320 328 yield b''
321 329
322 330
323 331 def finddirs_rev_noroot(path: bytes) -> Iterator[bytes]:
324 332 pos = path.find(pycompat.ossep)
325 333 while pos != -1:
326 334 yield path[:pos]
327 335 pos = path.find(pycompat.ossep, pos + 1)
328 336
329 337
330 338 class dirs:
331 339 '''a multiset of directory names from a set of file paths'''
332 340
333 341 def __init__(self, map, only_tracked=False):
334 342 """
335 343 a dict map indicates a dirstate while a list indicates a manifest
336 344 """
337 345 self._dirs = {}
338 346 addpath = self.addpath
339 347 if isinstance(map, dict) and only_tracked:
340 348 for f, s in map.items():
341 349 if s.state != b'r':
342 350 addpath(f)
343 351 elif only_tracked:
344 352 msg = b"`only_tracked` is only supported with a dict source"
345 353 raise error.ProgrammingError(msg)
346 354 else:
347 355 for f in map:
348 356 addpath(f)
349 357
350 358 def addpath(self, path):
351 359 # type: (bytes) -> None
352 360 dirs = self._dirs
353 361 for base in finddirs(path):
354 362 if base.endswith(b'/'):
355 363 raise ValueError(
356 364 "found invalid consecutive slashes in path: %r" % base
357 365 )
358 366 if base in dirs:
359 367 dirs[base] += 1
360 368 return
361 369 dirs[base] = 1
362 370
363 371 def delpath(self, path):
364 372 # type: (bytes) -> None
365 373 dirs = self._dirs
366 374 for base in finddirs(path):
367 375 if dirs[base] > 1:
368 376 dirs[base] -= 1
369 377 return
370 378 del dirs[base]
371 379
372 380 def __iter__(self):
373 381 return iter(self._dirs)
374 382
375 383 def __contains__(self, d):
376 384 # type: (bytes) -> bool
377 385 return d in self._dirs
378 386
379 387
380 388 if hasattr(parsers, 'dirs'):
381 389 dirs = parsers.dirs
382 390
383 391 if rustdirs is not None:
384 392 dirs = rustdirs
385 393
386 394
387 395 # forward two methods from posixpath that do what we need, but we'd
388 396 # rather not let our internals know that we're thinking in posix terms
389 397 # - instead we'll let them be oblivious.
390 398 join = posixpath.join
391 399 dirname = posixpath.dirname # type: Callable[[bytes], bytes]
@@ -1,979 +1,997 b''
1 1 """ Mercurial phases support code
2 2
3 3 ---
4 4
5 5 Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
6 6 Logilab SA <contact@logilab.fr>
7 7 Augie Fackler <durin42@gmail.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License version 2 or any later version.
11 11
12 12 ---
13 13
14 14 This module implements most phase logic in mercurial.
15 15
16 16
17 17 Basic Concept
18 18 =============
19 19
20 20 A 'changeset phase' is an indicator that tells us how a changeset is
21 21 manipulated and communicated. The details of each phase is described
22 22 below, here we describe the properties they have in common.
23 23
24 24 Like bookmarks, phases are not stored in history and thus are not
25 25 permanent and leave no audit trail.
26 26
27 27 First, no changeset can be in two phases at once. Phases are ordered,
28 28 so they can be considered from lowest to highest. The default, lowest
29 29 phase is 'public' - this is the normal phase of existing changesets. A
30 30 child changeset can not be in a lower phase than its parents.
31 31
32 32 These phases share a hierarchy of traits:
33 33
34 34 immutable shared
35 35 public: X X
36 36 draft: X
37 37 secret:
38 38
39 39 Local commits are draft by default.
40 40
41 41 Phase Movement and Exchange
42 42 ===========================
43 43
44 44 Phase data is exchanged by pushkey on pull and push. Some servers have
45 45 a publish option set, we call such a server a "publishing server".
46 46 Pushing a draft changeset to a publishing server changes the phase to
47 47 public.
48 48
49 49 A small list of fact/rules define the exchange of phase:
50 50
51 51 * old client never changes server states
52 52 * pull never changes server states
53 53 * publish and old server changesets are seen as public by client
54 54 * any secret changeset seen in another repository is lowered to at
55 55 least draft
56 56
57 57 Here is the final table summing up the 49 possible use cases of phase
58 58 exchange:
59 59
60 60 server
61 61 old publish non-publish
62 62 N X N D P N D P
63 63 old client
64 64 pull
65 65 N - X/X - X/D X/P - X/D X/P
66 66 X - X/X - X/D X/P - X/D X/P
67 67 push
68 68 X X/X X/X X/P X/P X/P X/D X/D X/P
69 69 new client
70 70 pull
71 71 N - P/X - P/D P/P - D/D P/P
72 72 D - P/X - P/D P/P - D/D P/P
73 73 P - P/X - P/D P/P - P/D P/P
74 74 push
75 75 D P/X P/X P/P P/P P/P D/D D/D P/P
76 76 P P/X P/X P/P P/P P/P P/P P/P P/P
77 77
78 78 Legend:
79 79
80 80 A/B = final state on client / state on server
81 81
82 82 * N = new/not present,
83 83 * P = public,
84 84 * D = draft,
85 85 * X = not tracked (i.e., the old client or server has no internal
86 86 way of recording the phase.)
87 87
88 88 passive = only pushes
89 89
90 90
91 91 A cell here can be read like this:
92 92
93 93 "When a new client pushes a draft changeset (D) to a publishing
94 94 server where it's not present (N), it's marked public on both
95 95 sides (P/P)."
96 96
97 97 Note: old client behave as a publishing server with draft only content
98 98 - other people see it as public
99 99 - content is pushed as draft
100 100
101 101 """
102 102
103 103
104 104 import struct
105 import typing
106
107 from typing import (
108 Any,
109 Callable,
110 Dict,
111 Iterable,
112 List,
113 Optional,
114 Set,
115 Tuple,
116 )
105 117
106 118 from .i18n import _
107 119 from .node import (
108 120 bin,
109 121 hex,
110 122 nullrev,
111 123 short,
112 124 wdirrev,
113 125 )
114 126 from . import (
115 127 error,
116 128 pycompat,
117 129 requirements,
118 130 smartset,
119 131 txnutil,
120 132 util,
121 133 )
122 134
123 if pycompat.TYPE_CHECKING:
124 from typing import (
125 Any,
126 Callable,
127 Dict,
128 Iterable,
129 List,
130 Optional,
131 Set,
132 Tuple,
133 )
135 # keeps pyflakes happy
136 assert [
137 Any,
138 Callable,
139 Dict,
140 Iterable,
141 List,
142 Optional,
143 Set,
144 Tuple,
145 ]
146
147 Phaseroots = Dict[int, Set[bytes]]
148
149 if typing.TYPE_CHECKING:
134 150 from . import (
135 151 localrepo,
136 152 ui as uimod,
137 153 )
138 154
139 Phaseroots = Dict[int, Set[bytes]]
155 # keeps pyflakes happy
156 assert [uimod]
157
140 158 Phasedefaults = List[
141 159 Callable[[localrepo.localrepository, Phaseroots], Phaseroots]
142 160 ]
143 161
144 162
145 163 _fphasesentry = struct.Struct(b'>i20s')
146 164
147 165 # record phase index
148 166 public, draft, secret = range(3) # type: int
149 167 archived = 32 # non-continuous for compatibility
150 168 internal = 96 # non-continuous for compatibility
151 169 allphases = (public, draft, secret, archived, internal)
152 170 trackedphases = (draft, secret, archived, internal)
153 171 not_public_phases = trackedphases
154 172 # record phase names
155 173 cmdphasenames = [b'public', b'draft', b'secret'] # known to `hg phase` command
156 174 phasenames = dict(enumerate(cmdphasenames))
157 175 phasenames[archived] = b'archived'
158 176 phasenames[internal] = b'internal'
159 177 # map phase name to phase number
160 178 phasenumber = {name: phase for phase, name in phasenames.items()}
161 179 # like phasenumber, but also include maps for the numeric and binary
162 180 # phase number to the phase number
163 181 phasenumber2 = phasenumber.copy()
164 182 phasenumber2.update({phase: phase for phase in phasenames})
165 183 phasenumber2.update({b'%i' % phase: phase for phase in phasenames})
166 184 # record phase property
167 185 mutablephases = (draft, secret, archived, internal)
168 186 relevant_mutable_phases = (draft, secret) # could be obsolete or unstable
169 187 remotehiddenphases = (secret, archived, internal)
170 188 localhiddenphases = (internal, archived)
171 189
172 190 all_internal_phases = tuple(p for p in allphases if p & internal)
173 191 # We do not want any internal content to exit the repository, ever.
174 192 no_bundle_phases = all_internal_phases
175 193
176 194
177 195 def supportinternal(repo):
178 196 # type: (localrepo.localrepository) -> bool
179 197 """True if the internal phase can be used on a repository"""
180 198 return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements
181 199
182 200
183 201 def supportarchived(repo):
184 202 # type: (localrepo.localrepository) -> bool
185 203 """True if the archived phase can be used on a repository"""
186 204 return requirements.ARCHIVED_PHASE_REQUIREMENT in repo.requirements
187 205
188 206
189 207 def _readroots(repo, phasedefaults=None):
190 208 # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool]
191 209 """Read phase roots from disk
192 210
193 211 phasedefaults is a list of fn(repo, roots) callable, which are
194 212 executed if the phase roots file does not exist. When phases are
195 213 being initialized on an existing repository, this could be used to
196 214 set selected changesets phase to something else than public.
197 215
198 216 Return (roots, dirty) where dirty is true if roots differ from
199 217 what is being stored.
200 218 """
201 219 repo = repo.unfiltered()
202 220 dirty = False
203 221 roots = {i: set() for i in allphases}
204 222 try:
205 223 f, pending = txnutil.trypending(repo.root, repo.svfs, b'phaseroots')
206 224 try:
207 225 for line in f:
208 226 phase, nh = line.split()
209 227 roots[int(phase)].add(bin(nh))
210 228 finally:
211 229 f.close()
212 230 except FileNotFoundError:
213 231 if phasedefaults:
214 232 for f in phasedefaults:
215 233 roots = f(repo, roots)
216 234 dirty = True
217 235 return roots, dirty
218 236
219 237
220 238 def binaryencode(phasemapping):
221 239 # type: (Dict[int, List[bytes]]) -> bytes
222 240 """encode a 'phase -> nodes' mapping into a binary stream
223 241
224 242 The revision lists are encoded as (phase, root) pairs.
225 243 """
226 244 binarydata = []
227 245 for phase, nodes in phasemapping.items():
228 246 for head in nodes:
229 247 binarydata.append(_fphasesentry.pack(phase, head))
230 248 return b''.join(binarydata)
231 249
232 250
233 251 def binarydecode(stream):
234 252 # type: (...) -> Dict[int, List[bytes]]
235 253 """decode a binary stream into a 'phase -> nodes' mapping
236 254
237 255 The (phase, root) pairs are turned back into a dictionary with
238 256 the phase as index and the aggregated roots of that phase as value."""
239 257 headsbyphase = {i: [] for i in allphases}
240 258 entrysize = _fphasesentry.size
241 259 while True:
242 260 entry = stream.read(entrysize)
243 261 if len(entry) < entrysize:
244 262 if entry:
245 263 raise error.Abort(_(b'bad phase-heads stream'))
246 264 break
247 265 phase, node = _fphasesentry.unpack(entry)
248 266 headsbyphase[phase].append(node)
249 267 return headsbyphase
250 268
251 269
252 270 def _sortedrange_insert(data, idx, rev, t):
253 271 merge_before = False
254 272 if idx:
255 273 r1, t1 = data[idx - 1]
256 274 merge_before = r1[-1] + 1 == rev and t1 == t
257 275 merge_after = False
258 276 if idx < len(data):
259 277 r2, t2 = data[idx]
260 278 merge_after = r2[0] == rev + 1 and t2 == t
261 279
262 280 if merge_before and merge_after:
263 281 data[idx - 1] = (range(r1[0], r2[-1] + 1), t)
264 282 data.pop(idx)
265 283 elif merge_before:
266 284 data[idx - 1] = (range(r1[0], rev + 1), t)
267 285 elif merge_after:
268 286 data[idx] = (range(rev, r2[-1] + 1), t)
269 287 else:
270 288 data.insert(idx, (range(rev, rev + 1), t))
271 289
272 290
273 291 def _sortedrange_split(data, idx, rev, t):
274 292 r1, t1 = data[idx]
275 293 if t == t1:
276 294 return
277 295 t = (t1[0], t[1])
278 296 if len(r1) == 1:
279 297 data.pop(idx)
280 298 _sortedrange_insert(data, idx, rev, t)
281 299 elif r1[0] == rev:
282 300 data[idx] = (range(rev + 1, r1[-1] + 1), t1)
283 301 _sortedrange_insert(data, idx, rev, t)
284 302 elif r1[-1] == rev:
285 303 data[idx] = (range(r1[0], rev), t1)
286 304 _sortedrange_insert(data, idx + 1, rev, t)
287 305 else:
288 306 data[idx : idx + 1] = [
289 307 (range(r1[0], rev), t1),
290 308 (range(rev, rev + 1), t),
291 309 (range(rev + 1, r1[-1] + 1), t1),
292 310 ]
293 311
294 312
295 313 def _trackphasechange(data, rev, old, new):
296 314 """add a phase move to the <data> list of ranges
297 315
298 316 If data is None, nothing happens.
299 317 """
300 318 if data is None:
301 319 return
302 320
303 321 # If data is empty, create a one-revision range and done
304 322 if not data:
305 323 data.insert(0, (range(rev, rev + 1), (old, new)))
306 324 return
307 325
308 326 low = 0
309 327 high = len(data)
310 328 t = (old, new)
311 329 while low < high:
312 330 mid = (low + high) // 2
313 331 revs = data[mid][0]
314 332 revs_low = revs[0]
315 333 revs_high = revs[-1]
316 334
317 335 if rev >= revs_low and rev <= revs_high:
318 336 _sortedrange_split(data, mid, rev, t)
319 337 return
320 338
321 339 if revs_low == rev + 1:
322 340 if mid and data[mid - 1][0][-1] == rev:
323 341 _sortedrange_split(data, mid - 1, rev, t)
324 342 else:
325 343 _sortedrange_insert(data, mid, rev, t)
326 344 return
327 345
328 346 if revs_high == rev - 1:
329 347 if mid + 1 < len(data) and data[mid + 1][0][0] == rev:
330 348 _sortedrange_split(data, mid + 1, rev, t)
331 349 else:
332 350 _sortedrange_insert(data, mid + 1, rev, t)
333 351 return
334 352
335 353 if revs_low > rev:
336 354 high = mid
337 355 else:
338 356 low = mid + 1
339 357
340 358 if low == len(data):
341 359 data.append((range(rev, rev + 1), t))
342 360 return
343 361
344 362 r1, t1 = data[low]
345 363 if r1[0] > rev:
346 364 data.insert(low, (range(rev, rev + 1), t))
347 365 else:
348 366 data.insert(low + 1, (range(rev, rev + 1), t))
349 367
350 368
351 369 class phasecache:
352 370 def __init__(self, repo, phasedefaults, _load=True):
353 371 # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None
354 372 if _load:
355 373 # Cheap trick to allow shallow-copy without copy module
356 374 self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
357 375 self._loadedrevslen = 0
358 376 self._phasesets = None
359 377 self.filterunknown(repo)
360 378 self.opener = repo.svfs
361 379
362 380 def hasnonpublicphases(self, repo):
363 381 # type: (localrepo.localrepository) -> bool
364 382 """detect if there are revisions with non-public phase"""
365 383 repo = repo.unfiltered()
366 384 cl = repo.changelog
367 385 if len(cl) >= self._loadedrevslen:
368 386 self.invalidate()
369 387 self.loadphaserevs(repo)
370 388 return any(
371 389 revs for phase, revs in self.phaseroots.items() if phase != public
372 390 )
373 391
374 392 def nonpublicphaseroots(self, repo):
375 393 # type: (localrepo.localrepository) -> Set[bytes]
376 394 """returns the roots of all non-public phases
377 395
378 396 The roots are not minimized, so if the secret revisions are
379 397 descendants of draft revisions, their roots will still be present.
380 398 """
381 399 repo = repo.unfiltered()
382 400 cl = repo.changelog
383 401 if len(cl) >= self._loadedrevslen:
384 402 self.invalidate()
385 403 self.loadphaserevs(repo)
386 404 return set().union(
387 405 *[
388 406 revs
389 407 for phase, revs in self.phaseroots.items()
390 408 if phase != public
391 409 ]
392 410 )
393 411
394 412 def getrevset(self, repo, phases, subset=None):
395 413 # type: (localrepo.localrepository, Iterable[int], Optional[Any]) -> Any
396 414 # TODO: finish typing this
397 415 """return a smartset for the given phases"""
398 416 self.loadphaserevs(repo) # ensure phase's sets are loaded
399 417 phases = set(phases)
400 418 publicphase = public in phases
401 419
402 420 if publicphase:
403 421 # In this case, phases keeps all the *other* phases.
404 422 phases = set(allphases).difference(phases)
405 423 if not phases:
406 424 return smartset.fullreposet(repo)
407 425
408 426 # fast path: _phasesets contains the interesting sets,
409 427 # might only need a union and post-filtering.
410 428 revsneedscopy = False
411 429 if len(phases) == 1:
412 430 [p] = phases
413 431 revs = self._phasesets[p]
414 432 revsneedscopy = True # Don't modify _phasesets
415 433 else:
416 434 # revs has the revisions in all *other* phases.
417 435 revs = set.union(*[self._phasesets[p] for p in phases])
418 436
419 437 def _addwdir(wdirsubset, wdirrevs):
420 438 if wdirrev in wdirsubset and repo[None].phase() in phases:
421 439 if revsneedscopy:
422 440 wdirrevs = wdirrevs.copy()
423 441 # The working dir would never be in the # cache, but it was in
424 442 # the subset being filtered for its phase (or filtered out,
425 443 # depending on publicphase), so add it to the output to be
426 444 # included (or filtered out).
427 445 wdirrevs.add(wdirrev)
428 446 return wdirrevs
429 447
430 448 if not publicphase:
431 449 if repo.changelog.filteredrevs:
432 450 revs = revs - repo.changelog.filteredrevs
433 451
434 452 if subset is None:
435 453 return smartset.baseset(revs)
436 454 else:
437 455 revs = _addwdir(subset, revs)
438 456 return subset & smartset.baseset(revs)
439 457 else:
440 458 if subset is None:
441 459 subset = smartset.fullreposet(repo)
442 460
443 461 revs = _addwdir(subset, revs)
444 462
445 463 if not revs:
446 464 return subset
447 465 return subset.filter(lambda r: r not in revs)
448 466
449 467 def copy(self):
450 468 # Shallow copy meant to ensure isolation in
451 469 # advance/retractboundary(), nothing more.
452 470 ph = self.__class__(None, None, _load=False)
453 471 ph.phaseroots = self.phaseroots.copy()
454 472 ph.dirty = self.dirty
455 473 ph.opener = self.opener
456 474 ph._loadedrevslen = self._loadedrevslen
457 475 ph._phasesets = self._phasesets
458 476 return ph
459 477
460 478 def replace(self, phcache):
461 479 """replace all values in 'self' with content of phcache"""
462 480 for a in (
463 481 'phaseroots',
464 482 'dirty',
465 483 'opener',
466 484 '_loadedrevslen',
467 485 '_phasesets',
468 486 ):
469 487 setattr(self, a, getattr(phcache, a))
470 488
471 489 def _getphaserevsnative(self, repo):
472 490 repo = repo.unfiltered()
473 491 return repo.changelog.computephases(self.phaseroots)
474 492
    def _computephaserevspure(self, repo):
        """Pure-Python fallback populating self._phasesets from phase roots.

        Walks the tracked phases from highest to lowest so that a revision
        already claimed by a higher phase is excluded from lower ones.
        """
        repo = repo.unfiltered()
        cl = repo.changelog
        self._phasesets = {phase: set() for phase in allphases}
        lowerroots = set()
        for phase in reversed(trackedphases):
            roots = pycompat.maplist(cl.rev, self.phaseroots[phase])
            if roots:
                ps = set(cl.descendants(roots))
                for root in roots:
                    ps.add(root)
                # revisions owned by an already-processed (higher) phase
                # must not be counted in this one
                ps.difference_update(lowerroots)
                lowerroots.update(ps)
                self._phasesets[phase] = ps
        self._loadedrevslen = len(cl)
490 508
491 509 def loadphaserevs(self, repo):
492 510 # type: (localrepo.localrepository) -> None
493 511 """ensure phase information is loaded in the object"""
494 512 if self._phasesets is None:
495 513 try:
496 514 res = self._getphaserevsnative(repo)
497 515 self._loadedrevslen, self._phasesets = res
498 516 except AttributeError:
499 517 self._computephaserevspure(repo)
500 518
    def invalidate(self):
        """drop the cached phase sets; they are recomputed on next use"""
        self._loadedrevslen = 0
        self._phasesets = None
504 522
    def phase(self, repo, rev):
        # type: (localrepo.localrepository, int) -> int
        """return the phase number of revision ``rev`` (public by default)"""
        # We need a repo argument here to be able to build _phasesets
        # if necessary. The repository instance is not stored in
        # phasecache to avoid reference cycles. The changelog instance
        # is not stored because it is a filecache() property and can
        # be replaced without us being notified.
        if rev == nullrev:
            return public
        if rev < nullrev:
            raise ValueError(_(b'cannot lookup negative revision'))
        # cache does not cover this revision yet: rebuild it lazily
        if rev >= self._loadedrevslen:
            self.invalidate()
            self.loadphaserevs(repo)
        for phase in trackedphases:
            if rev in self._phasesets[phase]:
                return phase
        return public
523 541
    def write(self):
        """persist the phase roots to the ``phaseroots`` file when dirty"""
        if not self.dirty:
            return
        f = self.opener(b'phaseroots', b'w', atomictemp=True, checkambig=True)
        try:
            self._write(f)
        finally:
            f.close()
532 550
533 551 def _write(self, fp):
534 552 for phase, roots in self.phaseroots.items():
535 553 for h in sorted(roots):
536 554 fp.write(b'%i %s\n' % (phase, hex(h)))
537 555 self.dirty = False
538 556
    def _updateroots(self, phase, newroots, tr):
        """replace the roots of ``phase`` and schedule a write in ``tr``"""
        self.phaseroots[phase] = newroots
        self.invalidate()
        self.dirty = True

        # the transaction will write 'phaseroots' for us and fire hooks
        tr.addfilegenerator(b'phase', (b'phaseroots',), self._write)
        tr.hookargs[b'phases_moved'] = b'1'
546 564
    def registernew(self, repo, tr, targetphase, revs):
        """register brand-new revisions ``revs`` with phase ``targetphase``"""
        repo = repo.unfiltered()
        self._retractboundary(repo, tr, targetphase, [], revs=revs)
        if tr is not None and b'phases' in tr.changes:
            phasetracking = tr.changes[b'phases']
            phase = self.phase
            for rev in sorted(revs):
                revphase = phase(repo, rev)
                # old phase is None: these revisions did not exist before
                _trackphasechange(phasetracking, rev, None, revphase)
        repo.invalidatevolatilesets()
557 575
    def advanceboundary(
        self, repo, tr, targetphase, nodes, revs=None, dryrun=None
    ):
        """Set all 'nodes' to phase 'targetphase'

        Nodes with a phase lower than 'targetphase' are not affected.

        If dryrun is True, no actions will be performed

        Returns a set of revs whose phase is changed or should be changed
        """
        # Be careful to preserve shallow-copied values: do not update
        # phaseroots values, replace them.
        if revs is None:
            revs = []
        if tr is None:
            phasetracking = None
        else:
            phasetracking = tr.changes.get(b'phases')

        repo = repo.unfiltered()
        revs = [repo[n].rev() for n in nodes] + [r for r in revs]

        changes = set()  # set of revisions to be changed
        delroots = []  # set of root deleted by this path
        # walk the phases above the target, highest first relevant set
        for phase in (phase for phase in allphases if phase > targetphase):
            # filter nodes that are not in a compatible phase already
            revs = [rev for rev in revs if self.phase(repo, rev) >= phase]
            if not revs:
                break  # no roots to move anymore

            olds = self.phaseroots[phase]

            affected = repo.revs(b'%ln::%ld', olds, revs)
            changes.update(affected)
            if dryrun:
                continue
            for r in affected:
                _trackphasechange(
                    phasetracking, r, self.phase(repo, r), targetphase
                )

            roots = {
                ctx.node()
                for ctx in repo.set(b'roots((%ln::) - %ld)', olds, affected)
            }
            if olds != roots:
                self._updateroots(phase, roots, tr)
                # some roots may need to be declared for lower phases
                delroots.extend(olds - roots)
        if not dryrun:
            # declare deleted root in the target phase
            if targetphase != 0:
                self._retractboundary(repo, tr, targetphase, delroots)
        repo.invalidatevolatilesets()
        return changes
614 632
    def retractboundary(self, repo, tr, targetphase, nodes):
        """move ``nodes`` back to phase ``targetphase``, tracking moved revs"""
        oldroots = {
            phase: revs
            for phase, revs in self.phaseroots.items()
            if phase <= targetphase
        }
        if tr is None:
            phasetracking = None
        else:
            phasetracking = tr.changes.get(b'phases')
        repo = repo.unfiltered()
        if (
            self._retractboundary(repo, tr, targetphase, nodes)
            and phasetracking is not None
        ):

            # find the affected revisions
            new = self.phaseroots[targetphase]
            old = oldroots[targetphase]
            affected = set(repo.revs(b'(%ln::) - (%ln::)', new, old))

            # find the phase of the affected revision
            for phase in range(targetphase, -1, -1):
                if phase:
                    roots = oldroots.get(phase, [])
                    revs = set(repo.revs(b'%ln::%ld', roots, affected))
                    affected -= revs
                else:  # public phase
                    revs = affected
                for r in sorted(revs):
                    _trackphasechange(phasetracking, r, phase, targetphase)
        repo.invalidatevolatilesets()
647 665
    def _retractboundary(self, repo, tr, targetphase, nodes, revs=None):
        """add roots so every rev in ``nodes``/``revs`` reaches ``targetphase``

        Returns True when the recorded roots actually changed.
        """
        # Be careful to preserve shallow-copied values: do not update
        # phaseroots values, replace them.
        if revs is None:
            revs = []
        if (
            targetphase == internal
            and not supportinternal(repo)
            or targetphase == archived
            and not supportarchived(repo)
        ):
            name = phasenames[targetphase]
            msg = b'this repository does not support the %s phase' % name
            raise error.ProgrammingError(msg)

        repo = repo.unfiltered()
        torev = repo.changelog.rev
        tonode = repo.changelog.node
        currentroots = {torev(node) for node in self.phaseroots[targetphase]}
        finalroots = oldroots = set(currentroots)
        newroots = [torev(node) for node in nodes] + [r for r in revs]
        # only keep candidates whose phase actually needs to be retracted
        newroots = [
            rev for rev in newroots if self.phase(repo, rev) < targetphase
        ]

        if newroots:
            if nullrev in newroots:
                raise error.Abort(_(b'cannot change null revision phase'))
            currentroots.update(newroots)

            # Only compute new roots for revs above the roots that are being
            # retracted.
            minnewroot = min(newroots)
            aboveroots = [rev for rev in currentroots if rev >= minnewroot]
            updatedroots = repo.revs(b'roots(%ld::)', aboveroots)

            finalroots = {rev for rev in currentroots if rev < minnewroot}
            finalroots.update(updatedroots)
        if finalroots != oldroots:
            self._updateroots(
                targetphase, {tonode(rev) for rev in finalroots}, tr
            )
            return True
        return False
692 710
    def filterunknown(self, repo):
        # type: (localrepo.localrepository) -> None
        """remove unknown nodes from the phase boundary

        Nothing is lost as unknown nodes only hold data for their descendants.
        """
        filtered = False
        has_node = repo.changelog.index.has_node  # to filter unknown nodes
        for phase, nodes in self.phaseroots.items():
            missing = sorted(node for node in nodes if not has_node(node))
            if missing:
                for mnode in missing:
                    repo.ui.debug(
                        b'removing unknown node %s from %i-phase boundary\n'
                        % (short(mnode), phase)
                    )
                # missing is a subset of nodes, so this is a pure removal
                nodes.symmetric_difference_update(missing)
                filtered = True
        if filtered:
            self.dirty = True
        # filterunknown is called by repo.destroyed, we may have no changes in
        # root but _phasesets contents is certainly invalid (or at least we
        # have not proper way to check that). related to issue 3858.
        #
        # The other caller is __init__ that have no _phasesets initialized
        # anyway. If this change we should consider adding a dedicated
        # "destroyed" function to phasecache or a proper cache key mechanism
        # (see branchmap one)
        self.invalidate()
722 740
723 741
def advanceboundary(repo, tr, targetphase, nodes, revs=None, dryrun=None):
    """Add nodes to a phase changing other nodes phases if necessary.

    This function move boundary *forward* this means that all nodes
    are set in the target phase or kept in a *lower* phase.

    Simplify boundary to contains phase roots only.

    If dryrun is True, no actions will be performed

    Returns a set of revs whose phase is changed or should be changed
    """
    # operate on a copy so a dry run never touches the real cache
    cache = repo._phasecache.copy()
    moved = cache.advanceboundary(
        repo,
        tr,
        targetphase,
        nodes,
        revs=[] if revs is None else revs,
        dryrun=dryrun,
    )
    if not dryrun:
        repo._phasecache.replace(cache)
    return moved
745 763
746 764
def retractboundary(repo, tr, targetphase, nodes):
    """Set nodes back to a phase changing other nodes phases if
    necessary.

    This function move boundary *backward* this means that all nodes
    are set in the target phase or kept in a *higher* phase.

    Simplify boundary to contains phase roots only."""
    # mutate a copy, then swap it in atomically
    cache = repo._phasecache.copy()
    cache.retractboundary(repo, tr, targetphase, nodes)
    repo._phasecache.replace(cache)
758 776
759 777
def registernew(repo, tr, targetphase, revs):
    """register a new revision and its phase

    Code adding revisions to the repository should use this function to
    set new changeset in their target phase (or higher).
    """
    # mutate a copy, then swap it in atomically
    cache = repo._phasecache.copy()
    cache.registernew(repo, tr, targetphase, revs)
    repo._phasecache.replace(cache)
769 787
770 788
def listphases(repo):
    # type: (localrepo.localrepository) -> Dict[bytes, bytes]
    """List phases root for serialization over pushkey"""
    # Use ordered dictionary so behavior is deterministic.
    keys = util.sortdict()
    draftvalue = b'%i' % draft
    cl = repo.unfiltered().changelog
    cache = repo._phasecache
    for root in cache.phaseroots[draft]:
        if cache.phase(repo, cl.rev(root)) <= draft:
            keys[hex(root)] = draftvalue

    if repo.publishing():
        # Add an extra data to let remote know we are a publishing
        # repo. Publishing repo can't just pretend they are old repo.
        # When pushing to a publishing repo, the client still need to
        # push phase boundary
        #
        # Push do not only push changeset. It also push phase data.
        # New phase data may apply to common changeset which won't be
        # push (as they are common). Here is a very simple example:
        #
        # 1) repo A push changeset X as draft to repo B
        # 2) repo B make changeset X public
        # 3) repo B push to repo A. X is not pushed but the data that
        #    X as now public should
        #
        # The server can't handle it on it's own as it has no idea of
        # client phase data.
        keys[b'publishing'] = b'True'
    return keys
801 819
802 820
def pushphase(repo, nhex, oldphasestr, newphasestr):
    # type: (localrepo.localrepository, bytes, bytes, bytes) -> bool
    """List phases root for serialization over pushkey"""
    repo = repo.unfiltered()
    with repo.lock():
        currentphase = repo[nhex].phase()
        # let's avoid negative index surprise
        newphase = abs(int(newphasestr))
        oldphase = abs(int(oldphasestr))
        if currentphase == oldphase and newphase < oldphase:
            with repo.transaction(b'pushkey-phase') as tr:
                advanceboundary(repo, tr, newphase, [bin(nhex)])
            return True
        # otherwise we may have raced; report success only if the
        # repository already ended up in the requested state
        return currentphase == newphase
820 838
821 839
def subsetphaseheads(repo, subset):
    """Finds the phase heads for a subset of a history

    Returns a mapping from phase number to the list of phase head nodes
    for that phase within ``subset``.
    """
    cl = repo.changelog
    return {
        phase: [
            cl.node(r)
            for r in repo.revs(b"heads(%%ln & _phase(%d))" % phase, subset)
        ]
        for phase in allphases
    }
835 853
836 854
def updatephases(repo, trgetter, headsbyphase):
    """Updates the repo with the given phase heads"""
    # Advance phase boundaries of all phases in turn; fetch the
    # transaction lazily so no-op runs create no empty transaction.
    for phase in allphases:
        heads = [
            c.node()
            for c in repo.set(b'%ln - _phase(%s)', headsbyphase[phase], phase)
        ]
        if heads:
            advanceboundary(repo, trgetter(), phase, heads)
849 867
850 868
def analyzeremotephases(repo, subset, roots):
    """Compute phases heads and root in a subset of node from root dict

    * subset is heads of the subset
    * roots is {<nodeid> => phase} mapping. key and value are string.

    Accept unknown element input

    Returns a ``(publicheads, draftroots)`` pair of node lists.
    """
    repo = repo.unfiltered()
    # build list from dictionary
    draftroots = []
    has_node = repo.changelog.index.has_node  # to filter unknown nodes
    for nhex, phase in roots.items():
        if nhex == b'publishing':  # ignore data related to publish option
            continue
        node = bin(nhex)
        phase = int(phase)
        if phase == public:
            # public roots are implicit (nullid); anything else is bogus
            if node != repo.nullid:
                repo.ui.warn(
                    _(
                        b'ignoring inconsistent public root'
                        b' from remote: %s\n'
                    )
                    % nhex
                )
        elif phase == draft:
            if has_node(node):
                draftroots.append(node)
        else:
            repo.ui.warn(
                _(b'ignoring unexpected root from remote: %i %s\n')
                % (phase, nhex)
            )
    # compute heads
    publicheads = newheads(repo, subset, draftroots)
    return publicheads, draftroots
888 906
889 907
class remotephasessummary:
    """summarize phase information on the remote side

    :publishing: True is the remote is publishing
    :publicheads: list of remote public phase heads (nodes)
    :draftheads: list of remote draft phase heads (nodes)
    :draftroots: list of remote draft phase root (nodes)
    """

    def __init__(self, repo, remotesubset, remoteroots):
        unfi = repo.unfiltered()
        # raw {hexnode: phase} mapping as received over pushkey
        self._allremoteroots = remoteroots

        self.publishing = remoteroots.get(b'publishing', False)

        ana = analyzeremotephases(repo, remotesubset, remoteroots)
        self.publicheads, self.draftroots = ana
        # Get the list of all "heads" revs draft on remote
        dheads = unfi.set(b'heads(%ln::%ln)', self.draftroots, remotesubset)
        self.draftheads = [c.node() for c in dheads]
910 928
911 929
def newheads(repo, heads, roots):
    """compute new head of a subset minus another

    * `heads`: define the first subset
    * `roots`: define the second we subtract from the first

    Returns the resulting head nodes, sorted by revision number.
    """
    # prevent an import cycle
    # phases > dagop > patch > copies > scmutil > obsolete > obsutil > phases
    from . import dagop

    repo = repo.unfiltered()
    cl = repo.changelog
    rev = cl.index.get_rev
    if not roots:
        return heads
    if not heads or heads == [repo.nullid]:
        return []
    # The logic operated on revisions, convert arguments early for convenience
    new_heads = {rev(n) for n in heads if n != repo.nullid}
    roots = [rev(n) for n in roots]
    # compute the area we need to remove
    affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads)
    # heads in the area are no longer heads
    new_heads.difference_update(affected_zone)
    # revisions in the area have children outside of it,
    # They might be new heads
    candidates = repo.revs(
        b"parents(%ld + (%ld and merge())) and not null", roots, affected_zone
    )
    candidates -= affected_zone
    if new_heads or candidates:
        # remove candidate that are ancestors of other heads
        new_heads.update(candidates)
        prunestart = repo.revs(b"parents(%ld) and not null", new_heads)
        pruned = dagop.reachableroots(repo, candidates, prunestart)
        new_heads.difference_update(pruned)

    return pycompat.maplist(cl.node, sorted(new_heads))
949 967
950 968
def newcommitphase(ui):
    # type: (uimod.ui) -> int
    """helper to get the target phase of new commit

    Handle all possible values for the phases.new-commit options.

    """
    configured = ui.config(b'phases', b'new-commit')
    phase = phasenumber2.get(configured)
    if phase is None:
        raise error.ConfigError(
            _(b"phases.new-commit: not a valid phase name ('%s')") % configured
        )
    return phase
965 983
966 984
def hassecret(repo):
    # type: (localrepo.localrepository) -> bool
    """utility function that check if a repo have any secret changeset."""
    return len(repo._phasecache.phaseroots[secret]) > 0
971 989
972 990
def preparehookargs(node, old, new):
    # type: (bytes, Optional[int], Optional[int]) -> Dict[bytes, bytes]
    """build the argument mapping handed to phase-change hooks"""
    # an absent old phase (new changeset) is rendered as an empty string
    oldname = b'' if old is None else phasenames[old]
    return {b'node': node, b'oldphase': oldname, b'phase': phasenames[new]}
@@ -1,816 +1,809 b''
1 1 # posix.py - Posix utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import errno
10 10 import fcntl
11 11 import getpass
12 12 import grp
13 13 import os
14 14 import pwd
15 15 import re
16 16 import select
17 17 import stat
18 18 import sys
19 19 import tempfile
20 20 import typing
21 21 import unicodedata
22 22
23 23 from typing import (
24 24 Any,
25 25 AnyStr,
26 26 Iterable,
27 27 Iterator,
28 28 List,
29 29 Match,
30 30 NoReturn,
31 31 Optional,
32 32 Sequence,
33 33 Tuple,
34 34 Union,
35 35 )
36 36
37 37 from .i18n import _
38 38 from .pycompat import (
39 39 open,
40 40 )
41 41 from . import (
42 42 encoding,
43 43 error,
44 44 policy,
45 45 pycompat,
46 46 )
47 47
48 48 osutil = policy.importmod('osutil')
49 49
50 50 normpath = os.path.normpath
51 51 samestat = os.path.samestat
52 52 abspath = os.path.abspath # re-exports
53 53
try:
    oslink = os.link
except AttributeError:
    # Some platforms build Python without os.link on systems that are
    # vaguely unix-like but don't have hardlink support. For those
    # poor souls, just say we tried and that it failed so we fall back
    # to copies.
    def oslink(src: bytes, dst: bytes) -> NoReturn:
        # EINVAL mimics a failed link(2) call so callers fall back cleanly
        raise OSError(
            errno.EINVAL, b'hardlinks not supported: %s to %s' % (src, dst)
        )
66 66
67 67 readlink = os.readlink
68 68 unlink = os.unlink
69 69 rename = os.rename
70 70 removedirs = os.removedirs
71 71
if typing.TYPE_CHECKING:
    # Narrow the aliased os/os.path callables above to the bytes-based
    # signatures Mercurial actually uses, so Windows and POSIX both
    # type-check against a single consistent definition.

    def normpath(path: bytes) -> bytes:
        raise NotImplementedError

    def abspath(path: AnyStr) -> AnyStr:
        raise NotImplementedError

    def oslink(src: bytes, dst: bytes) -> None:
        raise NotImplementedError

    def readlink(path: bytes) -> bytes:
        raise NotImplementedError

    def unlink(path: bytes) -> None:
        raise NotImplementedError

    def rename(src: bytes, dst: bytes) -> None:
        raise NotImplementedError

    def removedirs(name: bytes) -> None:
        raise NotImplementedError
101 94
102 95
expandglobs: bool = False

# Read the process umask without permanently changing it: os.umask()
# both sets a new mask and returns the previous one, so set-then-restore.
umask: int = os.umask(0)
os.umask(umask)

posixfile = open
109 102
110 103
def split(p: bytes) -> Tuple[bytes, bytes]:
    """Same as posixpath.split, but faster

    >>> import posixpath
    >>> for f in [b'/absolute/path/to/file',
    ...           b'relative/path/to/file',
    ...           b'file_alone',
    ...           b'path/to/directory/',
    ...           b'/multiple/path//separators',
    ...           b'/file_at_root',
    ...           b'///multiple_leading_separators_at_root',
    ...           b'']:
    ...     assert split(f) == posixpath.split(f), f
    """
    slash = p.rfind(b'/')
    if slash < 0:
        # no separator at all: everything is the tail
        return b'', p
    head, tail = p[:slash], p[slash + 1 :]
    stripped = head.rstrip(b'/')
    if stripped:
        return stripped, tail
    # head consists only of slashes: keep them all (root-like prefix)
    return head + b'/', tail
132 125
133 126
def openhardlinks() -> bool:
    """Return True: on POSIX it is safe to hold hardlinked files open."""
    return True
137 130
138 131
def nlinks(name: bytes) -> int:
    """return the number of hard links pointing at ``name``"""
    # stat the link itself, not its target
    return os.stat(name, follow_symlinks=False).st_nlink
142 135
143 136
def parsepatchoutput(output_line: bytes) -> bytes:
    """parses the output produced by patch and returns the filename

    Strips the shell quoting that patch applies to names containing
    special characters (backticks on OpenVMS, single quotes elsewhere).
    """
    pf = output_line[14:]
    if pycompat.sysplatform == b'OpenVMS':
        # NOTE: indexing bytes yields an int on Python 3, so the historical
        # ``pf[0] == b'`'`` comparison was always False; use startswith().
        if pf.startswith(b'`'):
            pf = pf[1:-1]  # Remove the quotes
    else:
        if pf.startswith(b"'") and pf.endswith(b"'") and b" " in pf:
            pf = pf[1:-1]  # Remove the quotes
    return pf
154 147
155 148
def sshargs(
    sshcmd: bytes, host: bytes, user: Optional[bytes], port: Optional[bytes]
) -> bytes:
    '''Build argument list for ssh'''
    target = (b"%s@%s" % (user, host)) if user else host
    # refuse names that ssh would parse as options
    if target.startswith(b'-'):
        raise error.Abort(
            _(b'illegal ssh hostname or username starting with -: %s') % target
        )
    quoted = shellquote(target)
    if port:
        return b'-p %s %s' % (shellquote(port), quoted)
    return quoted
169 162
170 163
def isexec(f: bytes) -> bool:
    """check whether a file is executable (owner execute bit set)"""
    # S_IXUSR == 0o100; lstat so a symlink's own mode is examined
    return bool(os.lstat(f).st_mode & stat.S_IXUSR)
174 167
175 168
def setflags(f: bytes, l: bool, x: bool) -> None:
    """Set the symlink (``l``) and executable (``x``) bits on path ``f``,
    converting between regular file and symlink as needed."""
    st = os.lstat(f)
    s = st.st_mode
    if l:
        if not stat.S_ISLNK(s):
            # switch file to link
            with open(f, b'rb') as fp:
                data = fp.read()
            unlink(f)
            try:
                os.symlink(data, f)
            except OSError:
                # failed to make a link, rewrite file
                with open(f, b"wb") as fp:
                    fp.write(data)

        # no chmod needed at this point
        return
    if stat.S_ISLNK(s):
        # switch link to file
        data = os.readlink(f)
        unlink(f)
        with open(f, b"wb") as fp:
            fp.write(data)
        s = 0o666 & ~umask  # avoid restatting for chmod

    sx = s & 0o100
    if st.st_nlink > 1 and bool(x) != bool(sx):
        # the file is a hardlink, break it
        with open(f, b"rb") as fp:
            data = fp.read()
        unlink(f)
        with open(f, b"wb") as fp:
            fp.write(data)

    if x and not sx:
        # Turn on +x for every +r bit when making a file executable
        # and obey umask.
        os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
    elif not x and sx:
        # Turn off all +x bits
        os.chmod(f, s & 0o666)
218 211
219 212
def copymode(
    src: bytes,
    dst: bytes,
    # NOTE(review): mode is used as an int bitmask below (``st_mode &=
    # 0o666``); the previous Optional[bytes] annotation looked stale.
    mode: Optional[int] = None,
    enforcewritable: bool = False,
) -> None:
    """Copy the file mode from the file at path src to dst.
    If src doesn't exist, we're using mode instead. If mode is None, we're
    using umask."""
    try:
        st_mode = os.lstat(src).st_mode & 0o777
    except FileNotFoundError:
        st_mode = mode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0o666

    new_mode = st_mode

    if enforcewritable:
        new_mode |= stat.S_IWUSR

    os.chmod(dst, new_mode)
243 236
244 237
def checkexec(path: bytes) -> bool:
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)

    Falls through (returning None, i.e. falsy) when the probe file's exec
    bit could not be made to stick.
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        basedir = os.path.join(path, b'.hg')
        cachedir = os.path.join(basedir, b'wcache')
        storedir = os.path.join(basedir, b'store')
        if not os.path.exists(cachedir):
            try:
                # we want to create the 'cache' directory, not the '.hg' one.
                # Automatically creating '.hg' directory could silently spawn
                # invalid Mercurial repositories. That seems like a bad idea.
                os.mkdir(cachedir)
                if os.path.exists(storedir):
                    copymode(storedir, cachedir)
                else:
                    copymode(basedir, cachedir)
            except (IOError, OSError):
                # we other fallback logic triggers
                pass
        if os.path.isdir(cachedir):
            checkisexec = os.path.join(cachedir, b'checkisexec')
            checknoexec = os.path.join(cachedir, b'checknoexec')

            try:
                m = os.stat(checkisexec).st_mode
            except FileNotFoundError:
                # checkisexec does not exist - fall through ...
                pass
            else:
                # checkisexec exists, check if it actually is exec
                if m & EXECFLAGS != 0:
                    # ensure checknoexec exists, check it isn't exec
                    try:
                        m = os.stat(checknoexec).st_mode
                    except FileNotFoundError:
                        open(checknoexec, b'w').close()  # might fail
                        m = os.stat(checknoexec).st_mode
                    if m & EXECFLAGS == 0:
                        # check-exec is exec and check-no-exec is not exec
                        return True
                    # checknoexec exists but is exec - delete it
                    unlink(checknoexec)
                # checkisexec exists but is not exec - delete it
                unlink(checkisexec)

            # check using one file, leave it as checkisexec
            checkdir = cachedir
        else:
            # check directly in path and don't leave checkisexec behind
            checkdir = path
            checkisexec = None
        fh, fn = pycompat.mkstemp(dir=checkdir, prefix=b'hg-checkexec-')
        try:
            os.close(fh)
            m = os.stat(fn).st_mode
            if m & EXECFLAGS == 0:
                os.chmod(fn, m & 0o777 | EXECFLAGS)
                if os.stat(fn).st_mode & EXECFLAGS != 0:
                    if checkisexec is not None:
                        os.rename(fn, checkisexec)
                    fn = None
                    return True
        finally:
            if fn is not None:
                unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
323 316
324 317
def checklink(path: bytes) -> bool:
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    # the loop only repeats when symlink creation raced (FileExistsError)
    while True:
        cachedir = os.path.join(path, b'.hg', b'wcache')
        checklink = os.path.join(cachedir, b'checklink')
        # try fast path, read only
        if os.path.islink(checklink):
            return True
        if os.path.isdir(cachedir):
            checkdir = cachedir
        else:
            checkdir = path
            cachedir = None
        name = tempfile.mktemp(
            dir=pycompat.fsdecode(checkdir), prefix=r'checklink-'
        )
        name = pycompat.fsencode(name)
        try:
            fd = None
            if cachedir is None:
                fd = pycompat.namedtempfile(
                    dir=checkdir, prefix=b'hg-checklink-'
                )
                target = os.path.basename(fd.name)
            else:
                # create a fixed file to link to; doesn't matter if it
                # already exists.
                target = b'checklink-target'
                try:
                    fullpath = os.path.join(cachedir, target)
                    open(fullpath, b'w').close()
                except PermissionError:
                    # If we can't write to cachedir, just pretend
                    # that the fs is readonly and by association
                    # that the fs won't support symlinks. This
                    # seems like the least dangerous way to avoid
                    # data loss.
                    return False
            try:
                os.symlink(target, name)
                if cachedir is None:
                    unlink(name)
                else:
                    try:
                        os.rename(name, checklink)
                    except OSError:
                        unlink(name)
                return True
            except FileExistsError:
                # link creation might race, try again
                continue
            finally:
                if fd is not None:
                    fd.close()
        except AttributeError:
            return False
        except OSError as inst:
            # sshfs might report failure while successfully creating the link
            if inst.errno == errno.EIO and os.path.exists(name):
                unlink(name)
            return False
388 381
389 382
def checkosfilename(path: bytes) -> Optional[bytes]:
    """Check that the base-relative path is a valid filename on this platform.
    Returns None if the path is ok, or a UI string describing the problem."""
    # every path is acceptable on posix platforms
    return None
394 387
395 388
def getfsmountpoint(dirpath: bytes) -> Optional[bytes]:
    """Get the filesystem mount point from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    """
    try:
        # not every osutil build provides this helper
        impl = osutil.getfsmountpoint
    except AttributeError:
        return None
    return impl(dirpath)
402 395
403 396
404 397 def getfstype(dirpath: bytes) -> Optional[bytes]:
405 398 """Get the filesystem type name from a directory (best-effort)
406 399
407 400 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
408 401 """
409 402 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
410 403
411 404
412 405 def get_password() -> bytes:
413 406 return encoding.strtolocal(getpass.getpass(''))
414 407
415 408
416 409 def setbinary(fd) -> None:
417 410 pass
418 411
419 412
420 413 def pconvert(path: bytes) -> bytes:
421 414 return path
422 415
423 416
424 417 def localpath(path: bytes) -> bytes:
425 418 return path
426 419
427 420
428 421 def samefile(fpath1: bytes, fpath2: bytes) -> bool:
429 422 """Returns whether path1 and path2 refer to the same file. This is only
430 423 guaranteed to work for files, not directories."""
431 424 return os.path.samefile(fpath1, fpath2)
432 425
433 426
434 427 def samedevice(fpath1: bytes, fpath2: bytes) -> bool:
435 428 """Returns whether fpath1 and fpath2 are on the same device. This is only
436 429 guaranteed to work for files, not directories."""
437 430 st1 = os.lstat(fpath1)
438 431 st2 = os.lstat(fpath2)
439 432 return st1.st_dev == st2.st_dev
440 433
441 434
442 435 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
443 436 def normcase(path: bytes) -> bytes:
444 437 return path.lower()
445 438
446 439
447 440 # what normcase does to ASCII strings
448 441 normcasespec: int = encoding.normcasespecs.lower
449 442 # fallback normcase function for non-ASCII strings
450 443 normcasefallback = normcase
451 444
452 445 if pycompat.isdarwin:
453 446
454 447 def normcase(path: bytes) -> bytes:
455 448 """
456 449 Normalize a filename for OS X-compatible comparison:
457 450 - escape-encode invalid characters
458 451 - decompose to NFD
459 452 - lowercase
460 453 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
461 454
462 455 >>> normcase(b'UPPER')
463 456 'upper'
464 457 >>> normcase(b'Caf\\xc3\\xa9')
465 458 'cafe\\xcc\\x81'
466 459 >>> normcase(b'\\xc3\\x89')
467 460 'e\\xcc\\x81'
468 461 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
469 462 '%b8%ca%c3\\xca\\xbe%c8.jpg'
470 463 """
471 464
472 465 try:
473 466 return encoding.asciilower(path) # exception for non-ASCII
474 467 except UnicodeDecodeError:
475 468 return normcasefallback(path)
476 469
477 470 normcasespec = encoding.normcasespecs.lower
478 471
479 472 def normcasefallback(path: bytes) -> bytes:
480 473 try:
481 474 u = path.decode('utf-8')
482 475 except UnicodeDecodeError:
483 476 # OS X percent-encodes any bytes that aren't valid utf-8
484 477 s = b''
485 478 pos = 0
486 479 l = len(path)
487 480 while pos < l:
488 481 try:
489 482 c = encoding.getutf8char(path, pos)
490 483 pos += len(c)
491 484 except ValueError:
492 485 c = b'%%%02X' % ord(path[pos : pos + 1])
493 486 pos += 1
494 487 s += c
495 488
496 489 u = s.decode('utf-8')
497 490
498 491 # Decompose then lowercase (HFS+ technote specifies lower)
499 492 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
500 493 # drop HFS+ ignored characters
501 494 return encoding.hfsignoreclean(enc)
502 495
503 496
504 497 if pycompat.sysplatform == b'cygwin':
505 498 # workaround for cygwin, in which mount point part of path is
506 499 # treated as case sensitive, even though underlying NTFS is case
507 500 # insensitive.
508 501
509 502 # default mount points
510 503 cygwinmountpoints = sorted(
511 504 [
512 505 b"/usr/bin",
513 506 b"/usr/lib",
514 507 b"/cygdrive",
515 508 ],
516 509 reverse=True,
517 510 )
518 511
519 512 # use upper-ing as normcase as same as NTFS workaround
520 513 def normcase(path: bytes) -> bytes:
521 514 pathlen = len(path)
522 515 if (pathlen == 0) or (path[0] != pycompat.ossep):
523 516 # treat as relative
524 517 return encoding.upper(path)
525 518
526 519 # to preserve case of mountpoint part
527 520 for mp in cygwinmountpoints:
528 521 if not path.startswith(mp):
529 522 continue
530 523
531 524 mplen = len(mp)
532 525 if mplen == pathlen: # mount point itself
533 526 return mp
534 527 if path[mplen] == pycompat.ossep:
535 528 return mp + encoding.upper(path[mplen:])
536 529
537 530 return encoding.upper(path)
538 531
539 532 normcasespec = encoding.normcasespecs.other
540 533 normcasefallback = normcase
541 534
542 535 # Cygwin translates native ACLs to POSIX permissions,
543 536 # but these translations are not supported by native
544 537 # tools, so the exec bit tends to be set erroneously.
545 538 # Therefore, disable executable bit access on Cygwin.
546 539 def checkexec(path: bytes) -> bool:
547 540 return False
548 541
549 542 # Similarly, Cygwin's symlink emulation is likely to create
550 543 # problems when Mercurial is used from both Cygwin and native
551 544 # Windows, with other native tools, or on shared volumes
552 545 def checklink(path: bytes) -> bool:
553 546 return False
554 547
555 548
556 549 if pycompat.sysplatform == b'OpenVMS':
557 550 # OpenVMS's symlink emulation is broken on some OpenVMS versions.
558 551 def checklink(path):
559 552 return False
560 553
561 554
562 555 _needsshellquote: Optional[Match[bytes]] = None
563 556
564 557
565 558 def shellquote(s: bytes) -> bytes:
566 559 if pycompat.sysplatform == b'OpenVMS':
567 560 return b'"%s"' % s
568 561 global _needsshellquote
569 562 if _needsshellquote is None:
570 563 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
571 564 if s and not _needsshellquote(s):
572 565 # "s" shouldn't have to be quoted
573 566 return s
574 567 else:
575 568 return b"'%s'" % s.replace(b"'", b"'\\''")
576 569
577 570
578 571 def shellsplit(s: bytes) -> List[bytes]:
579 572 """Parse a command string in POSIX shell way (best-effort)"""
580 573 return pycompat.shlexsplit(s, posix=True)
581 574
582 575
583 576 def testpid(pid: int) -> bool:
584 577 '''return False if pid dead, True if running or not sure'''
585 578 if pycompat.sysplatform == b'OpenVMS':
586 579 return True
587 580 try:
588 581 os.kill(pid, 0)
589 582 return True
590 583 except OSError as inst:
591 584 return inst.errno != errno.ESRCH
592 585
593 586
594 587 def isowner(st: os.stat_result) -> bool:
595 588 """Return True if the stat object st is from the current user."""
596 589 return st.st_uid == os.getuid()
597 590
598 591
599 592 def findexe(command: bytes) -> Optional[bytes]:
600 593 """Find executable for command searching like which does.
601 594 If command is a basename then PATH is searched for command.
602 595 PATH isn't searched if command is an absolute or relative path.
603 596 If command isn't found None is returned."""
604 597 if pycompat.sysplatform == b'OpenVMS':
605 598 return command
606 599
607 600 def findexisting(executable: bytes) -> Optional[bytes]:
608 601 b'Will return executable if existing file'
609 602 if os.path.isfile(executable) and os.access(executable, os.X_OK):
610 603 return executable
611 604 return None
612 605
613 606 if pycompat.ossep in command:
614 607 return findexisting(command)
615 608
616 609 if pycompat.sysplatform == b'plan9':
617 610 return findexisting(os.path.join(b'/bin', command))
618 611
619 612 for path in encoding.environ.get(b'PATH', b'').split(pycompat.ospathsep):
620 613 executable = findexisting(os.path.join(path, command))
621 614 if executable is not None:
622 615 return executable
623 616 return None
624 617
625 618
626 619 def setsignalhandler() -> None:
627 620 pass
628 621
629 622
630 623 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
631 624
632 625
633 626 def statfiles(files: Sequence[bytes]) -> Iterator[Optional[os.stat_result]]:
634 627 """Stat each file in files. Yield each stat, or None if a file does not
635 628 exist or has a type we don't care about."""
636 629 lstat = os.lstat
637 630 getkind = stat.S_IFMT
638 631 for nf in files:
639 632 try:
640 633 st = lstat(nf)
641 634 if getkind(st.st_mode) not in _wantedkinds:
642 635 st = None
643 636 except (FileNotFoundError, NotADirectoryError):
644 637 st = None
645 638 yield st
646 639
647 640
648 641 def getuser() -> bytes:
649 642 '''return name of current user'''
650 643 return pycompat.fsencode(getpass.getuser())
651 644
652 645
653 646 def username(uid: Optional[int] = None) -> Optional[bytes]:
654 647 """Return the name of the user with the given uid.
655 648
656 649 If uid is None, return the name of the current user."""
657 650
658 651 if uid is None:
659 652 uid = os.getuid()
660 653 try:
661 654 return pycompat.fsencode(pwd.getpwuid(uid)[0])
662 655 except KeyError:
663 656 return b'%d' % uid
664 657
665 658
666 659 def groupname(gid: Optional[int] = None) -> Optional[bytes]:
667 660 """Return the name of the group with the given gid.
668 661
669 662 If gid is None, return the name of the current group."""
670 663
671 664 if gid is None:
672 665 gid = os.getgid()
673 666 try:
674 667 return pycompat.fsencode(grp.getgrgid(gid)[0])
675 668 except KeyError:
676 669 return pycompat.bytestr(gid)
677 670
678 671
679 672 def groupmembers(name: bytes) -> List[bytes]:
680 673 """Return the list of members of the group with the given
681 674 name, KeyError if the group does not exist.
682 675 """
683 676 name = pycompat.fsdecode(name)
684 677 return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
685 678
686 679
687 680 def spawndetached(args: List[bytes]) -> int:
688 681 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args)
689 682
690 683
691 684 def gethgcmd(): # TODO: convert to bytes, like on Windows?
692 685 return sys.argv[:1]
693 686
694 687
695 688 def makedir(path: bytes, notindexed: bool) -> None:
696 689 os.mkdir(path)
697 690
698 691
699 692 def lookupreg(
700 693 key: bytes,
701 694 name: Optional[bytes] = None,
702 695 scope: Optional[Union[int, Iterable[int]]] = None,
703 696 ) -> Optional[bytes]:
704 697 return None
705 698
706 699
707 700 def hidewindow() -> None:
708 701 """Hide current shell window.
709 702
710 703 Used to hide the window opened when starting asynchronous
711 704 child process under Windows, unneeded on other systems.
712 705 """
713 706 pass
714 707
715 708
716 709 class cachestat:
717 710 def __init__(self, path: bytes) -> None:
718 711 self.stat = os.stat(path)
719 712
720 713 def cacheable(self) -> bool:
721 714 return bool(self.stat.st_ino)
722 715
723 716 __hash__ = object.__hash__
724 717
725 718 def __eq__(self, other: Any) -> bool:
726 719 try:
727 720 # Only dev, ino, size, mtime and atime are likely to change. Out
728 721 # of these, we shouldn't compare atime but should compare the
729 722 # rest. However, one of the other fields changing indicates
730 723 # something fishy going on, so return False if anything but atime
731 724 # changes.
732 725 return (
733 726 self.stat.st_mode == other.stat.st_mode
734 727 and self.stat.st_ino == other.stat.st_ino
735 728 and self.stat.st_dev == other.stat.st_dev
736 729 and self.stat.st_nlink == other.stat.st_nlink
737 730 and self.stat.st_uid == other.stat.st_uid
738 731 and self.stat.st_gid == other.stat.st_gid
739 732 and self.stat.st_size == other.stat.st_size
740 733 and self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME]
741 734 and self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME]
742 735 )
743 736 except AttributeError:
744 737 return False
745 738
746 739 def __ne__(self, other: Any) -> bool:
747 740 return not self == other
748 741
749 742
750 743 def statislink(st: Optional[os.stat_result]) -> bool:
751 744 '''check whether a stat result is a symlink'''
752 745 return stat.S_ISLNK(st.st_mode) if st else False
753 746
754 747
755 748 def statisexec(st: Optional[os.stat_result]) -> bool:
756 749 '''check whether a stat result is an executable file'''
757 750 return (st.st_mode & 0o100 != 0) if st else False
758 751
759 752
760 753 def poll(fds):
761 754 """block until something happens on any file descriptor
762 755
763 756 This is a generic helper that will check for any activity
764 757 (read, write. exception) and return the list of touched files.
765 758
766 759 In unsupported cases, it will raise a NotImplementedError"""
767 760 try:
768 761 res = select.select(fds, fds, fds)
769 762 except ValueError: # out of range file descriptor
770 763 raise NotImplementedError()
771 764 return sorted(list(set(sum(res, []))))
772 765
773 766
774 767 def readpipe(pipe) -> bytes:
775 768 """Read all available data from a pipe."""
776 769 # We can't fstat() a pipe because Linux will always report 0.
777 770 # So, we set the pipe to non-blocking mode and read everything
778 771 # that's available.
779 772 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
780 773 flags |= os.O_NONBLOCK
781 774 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
782 775
783 776 try:
784 777 chunks = []
785 778 while True:
786 779 try:
787 780 s = pipe.read()
788 781 if not s:
789 782 break
790 783 chunks.append(s)
791 784 except IOError:
792 785 break
793 786
794 787 return b''.join(chunks)
795 788 finally:
796 789 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
797 790
798 791
799 792 def bindunixsocket(sock, path: bytes) -> None:
800 793 """Bind the UNIX domain socket to the specified path"""
801 794 # use relative path instead of full path at bind() if possible, since
802 795 # AF_UNIX path has very small length limit (107 chars) on common
803 796 # platforms (see sys/un.h)
804 797 dirname, basename = os.path.split(path)
805 798 bakwdfd = None
806 799
807 800 try:
808 801 if dirname:
809 802 bakwdfd = os.open(b'.', os.O_DIRECTORY)
810 803 os.chdir(dirname)
811 804 sock.bind(basename)
812 805 if bakwdfd:
813 806 os.fchdir(bakwdfd)
814 807 finally:
815 808 if bakwdfd:
816 809 os.close(bakwdfd)
@@ -1,102 +1,103 b''
1 1 import array
2 2 import errno
3 3 import fcntl
4 4 import os
5 5 import sys
6 import typing
6 7
7 8 from typing import (
8 9 List,
9 10 Tuple,
10 11 )
11 12
12 13 from . import (
13 14 encoding,
14 15 pycompat,
15 16 util,
16 17 )
17 18
18 if pycompat.TYPE_CHECKING:
19 if typing.TYPE_CHECKING:
19 20 from . import ui as uimod
20 21
21 22 # BSD 'more' escapes ANSI color sequences by default. This can be disabled by
22 23 # $MORE variable, but there's no compatible option with Linux 'more'. Given
23 24 # OS X is widely used and most modern Unix systems would have 'less', setting
24 25 # 'less' as the default seems reasonable.
25 26 fallbackpager = b'less'
26 27
27 28
28 29 def _rcfiles(path: bytes) -> List[bytes]:
29 30 rcs = [os.path.join(path, b'hgrc')]
30 31 rcdir = os.path.join(path, b'hgrc.d')
31 32 try:
32 33 rcs.extend(
33 34 [
34 35 os.path.join(rcdir, f)
35 36 for f, kind in sorted(util.listdir(rcdir))
36 37 if f.endswith(b".rc")
37 38 ]
38 39 )
39 40 except OSError:
40 41 pass
41 42 return rcs
42 43
43 44
44 45 def systemrcpath() -> List[bytes]:
45 46 path = []
46 47 if pycompat.sysplatform == b'plan9':
47 48 root = b'lib/mercurial'
48 49 else:
49 50 root = b'etc/mercurial'
50 51 # old mod_python does not set sys.argv
51 52 if len(getattr(sys, 'argv', [])) > 0:
52 53 p = os.path.dirname(os.path.dirname(pycompat.sysargv[0]))
53 54 if p != b'/':
54 55 path.extend(_rcfiles(os.path.join(p, root)))
55 56 path.extend(_rcfiles(b'/' + root))
56 57 return path
57 58
58 59
59 60 def userrcpath() -> List[bytes]:
60 61 if pycompat.sysplatform == b'plan9':
61 62 return [encoding.environ[b'home'] + b'/lib/hgrc']
62 63 elif pycompat.isdarwin:
63 64 return [os.path.expanduser(b'~/.hgrc')]
64 65 else:
65 66 confighome = encoding.environ.get(b'XDG_CONFIG_HOME')
66 67 if confighome is None or not os.path.isabs(confighome):
67 68 confighome = os.path.expanduser(b'~/.config')
68 69
69 70 return [
70 71 os.path.expanduser(b'~/.hgrc'),
71 72 os.path.join(confighome, b'hg', b'hgrc'),
72 73 ]
73 74
74 75
75 76 def termsize(ui: "uimod.ui") -> Tuple[int, int]:
76 77 try:
77 78 import termios
78 79
79 80 TIOCGWINSZ = termios.TIOCGWINSZ # unavailable on IRIX (issue3449)
80 81 except (AttributeError, ImportError):
81 82 return 80, 24
82 83
83 84 for dev in (ui.ferr, ui.fout, ui.fin):
84 85 try:
85 86 try:
86 87 fd = dev.fileno()
87 88 except AttributeError:
88 89 continue
89 90 if not os.isatty(fd):
90 91 continue
91 92 arri = fcntl.ioctl(fd, TIOCGWINSZ, b'\0' * 8)
92 93 height, width = array.array('h', arri)[:2]
93 94 if width > 0 and height > 0:
94 95 return width, height
95 96 except ValueError:
96 97 pass
97 98 except IOError as e:
98 99 if e.errno == errno.EINVAL:
99 100 pass
100 101 else:
101 102 raise
102 103 return 80, 24
@@ -1,116 +1,117 b''
1 1 import os
2 2 import winreg # pytype: disable=import-error
3 3
4 4 from typing import (
5 5 List,
6 TYPE_CHECKING,
6 7 Tuple,
7 8 )
8 9
9 10 from . import (
10 11 encoding,
11 12 pycompat,
12 13 util,
13 14 win32,
14 15 )
15 16
16 if pycompat.TYPE_CHECKING:
17 if TYPE_CHECKING:
17 18 from . import ui as uimod
18 19
19 20 # MS-DOS 'more' is the only pager available by default on Windows.
20 21 fallbackpager = b'more'
21 22
22 23
23 24 def systemrcpath() -> List[bytes]:
24 25 '''return default os-specific hgrc search path'''
25 26 rcpath = []
26 27 filename = win32.executablepath()
27 28 # Use mercurial.ini found in directory with hg.exe
28 29 progrc = os.path.join(os.path.dirname(filename), b'mercurial.ini')
29 30 rcpath.append(progrc)
30 31
31 32 def _processdir(progrcd: bytes) -> None:
32 33 if os.path.isdir(progrcd):
33 34 for f, kind in sorted(util.listdir(progrcd)):
34 35 if f.endswith(b'.rc'):
35 36 rcpath.append(os.path.join(progrcd, f))
36 37
37 38 # Use hgrc.d found in directory with hg.exe
38 39 _processdir(os.path.join(os.path.dirname(filename), b'hgrc.d'))
39 40
40 41 # treat a PROGRAMDATA directory as equivalent to /etc/mercurial
41 42 programdata = encoding.environ.get(b'PROGRAMDATA')
42 43 if programdata:
43 44 programdata = os.path.join(programdata, b'Mercurial')
44 45 _processdir(os.path.join(programdata, b'hgrc.d'))
45 46
46 47 ini = os.path.join(programdata, b'mercurial.ini')
47 48 if os.path.isfile(ini):
48 49 rcpath.append(ini)
49 50
50 51 ini = os.path.join(programdata, b'hgrc')
51 52 if os.path.isfile(ini):
52 53 rcpath.append(ini)
53 54
54 55 # next look for a system rcpath in the registry
55 56 value = util.lookupreg(
56 57 # pytype: disable=module-attr
57 58 b'SOFTWARE\\Mercurial',
58 59 None,
59 60 winreg.HKEY_LOCAL_MACHINE
60 61 # pytype: enable=module-attr
61 62 )
62 63 if value and isinstance(value, bytes):
63 64 value = util.localpath(value)
64 65 for p in value.split(pycompat.ospathsep):
65 66 if p.lower().endswith(b'mercurial.ini'):
66 67 rcpath.append(p)
67 68 else:
68 69 _processdir(p)
69 70 return rcpath
70 71
71 72
72 73 def userrcpath() -> List[bytes]:
73 74 '''return os-specific hgrc search path to the user dir'''
74 75 home = _legacy_expanduser(b'~')
75 76 path = [os.path.join(home, b'mercurial.ini'), os.path.join(home, b'.hgrc')]
76 77 userprofile = encoding.environ.get(b'USERPROFILE')
77 78 if userprofile and userprofile != home:
78 79 path.append(os.path.join(userprofile, b'mercurial.ini'))
79 80 path.append(os.path.join(userprofile, b'.hgrc'))
80 81 return path
81 82
82 83
83 84 def _legacy_expanduser(path: bytes) -> bytes:
84 85 """Expand ~ and ~user constructs in the pre 3.8 style"""
85 86
86 87 # Python 3.8+ changed the expansion of '~' from HOME to USERPROFILE. See
87 88 # https://bugs.python.org/issue36264. It also seems to capitalize the drive
88 89 # letter, as though it was processed through os.path.realpath().
89 90 if not path.startswith(b'~'):
90 91 return path
91 92
92 93 i, n = 1, len(path)
93 94 while i < n and path[i] not in b'\\/':
94 95 i += 1
95 96
96 97 if b'HOME' in encoding.environ:
97 98 userhome = encoding.environ[b'HOME']
98 99 elif b'USERPROFILE' in encoding.environ:
99 100 userhome = encoding.environ[b'USERPROFILE']
100 101 elif b'HOMEPATH' not in encoding.environ:
101 102 return path
102 103 else:
103 104 try:
104 105 drive = encoding.environ[b'HOMEDRIVE']
105 106 except KeyError:
106 107 drive = b''
107 108 userhome = os.path.join(drive, encoding.environ[b'HOMEPATH'])
108 109
109 110 if i != 1: # ~user
110 111 userhome = os.path.join(os.path.dirname(userhome), path[1:i])
111 112
112 113 return userhome + path[i:]
113 114
114 115
115 116 def termsize(ui: "uimod.ui") -> Tuple[int, int]:
116 117 return win32.termsize()
@@ -1,385 +1,384 b''
1 1 # state.py - writing and reading state files in Mercurial
2 2 #
3 3 # Copyright 2018 Pulkit Goyal <pulkitmgoyal@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """
9 9 This file contains class to wrap the state for commands and other
10 10 related logic.
11 11
12 12 All the data related to the command state is stored as dictionary in the object.
13 13 The class has methods using which the data can be stored to disk in a file under
14 14 .hg/ directory.
15 15
16 16 We store the data on disk in cbor, for which we use the CBOR format to encode
17 17 the data.
18 18 """
19 19
20 20
21 21 import contextlib
22 22
23 from typing import (
24 Any,
25 Dict,
26 )
27
23 28 from .i18n import _
24 29
25 30 from . import (
26 31 error,
27 pycompat,
28 32 util,
29 33 )
30 34 from .utils import cborutil
31 35
32 if pycompat.TYPE_CHECKING:
33 from typing import (
34 Any,
35 Dict,
36 )
37
38 for t in (Any, Dict):
39 assert t
36 # keeps pyflakes happy
37 for t in (Any, Dict):
38 assert t
40 39
41 40
42 41 class cmdstate:
43 42 """a wrapper class to store the state of commands like `rebase`, `graft`,
44 43 `histedit`, `shelve` etc. Extensions can also use this to write state files.
45 44
46 45 All the data for the state is stored in the form of key-value pairs in a
47 46 dictionary.
48 47
49 48 The class object can write all the data to a file in .hg/ directory and
50 49 can populate the object data reading that file.
51 50
52 51 Uses cbor to serialize and deserialize data while writing and reading from
53 52 disk.
54 53 """
55 54
56 55 def __init__(self, repo, fname):
57 56 """repo is the repo object
58 57 fname is the file name in which data should be stored in .hg directory
59 58 """
60 59 self._repo = repo
61 60 self.fname = fname
62 61
63 62 def read(self):
64 63 # type: () -> Dict[bytes, Any]
65 64 """read the existing state file and return a dict of data stored"""
66 65 return self._read()
67 66
68 67 def save(self, version, data):
69 68 """write all the state data stored to .hg/<filename> file
70 69
71 70 we use third-party library cbor to serialize data to write in the file.
72 71 """
73 72 if not isinstance(version, int):
74 73 raise error.ProgrammingError(
75 74 b"version of state file should be an integer"
76 75 )
77 76
78 77 with self._repo.vfs(self.fname, b'wb', atomictemp=True) as fp:
79 78 fp.write(b'%d\n' % version)
80 79 for chunk in cborutil.streamencode(data):
81 80 fp.write(chunk)
82 81
83 82 def _read(self):
84 83 """reads the state file and returns a dictionary which contain
85 84 data in the same format as it was before storing"""
86 85 with self._repo.vfs(self.fname, b'rb') as fp:
87 86 try:
88 87 int(fp.readline())
89 88 except ValueError:
90 89 raise error.CorruptedState(
91 90 b"unknown version of state file found"
92 91 )
93 92
94 93 return cborutil.decodeall(fp.read())[0]
95 94
96 95 def delete(self):
97 96 """drop the state file if exists"""
98 97 util.unlinkpath(self._repo.vfs.join(self.fname), ignoremissing=True)
99 98
100 99 def exists(self):
101 100 """check whether the state file exists or not"""
102 101 return self._repo.vfs.exists(self.fname)
103 102
104 103
105 104 class _statecheck:
106 105 """a utility class that deals with multistep operations like graft,
107 106 histedit, bisect, update etc and check whether such commands
108 107 are in an unfinished conditition or not and return appropriate message
109 108 and hint.
110 109 It also has the ability to register and determine the states of any new
111 110 multistep operation or multistep command extension.
112 111 """
113 112
114 113 def __init__(
115 114 self,
116 115 opname,
117 116 fname,
118 117 clearable,
119 118 allowcommit,
120 119 reportonly,
121 120 continueflag,
122 121 stopflag,
123 122 childopnames,
124 123 cmdmsg,
125 124 cmdhint,
126 125 statushint,
127 126 abortfunc,
128 127 continuefunc,
129 128 ):
130 129 self._opname = opname
131 130 self._fname = fname
132 131 self._clearable = clearable
133 132 self._allowcommit = allowcommit
134 133 self._reportonly = reportonly
135 134 self._continueflag = continueflag
136 135 self._stopflag = stopflag
137 136 self._childopnames = childopnames
138 137 self._delegating = False
139 138 self._cmdmsg = cmdmsg
140 139 self._cmdhint = cmdhint
141 140 self._statushint = statushint
142 141 self.abortfunc = abortfunc
143 142 self.continuefunc = continuefunc
144 143
145 144 def statusmsg(self):
146 145 """returns the hint message corresponding to the command for
147 146 hg status --verbose
148 147 """
149 148 if not self._statushint:
150 149 hint = _(
151 150 b'To continue: hg %s --continue\n'
152 151 b'To abort: hg %s --abort'
153 152 ) % (self._opname, self._opname)
154 153 if self._stopflag:
155 154 hint = hint + (
156 155 _(b'\nTo stop: hg %s --stop') % (self._opname)
157 156 )
158 157 return hint
159 158 return self._statushint
160 159
161 160 def hint(self):
162 161 """returns the hint message corresponding to an interrupted
163 162 operation
164 163 """
165 164 if not self._cmdhint:
166 165 if not self._stopflag:
167 166 return _(b"use 'hg %s --continue' or 'hg %s --abort'") % (
168 167 self._opname,
169 168 self._opname,
170 169 )
171 170 else:
172 171 return _(
173 172 b"use 'hg %s --continue', 'hg %s --abort', "
174 173 b"or 'hg %s --stop'"
175 174 ) % (
176 175 self._opname,
177 176 self._opname,
178 177 self._opname,
179 178 )
180 179
181 180 return self._cmdhint
182 181
183 182 def msg(self):
184 183 """returns the status message corresponding to the command"""
185 184 if not self._cmdmsg:
186 185 return _(b'%s in progress') % (self._opname)
187 186 return self._cmdmsg
188 187
189 188 def continuemsg(self):
190 189 """returns appropriate continue message corresponding to command"""
191 190 return _(b'hg %s --continue') % (self._opname)
192 191
193 192 def isunfinished(self, repo):
194 193 """determines whether a multi-step operation is in progress
195 194 or not
196 195 """
197 196 if self._opname == b'merge':
198 197 return len(repo[None].parents()) > 1
199 198 elif self._delegating:
200 199 return False
201 200 else:
202 201 return repo.vfs.exists(self._fname)
203 202
204 203
205 204 # A list of statecheck objects for multistep operations like graft.
206 205 _unfinishedstates = []
207 206 _unfinishedstatesbyname = {}
208 207
209 208
210 209 def addunfinished(
211 210 opname,
212 211 fname,
213 212 clearable=False,
214 213 allowcommit=False,
215 214 reportonly=False,
216 215 continueflag=False,
217 216 stopflag=False,
218 217 childopnames=None,
219 218 cmdmsg=b"",
220 219 cmdhint=b"",
221 220 statushint=b"",
222 221 abortfunc=None,
223 222 continuefunc=None,
224 223 ):
225 224 """this registers a new command or operation to unfinishedstates
226 225 opname is the name the command or operation
227 226 fname is the file name in which data should be stored in .hg directory.
228 227 It is None for merge command.
229 228 clearable boolean determines whether or not interrupted states can be
230 229 cleared by running `hg update -C .` which in turn deletes the
231 230 state file.
232 231 allowcommit boolean decides whether commit is allowed during interrupted
233 232 state or not.
234 233 reportonly flag is used for operations like bisect where we just
235 234 need to detect the operation using 'hg status --verbose'
236 235 continueflag is a boolean determines whether or not a command supports
237 236 `--continue` option or not.
238 237 stopflag is a boolean that determines whether or not a command supports
239 238 --stop flag
240 239 childopnames is a list of other opnames this op uses as sub-steps of its
241 240 own execution. They must already be added.
242 241 cmdmsg is used to pass a different status message in case standard
243 242 message of the format "abort: cmdname in progress" is not desired.
244 243 cmdhint is used to pass a different hint message in case standard
245 244 message of the format "To continue: hg cmdname --continue
246 245 To abort: hg cmdname --abort" is not desired.
247 246 statushint is used to pass a different status message in case standard
248 247 message of the format ('To continue: hg cmdname --continue'
249 248 'To abort: hg cmdname --abort') is not desired
250 249 abortfunc stores the function required to abort an unfinished state.
251 250 continuefunc stores the function required to finish an interrupted
252 251 operation.
253 252 """
254 253 childopnames = childopnames or []
255 254 statecheckobj = _statecheck(
256 255 opname,
257 256 fname,
258 257 clearable,
259 258 allowcommit,
260 259 reportonly,
261 260 continueflag,
262 261 stopflag,
263 262 childopnames,
264 263 cmdmsg,
265 264 cmdhint,
266 265 statushint,
267 266 abortfunc,
268 267 continuefunc,
269 268 )
270 269
271 270 if opname == b'merge':
272 271 _unfinishedstates.append(statecheckobj)
273 272 else:
274 273 # This check enforces that for any op 'foo' which depends on op 'bar',
275 274 # 'foo' comes before 'bar' in _unfinishedstates. This ensures that
276 275 # getrepostate() always returns the most specific applicable answer.
277 276 for childopname in childopnames:
278 277 if childopname not in _unfinishedstatesbyname:
279 278 raise error.ProgrammingError(
280 279 _(b'op %s depends on unknown op %s') % (opname, childopname)
281 280 )
282 281
283 282 _unfinishedstates.insert(0, statecheckobj)
284 283
285 284 if opname in _unfinishedstatesbyname:
286 285 raise error.ProgrammingError(_(b'op %s registered twice') % opname)
287 286 _unfinishedstatesbyname[opname] = statecheckobj
288 287
289 288
290 289 def _getparentandchild(opname, childopname):
291 290 p = _unfinishedstatesbyname.get(opname, None)
292 291 if not p:
293 292 raise error.ProgrammingError(_(b'unknown op %s') % opname)
294 293 if childopname not in p._childopnames:
295 294 raise error.ProgrammingError(
296 295 _(b'op %s does not delegate to %s') % (opname, childopname)
297 296 )
298 297 c = _unfinishedstatesbyname[childopname]
299 298 return p, c
300 299
301 300
302 301 @contextlib.contextmanager
303 302 def delegating(repo, opname, childopname):
304 303 """context wrapper for delegations from opname to childopname.
305 304
306 305 requires that childopname was specified when opname was registered.
307 306
308 307 Usage:
309 308 def my_command_foo_that_uses_rebase(...):
310 309 ...
311 310 with state.delegating(repo, 'foo', 'rebase'):
312 311 _run_rebase(...)
313 312 ...
314 313 """
315 314
316 315 p, c = _getparentandchild(opname, childopname)
317 316 if p._delegating:
318 317 raise error.ProgrammingError(
319 318 _(b'cannot delegate from op %s recursively') % opname
320 319 )
321 320 p._delegating = True
322 321 try:
323 322 yield
324 323 except error.ConflictResolutionRequired as e:
325 324 # Rewrite conflict resolution advice for the parent opname.
326 325 if e.opname == childopname:
327 326 raise error.ConflictResolutionRequired(opname)
328 327 raise e
329 328 finally:
330 329 p._delegating = False
331 330
332 331
333 332 def ischildunfinished(repo, opname, childopname):
334 333 """Returns true if both opname and childopname are unfinished."""
335 334
336 335 p, c = _getparentandchild(opname, childopname)
337 336 return (p._delegating or p.isunfinished(repo)) and c.isunfinished(repo)
338 337
339 338
340 339 def continuechild(ui, repo, opname, childopname):
341 340 """Checks that childopname is in progress, and continues it."""
342 341
343 342 p, c = _getparentandchild(opname, childopname)
344 343 if not ischildunfinished(repo, opname, childopname):
345 344 raise error.ProgrammingError(
346 345 _(b'child op %s of parent %s is not unfinished')
347 346 % (childopname, opname)
348 347 )
349 348 if not c.continuefunc:
350 349 raise error.ProgrammingError(
351 350 _(b'op %s has no continue function') % childopname
352 351 )
353 352 return c.continuefunc(ui, repo)
354 353
355 354
356 355 addunfinished(
357 356 b'update',
358 357 fname=b'updatestate',
359 358 clearable=True,
360 359 cmdmsg=_(b'last update was interrupted'),
361 360 cmdhint=_(b"use 'hg update' to get a consistent checkout"),
362 361 statushint=_(b"To continue: hg update ."),
363 362 )
364 363 addunfinished(
365 364 b'bisect',
366 365 fname=b'bisect.state',
367 366 allowcommit=True,
368 367 reportonly=True,
369 368 cmdhint=_(b"use 'hg bisect --reset'"),
370 369 statushint=_(
371 370 b'To mark the changeset good: hg bisect --good\n'
372 371 b'To mark the changeset bad: hg bisect --bad\n'
373 372 b'To abort: hg bisect --reset\n'
374 373 ),
375 374 )
376 375
377 376
378 377 def getrepostate(repo):
379 378 # experimental config: commands.status.skipstates
380 379 skip = set(repo.ui.configlist(b'commands', b'status.skipstates'))
381 380 for state in _unfinishedstates:
382 381 if state._opname in skip:
383 382 continue
384 383 if state.isunfinished(repo):
385 384 return (state._opname, state.statusmsg())
@@ -1,504 +1,525 b''
1 1 # subrepoutil.py - sub-repository operations and substate handling
2 2 #
3 3 # Copyright 2009-2010 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import os
10 10 import posixpath
11 11 import re
12 import typing
13
14 from typing import (
15 Any,
16 Dict,
17 List,
18 Optional,
19 Set,
20 Tuple,
21 )
12 22
13 23 from .i18n import _
14 24 from . import (
15 25 config,
16 26 error,
17 27 filemerge,
18 28 pathutil,
19 29 phases,
20 pycompat,
21 30 util,
22 31 )
23 32 from .utils import (
24 33 stringutil,
25 34 urlutil,
26 35 )
27 36
37 # keeps pyflakes happy
38 assert [
39 Any,
40 Dict,
41 List,
42 Optional,
43 Set,
44 Tuple,
45 ]
46
28 47 nullstate = (b'', b'', b'empty')
29 48
30 if pycompat.TYPE_CHECKING:
31 from typing import (
32 Any,
33 Dict,
34 List,
35 Optional,
36 Set,
37 Tuple,
38 )
49 if typing.TYPE_CHECKING:
39 50 from . import (
40 51 context,
41 52 localrepo,
42 53 match as matchmod,
43 54 scmutil,
44 55 subrepo,
45 56 ui as uimod,
46 57 )
47 58
48 Substate = Dict[bytes, Tuple[bytes, bytes, bytes]]
59 # keeps pyflakes happy
60 assert [
61 context,
62 localrepo,
63 matchmod,
64 scmutil,
65 subrepo,
66 uimod,
67 ]
68
69 Substate = Dict[bytes, Tuple[bytes, bytes, bytes]]
49 70
50 71
51 72 def state(ctx, ui):
52 73 # type: (context.changectx, uimod.ui) -> Substate
53 74 """return a state dict, mapping subrepo paths configured in .hgsub
54 75 to tuple: (source from .hgsub, revision from .hgsubstate, kind
55 76 (key in types dict))
56 77 """
57 78 p = config.config()
58 79 repo = ctx.repo()
59 80
60 81 def read(f, sections=None, remap=None):
61 82 if f in ctx:
62 83 try:
63 84 data = ctx[f].data()
64 85 except FileNotFoundError:
65 86 # handle missing subrepo spec files as removed
66 87 ui.warn(
67 88 _(b"warning: subrepo spec file \'%s\' not found\n")
68 89 % repo.pathto(f)
69 90 )
70 91 return
71 92 p.parse(f, data, sections, remap, read)
72 93 else:
73 94 raise error.Abort(
74 95 _(b"subrepo spec file \'%s\' not found") % repo.pathto(f)
75 96 )
76 97
77 98 if b'.hgsub' in ctx:
78 99 read(b'.hgsub')
79 100
80 101 for path, src in ui.configitems(b'subpaths'):
81 102 p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path))
82 103
83 104 rev = {}
84 105 if b'.hgsubstate' in ctx:
85 106 try:
86 107 for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()):
87 108 l = l.lstrip()
88 109 if not l:
89 110 continue
90 111 try:
91 112 revision, path = l.split(b" ", 1)
92 113 except ValueError:
93 114 raise error.Abort(
94 115 _(
95 116 b"invalid subrepository revision "
96 117 b"specifier in \'%s\' line %d"
97 118 )
98 119 % (repo.pathto(b'.hgsubstate'), (i + 1))
99 120 )
100 121 rev[path] = revision
101 122 except FileNotFoundError:
102 123 pass
103 124
104 125 def remap(src):
105 126 # type: (bytes) -> bytes
106 127 for pattern, repl in p.items(b'subpaths'):
107 128 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
108 129 # does a string decode.
109 130 repl = stringutil.escapestr(repl)
110 131 # However, we still want to allow back references to go
111 132 # through unharmed, so we turn r'\\1' into r'\1'. Again,
112 133 # extra escapes are needed because re.sub string decodes.
113 134 repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl)
114 135 try:
115 136 src = re.sub(pattern, repl, src, 1)
116 137 except re.error as e:
117 138 raise error.Abort(
118 139 _(b"bad subrepository pattern in %s: %s")
119 140 % (
120 141 p.source(b'subpaths', pattern),
121 142 stringutil.forcebytestr(e),
122 143 )
123 144 )
124 145 return src
125 146
126 147 state = {}
127 148 for path, src in p.items(b''): # type: bytes
128 149 kind = b'hg'
129 150 if src.startswith(b'['):
130 151 if b']' not in src:
131 152 raise error.Abort(_(b'missing ] in subrepository source'))
132 153 kind, src = src.split(b']', 1)
133 154 kind = kind[1:]
134 155 src = src.lstrip() # strip any extra whitespace after ']'
135 156
136 157 if not urlutil.url(src).isabs():
137 158 parent = _abssource(repo, abort=False)
138 159 if parent:
139 160 parent = urlutil.url(parent)
140 161 parent.path = posixpath.join(parent.path or b'', src)
141 162 parent.path = posixpath.normpath(parent.path)
142 163 joined = bytes(parent)
143 164 # Remap the full joined path and use it if it changes,
144 165 # else remap the original source.
145 166 remapped = remap(joined)
146 167 if remapped == joined:
147 168 src = remap(src)
148 169 else:
149 170 src = remapped
150 171
151 172 src = remap(src)
152 173 state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind)
153 174
154 175 return state
155 176
156 177
157 178 def writestate(repo, state):
158 179 # type: (localrepo.localrepository, Substate) -> None
159 180 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
160 181 lines = [
161 182 b'%s %s\n' % (state[s][1], s)
162 183 for s in sorted(state)
163 184 if state[s][1] != nullstate[1]
164 185 ]
165 186 repo.wwrite(b'.hgsubstate', b''.join(lines), b'')
166 187
167 188
168 189 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
169 190 # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate
170 191 # TODO: type the `labels` arg
171 192 """delegated from merge.applyupdates: merging of .hgsubstate file
172 193 in working context, merging context and ancestor context"""
173 194 if mctx == actx: # backwards?
174 195 actx = wctx.p1()
175 196 s1 = wctx.substate
176 197 s2 = mctx.substate
177 198 sa = actx.substate
178 199 sm = {}
179 200
180 201 repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx))
181 202
182 203 def debug(s, msg, r=b""):
183 204 if r:
184 205 r = b"%s:%s:%s" % r
185 206 repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r))
186 207
187 208 promptssrc = filemerge.partextras(labels)
188 209 for s, l in sorted(s1.items()):
189 210 a = sa.get(s, nullstate)
190 211 ld = l # local state with possible dirty flag for compares
191 212 if wctx.sub(s).dirty():
192 213 ld = (l[0], l[1] + b"+")
193 214 if wctx == actx: # overwrite
194 215 a = ld
195 216
196 217 prompts = promptssrc.copy()
197 218 prompts[b's'] = s
198 219 if s in s2:
199 220 r = s2[s]
200 221 if ld == r or r == a: # no change or local is newer
201 222 sm[s] = l
202 223 continue
203 224 elif ld == a: # other side changed
204 225 debug(s, b"other changed, get", r)
205 226 wctx.sub(s).get(r, overwrite)
206 227 sm[s] = r
207 228 elif ld[0] != r[0]: # sources differ
208 229 prompts[b'lo'] = l[0]
209 230 prompts[b'ro'] = r[0]
210 231 if repo.ui.promptchoice(
211 232 _(
212 233 b' subrepository sources for %(s)s differ\n'
213 234 b'you can use (l)ocal%(l)s source (%(lo)s)'
214 235 b' or (r)emote%(o)s source (%(ro)s).\n'
215 236 b'what do you want to do?'
216 237 b'$$ &Local $$ &Remote'
217 238 )
218 239 % prompts,
219 240 0,
220 241 ):
221 242 debug(s, b"prompt changed, get", r)
222 243 wctx.sub(s).get(r, overwrite)
223 244 sm[s] = r
224 245 elif ld[1] == a[1]: # local side is unchanged
225 246 debug(s, b"other side changed, get", r)
226 247 wctx.sub(s).get(r, overwrite)
227 248 sm[s] = r
228 249 else:
229 250 debug(s, b"both sides changed")
230 251 srepo = wctx.sub(s)
231 252 prompts[b'sl'] = srepo.shortid(l[1])
232 253 prompts[b'sr'] = srepo.shortid(r[1])
233 254 option = repo.ui.promptchoice(
234 255 _(
235 256 b' subrepository %(s)s diverged (local revision: %(sl)s, '
236 257 b'remote revision: %(sr)s)\n'
237 258 b'you can (m)erge, keep (l)ocal%(l)s or keep '
238 259 b'(r)emote%(o)s.\n'
239 260 b'what do you want to do?'
240 261 b'$$ &Merge $$ &Local $$ &Remote'
241 262 )
242 263 % prompts,
243 264 0,
244 265 )
245 266 if option == 0:
246 267 wctx.sub(s).merge(r)
247 268 sm[s] = l
248 269 debug(s, b"merge with", r)
249 270 elif option == 1:
250 271 sm[s] = l
251 272 debug(s, b"keep local subrepo revision", l)
252 273 else:
253 274 wctx.sub(s).get(r, overwrite)
254 275 sm[s] = r
255 276 debug(s, b"get remote subrepo revision", r)
256 277 elif ld == a: # remote removed, local unchanged
257 278 debug(s, b"remote removed, remove")
258 279 wctx.sub(s).remove()
259 280 elif a == nullstate: # not present in remote or ancestor
260 281 debug(s, b"local added, keep")
261 282 sm[s] = l
262 283 continue
263 284 else:
264 285 if repo.ui.promptchoice(
265 286 _(
266 287 b' local%(l)s changed subrepository %(s)s'
267 288 b' which remote%(o)s removed\n'
268 289 b'use (c)hanged version or (d)elete?'
269 290 b'$$ &Changed $$ &Delete'
270 291 )
271 292 % prompts,
272 293 0,
273 294 ):
274 295 debug(s, b"prompt remove")
275 296 wctx.sub(s).remove()
276 297
277 298 for s, r in sorted(s2.items()):
278 299 if s in s1:
279 300 continue
280 301 elif s not in sa:
281 302 debug(s, b"remote added, get", r)
282 303 mctx.sub(s).get(r)
283 304 sm[s] = r
284 305 elif r != sa[s]:
285 306 prompts = promptssrc.copy()
286 307 prompts[b's'] = s
287 308 if (
288 309 repo.ui.promptchoice(
289 310 _(
290 311 b' remote%(o)s changed subrepository %(s)s'
291 312 b' which local%(l)s removed\n'
292 313 b'use (c)hanged version or (d)elete?'
293 314 b'$$ &Changed $$ &Delete'
294 315 )
295 316 % prompts,
296 317 0,
297 318 )
298 319 == 0
299 320 ):
300 321 debug(s, b"prompt recreate", r)
301 322 mctx.sub(s).get(r)
302 323 sm[s] = r
303 324
304 325 # record merged .hgsubstate
305 326 writestate(repo, sm)
306 327 return sm
307 328
308 329
309 330 def precommit(ui, wctx, status, match, force=False):
310 331 # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate]
311 332 """Calculate .hgsubstate changes that should be applied before committing
312 333
313 334 Returns (subs, commitsubs, newstate) where
314 335 - subs: changed subrepos (including dirty ones)
315 336 - commitsubs: dirty subrepos which the caller needs to commit recursively
316 337 - newstate: new state dict which the caller must write to .hgsubstate
317 338
318 339 This also updates the given status argument.
319 340 """
320 341 subs = []
321 342 commitsubs = set()
322 343 newstate = wctx.substate.copy()
323 344
324 345 # only manage subrepos and .hgsubstate if .hgsub is present
325 346 if b'.hgsub' in wctx:
326 347 # we'll decide whether to track this ourselves, thanks
327 348 for c in status.modified, status.added, status.removed:
328 349 if b'.hgsubstate' in c:
329 350 c.remove(b'.hgsubstate')
330 351
331 352 # compare current state to last committed state
332 353 # build new substate based on last committed state
333 354 oldstate = wctx.p1().substate
334 355 for s in sorted(newstate.keys()):
335 356 if not match(s):
336 357 # ignore working copy, use old state if present
337 358 if s in oldstate:
338 359 newstate[s] = oldstate[s]
339 360 continue
340 361 if not force:
341 362 raise error.Abort(
342 363 _(b"commit with new subrepo %s excluded") % s
343 364 )
344 365 dirtyreason = wctx.sub(s).dirtyreason(True)
345 366 if dirtyreason:
346 367 if not ui.configbool(b'ui', b'commitsubrepos'):
347 368 raise error.Abort(
348 369 dirtyreason,
349 370 hint=_(b"use --subrepos for recursive commit"),
350 371 )
351 372 subs.append(s)
352 373 commitsubs.add(s)
353 374 else:
354 375 bs = wctx.sub(s).basestate()
355 376 newstate[s] = (newstate[s][0], bs, newstate[s][2])
356 377 if oldstate.get(s, (None, None, None))[1] != bs:
357 378 subs.append(s)
358 379
359 380 # check for removed subrepos
360 381 for p in wctx.parents():
361 382 r = [s for s in p.substate if s not in newstate]
362 383 subs += [s for s in r if match(s)]
363 384 if subs:
364 385 if not match(b'.hgsub') and b'.hgsub' in (
365 386 wctx.modified() + wctx.added()
366 387 ):
367 388 raise error.Abort(_(b"can't commit subrepos without .hgsub"))
368 389 status.modified.insert(0, b'.hgsubstate')
369 390
370 391 elif b'.hgsub' in status.removed:
371 392 # clean up .hgsubstate when .hgsub is removed
372 393 if b'.hgsubstate' in wctx and b'.hgsubstate' not in (
373 394 status.modified + status.added + status.removed
374 395 ):
375 396 status.removed.insert(0, b'.hgsubstate')
376 397
377 398 return subs, commitsubs, newstate
378 399
379 400
380 401 def repo_rel_or_abs_source(repo):
381 402 """return the source of this repo
382 403
383 404 Either absolute or relative the outermost repo"""
384 405 parent = repo
385 406 chunks = []
386 407 while hasattr(parent, '_subparent'):
387 408 source = urlutil.url(parent._subsource)
388 409 chunks.append(bytes(source))
389 410 if source.isabs():
390 411 break
391 412 parent = parent._subparent
392 413
393 414 chunks.reverse()
394 415 path = posixpath.join(*chunks)
395 416 return posixpath.normpath(path)
396 417
397 418
398 419 def reporelpath(repo):
399 420 # type: (localrepo.localrepository) -> bytes
400 421 """return path to this (sub)repo as seen from outermost repo"""
401 422 parent = repo
402 423 while hasattr(parent, '_subparent'):
403 424 parent = parent._subparent
404 425 return repo.root[len(pathutil.normasprefix(parent.root)) :]
405 426
406 427
407 428 def subrelpath(sub):
408 429 # type: (subrepo.abstractsubrepo) -> bytes
409 430 """return path to this subrepo as seen from outermost repo"""
410 431 return sub._relpath
411 432
412 433
413 434 def _abssource(repo, push=False, abort=True):
414 435 # type: (localrepo.localrepository, bool, bool) -> Optional[bytes]
415 436 """return pull/push path of repo - either based on parent repo .hgsub info
416 437 or on the top repo config. Abort or return None if no source found."""
417 438 if hasattr(repo, '_subparent'):
418 439 source = urlutil.url(repo._subsource)
419 440 if source.isabs():
420 441 return bytes(source)
421 442 source.path = posixpath.normpath(source.path)
422 443 parent = _abssource(repo._subparent, push, abort=False)
423 444 if parent:
424 445 parent = urlutil.url(util.pconvert(parent))
425 446 parent.path = posixpath.join(parent.path or b'', source.path)
426 447 parent.path = posixpath.normpath(parent.path)
427 448 return bytes(parent)
428 449 else: # recursion reached top repo
429 450 path = None
430 451 if hasattr(repo, '_subtoppath'):
431 452 path = repo._subtoppath
432 453 elif push and repo.ui.config(b'paths', b'default-push'):
433 454 path = repo.ui.config(b'paths', b'default-push')
434 455 elif repo.ui.config(b'paths', b'default'):
435 456 path = repo.ui.config(b'paths', b'default')
436 457 elif repo.shared():
437 458 # chop off the .hg component to get the default path form. This has
438 459 # already run through vfsmod.vfs(..., realpath=True), so it doesn't
439 460 # have problems with 'C:'
440 461 return os.path.dirname(repo.sharedpath)
441 462 if path:
442 463 # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is
443 464 # as expected: an absolute path to the root of the C: drive. The
444 465 # latter is a relative path, and works like so:
445 466 #
446 467 # C:\>cd C:\some\path
447 468 # C:\>D:
448 469 # D:\>python -c "import os; print os.path.abspath('C:')"
449 470 # C:\some\path
450 471 #
451 472 # D:\>python -c "import os; print os.path.abspath('C:relative')"
452 473 # C:\some\path\relative
453 474 if urlutil.hasdriveletter(path):
454 475 if len(path) == 2 or path[2:3] not in br'\/':
455 476 path = util.abspath(path)
456 477 return path
457 478
458 479 if abort:
459 480 raise error.Abort(_(b"default path for subrepository not found"))
460 481
461 482
462 483 def newcommitphase(ui, ctx):
463 484 # type: (uimod.ui, context.changectx) -> int
464 485 commitphase = phases.newcommitphase(ui)
465 486 substate = getattr(ctx, "substate", None)
466 487 if not substate:
467 488 return commitphase
468 489 check = ui.config(b'phases', b'checksubrepos')
469 490 if check not in (b'ignore', b'follow', b'abort'):
470 491 raise error.Abort(
471 492 _(b'invalid phases.checksubrepos configuration: %s') % check
472 493 )
473 494 if check == b'ignore':
474 495 return commitphase
475 496 maxphase = phases.public
476 497 maxsub = None
477 498 for s in sorted(substate):
478 499 sub = ctx.sub(s)
479 500 subphase = sub.phase(substate[s][1])
480 501 if maxphase < subphase:
481 502 maxphase = subphase
482 503 maxsub = s
483 504 if commitphase < maxphase:
484 505 if check == b'abort':
485 506 raise error.Abort(
486 507 _(
487 508 b"can't commit in %s phase"
488 509 b" conflicting %s from subrepository %s"
489 510 )
490 511 % (
491 512 phases.phasenames[commitphase],
492 513 phases.phasenames[maxphase],
493 514 maxsub,
494 515 )
495 516 )
496 517 ui.warn(
497 518 _(
498 519 b"warning: changes are committed in"
499 520 b" %s phase from subrepository %s\n"
500 521 )
501 522 % (phases.phasenames[maxphase], maxsub)
502 523 )
503 524 return maxphase
504 525 return commitphase
@@ -1,2346 +1,2347 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import collections
10 10 import contextlib
11 11 import datetime
12 12 import errno
13 13 import inspect
14 14 import os
15 15 import re
16 16 import signal
17 17 import socket
18 18 import subprocess
19 19 import sys
20 20 import traceback
21 import typing
21 22
22 23 from typing import (
23 24 Any,
24 25 Callable,
25 26 Dict,
26 27 List,
27 28 NoReturn,
28 29 Optional,
29 30 Tuple,
30 31 Type,
31 32 TypeVar,
32 33 Union,
33 34 cast,
34 35 overload,
35 36 )
36 37
37 38 from .i18n import _
38 39 from .node import hex
39 40 from .pycompat import (
40 41 open,
41 42 )
42 43
43 44 from . import (
44 45 color,
45 46 config,
46 47 configitems,
47 48 encoding,
48 49 error,
49 50 extensions,
50 51 formatter,
51 52 loggingutil,
52 53 progress,
53 54 pycompat,
54 55 rcutil,
55 56 scmutil,
56 57 util,
57 58 )
58 59 from .utils import (
59 60 dateutil,
60 61 procutil,
61 62 resourceutil,
62 63 stringutil,
63 64 urlutil,
64 65 )
65 66
66 67 _ConfigItems = Dict[Tuple[bytes, bytes], object] # {(section, name) : value}
67 68 # The **opts args of the various write() methods can be basically anything, but
68 69 # there's no way to express it as "anything but str". So type it to be the
69 70 # handful of known types that are used.
70 71 _MsgOpts = Union[bytes, bool, List["_PromptChoice"]]
71 72 _PromptChoice = Tuple[bytes, bytes]
72 73 _Tui = TypeVar('_Tui', bound="ui")
73 74
74 75 urlreq = util.urlreq
75 76
76 77 # for use with str.translate(None, _keepalnum), to keep just alphanumerics
77 78 _keepalnum: bytes = b''.join(
78 79 c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
79 80 )
80 81
81 82 # The config knobs that will be altered (if unset) by ui.tweakdefaults.
82 83 tweakrc: bytes = b"""
83 84 [ui]
84 85 # The rollback command is dangerous. As a rule, don't use it.
85 86 rollback = False
86 87 # Make `hg status` report copy information
87 88 statuscopies = yes
88 89 # Prefer curses UIs when available. Revert to plain-text with `text`.
89 90 interface = curses
90 91 # Make compatible commands emit cwd-relative paths by default.
91 92 relative-paths = yes
92 93
93 94 [commands]
94 95 # Grep working directory by default.
95 96 grep.all-files = True
96 97 # Refuse to perform an `hg update` that would cause a file content merge
97 98 update.check = noconflict
98 99 # Show conflicts information in `hg status`
99 100 status.verbose = True
100 101 # Make `hg resolve` with no action (like `-m`) fail instead of re-merging.
101 102 resolve.explicit-re-merge = True
102 103
103 104 [diff]
104 105 git = 1
105 106 showfunc = 1
106 107 word-diff = 1
107 108 """
108 109
109 110 samplehgrcs: Dict[bytes, bytes] = {
110 111 b'user': b"""# example user config (see 'hg help config' for more info)
111 112 [ui]
112 113 # name and email, e.g.
113 114 # username = Jane Doe <jdoe@example.com>
114 115 username =
115 116
116 117 # We recommend enabling tweakdefaults to get slight improvements to
117 118 # the UI over time. Make sure to set HGPLAIN in the environment when
118 119 # writing scripts!
119 120 # tweakdefaults = True
120 121
121 122 # uncomment to disable color in command output
122 123 # (see 'hg help color' for details)
123 124 # color = never
124 125
125 126 # uncomment to disable command output pagination
126 127 # (see 'hg help pager' for details)
127 128 # paginate = never
128 129
129 130 [extensions]
130 131 # uncomment the lines below to enable some popular extensions
131 132 # (see 'hg help extensions' for more info)
132 133 #
133 134 # histedit =
134 135 # rebase =
135 136 # uncommit =
136 137 """,
137 138 b'cloned': b"""# example repository config (see 'hg help config' for more info)
138 139 [paths]
139 140 default = %s
140 141
141 142 # path aliases to other clones of this repo in URLs or filesystem paths
142 143 # (see 'hg help config.paths' for more info)
143 144 #
144 145 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
145 146 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
146 147 # my-clone = /home/jdoe/jdoes-clone
147 148
148 149 [ui]
149 150 # name and email (local to this repository, optional), e.g.
150 151 # username = Jane Doe <jdoe@example.com>
151 152 """,
152 153 b'local': b"""# example repository config (see 'hg help config' for more info)
153 154 [paths]
154 155 # path aliases to other clones of this repo in URLs or filesystem paths
155 156 # (see 'hg help config.paths' for more info)
156 157 #
157 158 # default = http://example.com/hg/example-repo
158 159 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
159 160 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
160 161 # my-clone = /home/jdoe/jdoes-clone
161 162
162 163 [ui]
163 164 # name and email (local to this repository, optional), e.g.
164 165 # username = Jane Doe <jdoe@example.com>
165 166 """,
166 167 b'global': b"""# example system-wide hg config (see 'hg help config' for more info)
167 168
168 169 [ui]
169 170 # uncomment to disable color in command output
170 171 # (see 'hg help color' for details)
171 172 # color = never
172 173
173 174 # uncomment to disable command output pagination
174 175 # (see 'hg help pager' for details)
175 176 # paginate = never
176 177
177 178 [extensions]
178 179 # uncomment the lines below to enable some popular extensions
179 180 # (see 'hg help extensions' for more info)
180 181 #
181 182 # blackbox =
182 183 # churn =
183 184 """,
184 185 }
185 186
186 187
187 188 def _maybestrurl(maybebytes):
188 189 return pycompat.rapply(pycompat.strurl, maybebytes)
189 190
190 191
191 192 def _maybebytesurl(maybestr):
192 193 return pycompat.rapply(pycompat.bytesurl, maybestr)
193 194
194 195
195 196 class httppasswordmgrdbproxy:
196 197 """Delays loading urllib2 until it's needed."""
197 198
198 199 def __init__(self) -> None:
199 200 self._mgr = None
200 201
201 202 def _get_mgr(self):
202 203 if self._mgr is None:
203 204 self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
204 205 return self._mgr
205 206
206 207 def add_password(self, realm, uris, user, passwd):
207 208 return self._get_mgr().add_password(
208 209 _maybestrurl(realm),
209 210 _maybestrurl(uris),
210 211 _maybestrurl(user),
211 212 _maybestrurl(passwd),
212 213 )
213 214
214 215 def find_user_password(self, realm, uri):
215 216 mgr = self._get_mgr()
216 217 return _maybebytesurl(
217 218 mgr.find_user_password(_maybestrurl(realm), _maybestrurl(uri))
218 219 )
219 220
220 221
221 222 def _catchterm(*args) -> NoReturn:
222 223 raise error.SignalInterrupt
223 224
224 225
225 226 # unique object used to detect no default value has been provided when
226 227 # retrieving configuration value.
227 228 _unset = object()
228 229
229 230 # _reqexithandlers: callbacks run at the end of a request
230 231 _reqexithandlers: List = []
231 232
232 233
233 234 class ui:
234 235 def __init__(self, src: Optional["ui"] = None) -> None:
235 236 """Create a fresh new ui object if no src given
236 237
237 238 Use uimod.ui.load() to create a ui which knows global and user configs.
238 239 In most cases, you should use ui.copy() to create a copy of an existing
239 240 ui object.
240 241 """
241 242 # _buffers: used for temporary capture of output
242 243 self._buffers = []
243 244 # 3-tuple describing how each buffer in the stack behaves.
244 245 # Values are (capture stderr, capture subprocesses, apply labels).
245 246 self._bufferstates = []
246 247 # When a buffer is active, defines whether we are expanding labels.
247 248 # This exists to prevent an extra list lookup.
248 249 self._bufferapplylabels = None
249 250 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
250 251 self._reportuntrusted = True
251 252 self._knownconfig = configitems.coreitems
252 253 self._ocfg = config.config() # overlay
253 254 self._tcfg = config.config() # trusted
254 255 self._ucfg = config.config() # untrusted
255 256 self._trustusers = set()
256 257 self._trustgroups = set()
257 258 self.callhooks = True
258 259 # hold the root to use for each [paths] entry
259 260 self._path_to_root = {}
260 261 # Insecure server connections requested.
261 262 self.insecureconnections = False
262 263 # Blocked time
263 264 self.logblockedtimes = False
264 265 # color mode: see mercurial/color.py for possible value
265 266 self._colormode = None
266 267 self._terminfoparams = {}
267 268 self._styles = {}
268 269 self._uninterruptible = False
269 270 self.showtimestamp = False
270 271
271 272 if src:
272 273 self._fout = src._fout
273 274 self._ferr = src._ferr
274 275 self._fin = src._fin
275 276 self._fmsg = src._fmsg
276 277 self._fmsgout = src._fmsgout
277 278 self._fmsgerr = src._fmsgerr
278 279 self._finoutredirected = src._finoutredirected
279 280 self._loggers = src._loggers.copy()
280 281 self.pageractive = src.pageractive
281 282 self._disablepager = src._disablepager
282 283 self._tweaked = src._tweaked
283 284
284 285 self._tcfg = src._tcfg.copy()
285 286 self._ucfg = src._ucfg.copy()
286 287 self._ocfg = src._ocfg.copy()
287 288 self._trustusers = src._trustusers.copy()
288 289 self._trustgroups = src._trustgroups.copy()
289 290 self.environ = src.environ
290 291 self.callhooks = src.callhooks
291 292 self._path_to_root = src._path_to_root
292 293 self.insecureconnections = src.insecureconnections
293 294 self._colormode = src._colormode
294 295 self._terminfoparams = src._terminfoparams.copy()
295 296 self._styles = src._styles.copy()
296 297
297 298 self.fixconfig()
298 299
299 300 self.httppasswordmgrdb = src.httppasswordmgrdb
300 301 self._blockedtimes = src._blockedtimes
301 302 else:
302 303 self._fout = procutil.stdout
303 304 self._ferr = procutil.stderr
304 305 self._fin = procutil.stdin
305 306 self._fmsg = None
306 307 self._fmsgout = self.fout # configurable
307 308 self._fmsgerr = self.ferr # configurable
308 309 self._finoutredirected = False
309 310 self._loggers = {}
310 311 self.pageractive = False
311 312 self._disablepager = False
312 313 self._tweaked = False
313 314
314 315 # shared read-only environment
315 316 self.environ = encoding.environ
316 317
317 318 self.httppasswordmgrdb = httppasswordmgrdbproxy()
318 319 self._blockedtimes = collections.defaultdict(int)
319 320
320 321 allowed = self.configlist(b'experimental', b'exportableenviron')
321 322 if b'*' in allowed:
322 323 self._exportableenviron = self.environ
323 324 else:
324 325 self._exportableenviron = {}
325 326 for k in allowed:
326 327 if k in self.environ:
327 328 self._exportableenviron[k] = self.environ[k]
328 329
329 330 def _new_source(self) -> None:
330 331 self._ocfg.new_source()
331 332 self._tcfg.new_source()
332 333 self._ucfg.new_source()
333 334
334 335 @classmethod
335 336 def load(cls: Type[_Tui]) -> _Tui:
336 337 """Create a ui and load global and user configs"""
337 338 u = cls()
338 339 # we always trust global config files and environment variables
339 340 for t, f in rcutil.rccomponents():
340 341 if t == b'path':
341 342 u.readconfig(f, trust=True)
342 343 elif t == b'resource':
343 344 u.read_resource_config(f, trust=True)
344 345 elif t == b'items':
345 346 u._new_source()
346 347 sections = set()
347 348 for section, name, value, source in f:
348 349 # do not set u._ocfg
349 350 # XXX clean this up once immutable config object is a thing
350 351 u._tcfg.set(section, name, value, source)
351 352 u._ucfg.set(section, name, value, source)
352 353 sections.add(section)
353 354 for section in sections:
354 355 u.fixconfig(section=section)
355 356 else:
356 357 raise error.ProgrammingError(b'unknown rctype: %s' % t)
357 358 u._maybetweakdefaults()
358 359 u._new_source() # anything after that is a different level
359 360 return u
360 361
361 362 def _maybetweakdefaults(self) -> None:
362 363 if not self.configbool(b'ui', b'tweakdefaults'):
363 364 return
364 365 if self._tweaked or self.plain(b'tweakdefaults'):
365 366 return
366 367
367 368 # Note: it is SUPER IMPORTANT that you set self._tweaked to
368 369 # True *before* any calls to setconfig(), otherwise you'll get
369 370 # infinite recursion between setconfig and this method.
370 371 #
371 372 # TODO: We should extract an inner method in setconfig() to
372 373 # avoid this weirdness.
373 374 self._tweaked = True
374 375 tmpcfg = config.config()
375 376 tmpcfg.parse(b'<tweakdefaults>', tweakrc)
376 377 for section in tmpcfg:
377 378 for name, value in tmpcfg.items(section):
378 379 if not self.hasconfig(section, name):
379 380 self.setconfig(section, name, value, b"<tweakdefaults>")
380 381
381 382 def copy(self: _Tui) -> _Tui:
382 383 return self.__class__(self)
383 384
384 385 def resetstate(self) -> None:
385 386 """Clear internal state that shouldn't persist across commands"""
386 387 if self._progbar:
387 388 self._progbar.resetstate() # reset last-print time of progress bar
388 389 self.httppasswordmgrdb = httppasswordmgrdbproxy()
389 390
390 391 @contextlib.contextmanager
391 392 def timeblockedsection(self, key: bytes):
392 393 # this is open-coded below - search for timeblockedsection to find them
393 394 starttime = util.timer()
394 395 try:
395 396 yield
396 397 finally:
397 398 self._blockedtimes[key + b'_blocked'] += (
398 399 util.timer() - starttime
399 400 ) * 1000
400 401
401 402 @contextlib.contextmanager
402 403 def uninterruptible(self):
403 404 """Mark an operation as unsafe.
404 405
405 406 Most operations on a repository are safe to interrupt, but a
406 407 few are risky (for example repair.strip). This context manager
407 408 lets you advise Mercurial that something risky is happening so
408 409 that control-C etc can be blocked if desired.
409 410 """
410 411 enabled = self.configbool(b'experimental', b'nointerrupt')
411 412 if enabled and self.configbool(
412 413 b'experimental', b'nointerrupt-interactiveonly'
413 414 ):
414 415 enabled = self.interactive()
415 416 if self._uninterruptible or not enabled:
416 417 # if nointerrupt support is turned off, the process isn't
417 418 # interactive, or we're already in an uninterruptible
418 419 # block, do nothing.
419 420 yield
420 421 return
421 422
422 423 def warn():
423 424 self.warn(_(b"shutting down cleanly\n"))
424 425 self.warn(
425 426 _(b"press ^C again to terminate immediately (dangerous)\n")
426 427 )
427 428 return True
428 429
429 430 with procutil.uninterruptible(warn):
430 431 try:
431 432 self._uninterruptible = True
432 433 yield
433 434 finally:
434 435 self._uninterruptible = False
435 436
436 437 def formatter(self, topic: bytes, opts):
437 438 return formatter.formatter(self, self, topic, opts)
438 439
439 440 def _trusted(self, fp, f: bytes) -> bool:
440 441 st = util.fstat(fp)
441 442 if util.isowner(st):
442 443 return True
443 444
444 445 tusers, tgroups = self._trustusers, self._trustgroups
445 446 if b'*' in tusers or b'*' in tgroups:
446 447 return True
447 448
448 449 user = util.username(st.st_uid)
449 450 group = util.groupname(st.st_gid)
450 451 if user in tusers or group in tgroups or user == util.username():
451 452 return True
452 453
453 454 if self._reportuntrusted:
454 455 self.warn(
455 456 _(
456 457 b'not trusting file %s from untrusted '
457 458 b'user %s, group %s\n'
458 459 )
459 460 % (f, user, group)
460 461 )
461 462 return False
462 463
463 464 def read_resource_config(
464 465 self, name, root=None, trust=False, sections=None, remap=None
465 466 ) -> None:
466 467 try:
467 468 fp = resourceutil.open_resource(name[0], name[1])
468 469 except IOError:
469 470 if not sections: # ignore unless we were looking for something
470 471 return
471 472 raise
472 473
473 474 self._readconfig(
474 475 b'resource:%s.%s' % name, fp, root, trust, sections, remap
475 476 )
476 477
477 478 def readconfig(
478 479 self, filename, root=None, trust=False, sections=None, remap=None
479 480 ) -> None:
480 481 try:
481 482 fp = open(filename, 'rb')
482 483 except IOError:
483 484 if not sections: # ignore unless we were looking for something
484 485 return
485 486 raise
486 487
487 488 self._readconfig(filename, fp, root, trust, sections, remap)
488 489
489 490 def _readconfig(
490 491 self, filename, fp, root=None, trust=False, sections=None, remap=None
491 492 ) -> None:
492 493 with fp:
493 494 cfg = config.config()
494 495 trusted = sections or trust or self._trusted(fp, filename)
495 496
496 497 try:
497 498 cfg.read(filename, fp, sections=sections, remap=remap)
498 499 except error.ConfigError as inst:
499 500 if trusted:
500 501 raise
501 502 self.warn(
502 503 _(b'ignored %s: %s\n') % (inst.location, inst.message)
503 504 )
504 505
505 506 self._applyconfig(cfg, trusted, root)
506 507
507 508 def applyconfig(
508 509 self, configitems: _ConfigItems, source=b"", root=None
509 510 ) -> None:
510 511 """Add configitems from a non-file source. Unlike with ``setconfig()``,
511 512 they can be overridden by subsequent config file reads. The items are
512 513 in the same format as ``configoverride()``, namely a dict of the
513 514 following structures: {(section, name) : value}
514 515
515 516 Typically this is used by extensions that inject themselves into the
516 517 config file load procedure by monkeypatching ``localrepo.loadhgrc()``.
517 518 """
518 519 cfg = config.config()
519 520
520 521 for (section, name), value in configitems.items():
521 522 cfg.set(section, name, value, source)
522 523
523 524 self._applyconfig(cfg, True, root)
524 525
    def _applyconfig(self, cfg, trusted, root) -> None:
        """Fold parsed config ``cfg`` into the trusted/untrusted stores.

        When plain mode (HGPLAIN) is active, options that would change
        Mercurial's output or behavior are stripped from ``cfg`` first.
        ``trusted`` controls whether the values also land in the trusted
        store; the untrusted store always receives everything.
        """
        if self.plain():
            # drop ui options that alter output in plain mode
            for k in (
                b'debug',
                b'fallbackencoding',
                b'quiet',
                b'slash',
                b'logtemplate',
                b'message-output',
                b'statuscopies',
                b'style',
                b'traceback',
                b'verbose',
            ):
                if k in cfg[b'ui']:
                    del cfg[b'ui'][k]
            for k, v in cfg.items(b'defaults'):
                del cfg[b'defaults'][k]
            for k, v in cfg.items(b'commands'):
                del cfg[b'commands'][k]
            for k, v in cfg.items(b'command-templates'):
                del cfg[b'command-templates'][k]
        # Don't remove aliases from the configuration if in the exceptionlist
        if self.plain(b'alias'):
            for k, v in cfg.items(b'alias'):
                del cfg[b'alias'][k]
        if self.plain(b'revsetalias'):
            for k, v in cfg.items(b'revsetalias'):
                del cfg[b'revsetalias'][k]
        if self.plain(b'templatealias'):
            for k, v in cfg.items(b'templatealias'):
                del cfg[b'templatealias'][k]

        if trusted:
            # explicit overrides (_ocfg) are re-applied on top so that
            # setconfig() values keep winning over file-based values
            self._tcfg.update(cfg)
            self._tcfg.update(self._ocfg)
        self._ucfg.update(cfg)
        self._ucfg.update(self._ocfg)

        if root is None:
            root = os.path.expanduser(b'~')
        self.fixconfig(root=root)
567 568
    def fixconfig(self, root=None, section=None) -> None:
        """Normalize config state after values change.

        ``section`` restricts the work to one config section (``None``
        processes them all). ``root`` is recorded as the base directory
        for each path value (defaults to the current working directory).
        """
        if section in (None, b'paths'):
            # expand vars and ~
            # translate paths relative to root (or home) into absolute paths
            root = root or encoding.getcwd()
            for c in self._tcfg, self._ucfg, self._ocfg:
                for n, p in c.items(b'paths'):
                    old_p = p
                    s = self.configsource(b'paths', n) or b'none'
                    root_key = (n, p, s)
                    if root_key not in self._path_to_root:
                        self._path_to_root[root_key] = root
                    # Ignore sub-options.
                    if b':' in n:
                        continue
                    if not p:
                        continue
                    if b'%%' in p:
                        # NOTE(review): `s` cannot be None here because of
                        # the `or b'none'` above, so this branch (with its
                        # str-not-bytes literal) looks unreachable -- confirm
                        # before removing.
                        if s is None:
                            s = 'none'
                        self.warn(
                            _(b"(deprecated '%%' in path %s=%s from %s)\n")
                            % (n, p, s)
                        )
                        p = p.replace(b'%%', b'%')
                    if p != old_p:
                        c.alter(b"paths", n, p)

        if section in (None, b'ui'):
            # update ui options
            self._fmsgout, self._fmsgerr = _selectmsgdests(self)
            self.debugflag = self.configbool(b'ui', b'debug')
            self.verbose = self.debugflag or self.configbool(b'ui', b'verbose')
            self.quiet = not self.debugflag and self.configbool(b'ui', b'quiet')
            if self.verbose and self.quiet:
                # contradictory settings cancel each other out
                self.quiet = self.verbose = False
            self._reportuntrusted = self.debugflag or self.configbool(
                b"ui", b"report_untrusted"
            )
            self.showtimestamp = self.configbool(b'ui', b'timestamp-output')
            self.tracebackflag = self.configbool(b'ui', b'traceback')
            self.logblockedtimes = self.configbool(b'ui', b'logblockedtimes')

        if section in (None, b'trusted'):
            # update trust information
            self._trustusers.update(self.configlist(b'trusted', b'users'))
            self._trustgroups.update(self.configlist(b'trusted', b'groups'))

        if section in (None, b'devel', b'ui') and self.debugflag:
            tracked = set()
            if self.configbool(b'devel', b'debug.extensions'):
                tracked.add(b'extension')
            if tracked:
                logger = loggingutil.fileobjectlogger(self._ferr, tracked)
                self.setlogger(b'debug', logger)
623 624
624 625 def backupconfig(self, section, item):
625 626 return (
626 627 self._ocfg.backup(section, item),
627 628 self._tcfg.backup(section, item),
628 629 self._ucfg.backup(section, item),
629 630 )
630 631
631 632 def restoreconfig(self, data) -> None:
632 633 self._ocfg.restore(data[0])
633 634 self._tcfg.restore(data[1])
634 635 self._ucfg.restore(data[2])
635 636
636 637 def setconfig(self, section, name, value, source=b'') -> None:
637 638 for cfg in (self._ocfg, self._tcfg, self._ucfg):
638 639 cfg.set(section, name, value, source)
639 640 self.fixconfig(section=section)
640 641 self._maybetweakdefaults()
641 642
    def _data(self, untrusted):
        # Select the backing config store: untrusted reads come from _ucfg,
        # trusted reads from _tcfg. (The historical and/or short-circuit
        # form is kept as-is.)
        return untrusted and self._ucfg or self._tcfg
644 645
645 646 def configsource(self, section, name, untrusted=False):
646 647 return self._data(untrusted).source(section, name)
647 648
648 649 def config(self, section, name, default=_unset, untrusted=False):
649 650 """return the plain string version of a config"""
650 651 value = self._config(
651 652 section, name, default=default, untrusted=untrusted
652 653 )
653 654 if value is _unset:
654 655 return None
655 656 return value
656 657
    def _config(self, section, name, default=_unset, untrusted=False):
        """Core config lookup: resolves aliases, registered defaults and
        emits devel warnings for misuse.

        Returns ``_unset`` when the option is not set and no usable default
        exists; callers (e.g. ``config()``) translate that sentinel.
        """
        value = itemdefault = default
        item = self._knownconfig.get(section, {}).get(name)
        alternates = [(section, name)]

        if item is not None and item.in_core_extension is not None:
            # Only return the default for an in-core extension item if said
            # extension is enabled
            if item.in_core_extension in extensions.extensions(self):
                item = None

        if item is not None:
            # also look the value up under the item's registered aliases
            alternates.extend(item.alias)
            if callable(item.default):
                itemdefault = item.default()
            else:
                itemdefault = item.default
        else:
            msg = b"accessing unregistered config item: '%s.%s'"
            msg %= (section, name)
            self.develwarn(msg, 2, b'warn-config-unknown')

        if default is _unset:
            if item is None:
                value = default
            elif item.default is configitems.dynamicdefault:
                # dynamic defaults must be supplied by the caller
                value = None
                msg = b"config item requires an explicit default value: '%s.%s'"
                msg %= (section, name)
                self.develwarn(msg, 2, b'warn-config-default')
            else:
                value = itemdefault
        elif (
            item is not None
            and item.default is not configitems.dynamicdefault
            and default != itemdefault
        ):
            # caller's default disagrees with the registered one
            msg = (
                b"specifying a mismatched default value for a registered "
                b"config item: '%s.%s' '%s'"
            )
            msg %= (section, name, pycompat.bytestr(default))
            self.develwarn(msg, 2, b'warn-config-default')

        candidates = []
        config = self._data(untrusted)
        for s, n in alternates:
            candidate = config.get(s, n, None)
            if candidate is not None:
                candidates.append((s, n, candidate))
        if candidates:

            def level(x):
                # precedence of the layer the candidate was defined in
                return config.level(x[0], x[1])

            # among the name and its aliases, the highest-level value wins
            value = max(candidates, key=level)[2]

        if self.debugflag and not untrusted and self._reportuntrusted:
            for s, n in alternates:
                uvalue = self._ucfg.get(s, n)
                if uvalue is not None and uvalue != value:
                    self.debug(
                        b"ignoring untrusted configuration option "
                        b"%s.%s = %s\n" % (s, n, uvalue)
                    )
        return value
723 724
724 725 def config_default(self, section, name):
725 726 """return the default value for a config option
726 727
727 728 The default is returned "raw", for example if it is a callable, the
728 729 callable was not called.
729 730 """
730 731 item = self._knownconfig.get(section, {}).get(name)
731 732
732 733 if item is None:
733 734 raise KeyError((section, name))
734 735 return item.default
735 736
736 737 def configsuboptions(self, section, name, default=_unset, untrusted=False):
737 738 """Get a config option and all sub-options.
738 739
739 740 Some config options have sub-options that are declared with the
740 741 format "key:opt = value". This method is used to return the main
741 742 option and all its declared sub-options.
742 743
743 744 Returns a 2-tuple of ``(option, sub-options)``, where `sub-options``
744 745 is a dict of defined sub-options where keys and values are strings.
745 746 """
746 747 main = self.config(section, name, default, untrusted=untrusted)
747 748 data = self._data(untrusted)
748 749 sub = {}
749 750 prefix = b'%s:' % name
750 751 for k, v in data.items(section):
751 752 if k.startswith(prefix):
752 753 sub[k[len(prefix) :]] = v
753 754
754 755 if self.debugflag and not untrusted and self._reportuntrusted:
755 756 for k, v in sub.items():
756 757 uvalue = self._ucfg.get(section, b'%s:%s' % (name, k))
757 758 if uvalue is not None and uvalue != v:
758 759 self.debug(
759 760 b'ignoring untrusted configuration option '
760 761 b'%s:%s.%s = %s\n' % (section, name, k, uvalue)
761 762 )
762 763
763 764 return main, sub
764 765
    def configpath(self, section, name, default=_unset, untrusted=False):
        """get a path config item, expanded relative to repo root or config
        file"""
        v = self.config(section, name, default, untrusted)
        if v is None:
            return None
        # NOTE(review): because this is `or`, even absolute paths enter this
        # branch unless they contain '://' -- presumably intentional so only
        # URLs skip relative resolution, but an `and` reading would also be
        # plausible; confirm before changing.
        if not os.path.isabs(v) or b"://" not in v:
            src = self.configsource(section, name, untrusted)
            if b':' in src:
                # source looks like "filename:line"; resolve the value
                # relative to that file's directory
                base = os.path.dirname(src.rsplit(b':')[0])
                v = os.path.join(base, os.path.expanduser(v))
        return v
777 778
778 779 def configbool(self, section, name, default=_unset, untrusted=False):
779 780 """parse a configuration element as a boolean
780 781
781 782 >>> u = ui(); s = b'foo'
782 783 >>> u.setconfig(s, b'true', b'yes')
783 784 >>> u.configbool(s, b'true')
784 785 True
785 786 >>> u.setconfig(s, b'false', b'no')
786 787 >>> u.configbool(s, b'false')
787 788 False
788 789 >>> u.configbool(s, b'unknown')
789 790 False
790 791 >>> u.configbool(s, b'unknown', True)
791 792 True
792 793 >>> u.setconfig(s, b'invalid', b'somevalue')
793 794 >>> u.configbool(s, b'invalid')
794 795 Traceback (most recent call last):
795 796 ...
796 797 ConfigError: foo.invalid is not a boolean ('somevalue')
797 798 """
798 799
799 800 v = self._config(section, name, default, untrusted=untrusted)
800 801 if v is None:
801 802 return v
802 803 if v is _unset:
803 804 if default is _unset:
804 805 return False
805 806 return default
806 807 if isinstance(v, bool):
807 808 return v
808 809 b = stringutil.parsebool(v)
809 810 if b is None:
810 811 raise error.ConfigError(
811 812 _(b"%s.%s is not a boolean ('%s')") % (section, name, v)
812 813 )
813 814 return b
814 815
815 816 def configwith(
816 817 self, convert, section, name, default=_unset, desc=None, untrusted=False
817 818 ):
818 819 """parse a configuration element with a conversion function
819 820
820 821 >>> u = ui(); s = b'foo'
821 822 >>> u.setconfig(s, b'float1', b'42')
822 823 >>> u.configwith(float, s, b'float1')
823 824 42.0
824 825 >>> u.setconfig(s, b'float2', b'-4.25')
825 826 >>> u.configwith(float, s, b'float2')
826 827 -4.25
827 828 >>> u.configwith(float, s, b'unknown', 7)
828 829 7.0
829 830 >>> u.setconfig(s, b'invalid', b'somevalue')
830 831 >>> u.configwith(float, s, b'invalid')
831 832 Traceback (most recent call last):
832 833 ...
833 834 ConfigError: foo.invalid is not a valid float ('somevalue')
834 835 >>> u.configwith(float, s, b'invalid', desc=b'womble')
835 836 Traceback (most recent call last):
836 837 ...
837 838 ConfigError: foo.invalid is not a valid womble ('somevalue')
838 839 """
839 840
840 841 v = self.config(section, name, default, untrusted)
841 842 if v is None:
842 843 return v # do not attempt to convert None
843 844 try:
844 845 return convert(v)
845 846 except (ValueError, error.ParseError):
846 847 if desc is None:
847 848 desc = pycompat.sysbytes(convert.__name__)
848 849 raise error.ConfigError(
849 850 _(b"%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
850 851 )
851 852
852 853 def configint(self, section, name, default=_unset, untrusted=False):
853 854 """parse a configuration element as an integer
854 855
855 856 >>> u = ui(); s = b'foo'
856 857 >>> u.setconfig(s, b'int1', b'42')
857 858 >>> u.configint(s, b'int1')
858 859 42
859 860 >>> u.setconfig(s, b'int2', b'-42')
860 861 >>> u.configint(s, b'int2')
861 862 -42
862 863 >>> u.configint(s, b'unknown', 7)
863 864 7
864 865 >>> u.setconfig(s, b'invalid', b'somevalue')
865 866 >>> u.configint(s, b'invalid')
866 867 Traceback (most recent call last):
867 868 ...
868 869 ConfigError: foo.invalid is not a valid integer ('somevalue')
869 870 """
870 871
871 872 return self.configwith(
872 873 int, section, name, default, b'integer', untrusted
873 874 )
874 875
875 876 def configbytes(self, section, name, default=_unset, untrusted=False):
876 877 """parse a configuration element as a quantity in bytes
877 878
878 879 Units can be specified as b (bytes), k or kb (kilobytes), m or
879 880 mb (megabytes), g or gb (gigabytes).
880 881
881 882 >>> u = ui(); s = b'foo'
882 883 >>> u.setconfig(s, b'val1', b'42')
883 884 >>> u.configbytes(s, b'val1')
884 885 42
885 886 >>> u.setconfig(s, b'val2', b'42.5 kb')
886 887 >>> u.configbytes(s, b'val2')
887 888 43520
888 889 >>> u.configbytes(s, b'unknown', b'7 MB')
889 890 7340032
890 891 >>> u.setconfig(s, b'invalid', b'somevalue')
891 892 >>> u.configbytes(s, b'invalid')
892 893 Traceback (most recent call last):
893 894 ...
894 895 ConfigError: foo.invalid is not a byte quantity ('somevalue')
895 896 """
896 897
897 898 value = self._config(section, name, default, untrusted)
898 899 if value is _unset:
899 900 if default is _unset:
900 901 default = 0
901 902 value = default
902 903 if not isinstance(value, bytes):
903 904 return value
904 905 try:
905 906 return util.sizetoint(value)
906 907 except error.ParseError:
907 908 raise error.ConfigError(
908 909 _(b"%s.%s is not a byte quantity ('%s')")
909 910 % (section, name, value)
910 911 )
911 912
912 913 def configlist(self, section, name, default=_unset, untrusted=False):
913 914 """parse a configuration element as a list of comma/space separated
914 915 strings
915 916
916 917 >>> u = ui(); s = b'foo'
917 918 >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
918 919 >>> u.configlist(s, b'list1')
919 920 ['this', 'is', 'a small', 'test']
920 921 >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
921 922 >>> u.configlist(s, b'list2')
922 923 ['this', 'is', 'a small', 'test']
923 924 """
924 925 # default is not always a list
925 926 v = self.configwith(
926 927 stringutil.parselist, section, name, default, b'list', untrusted
927 928 )
928 929 if isinstance(v, bytes):
929 930 return stringutil.parselist(v)
930 931 elif v is None:
931 932 return []
932 933 return v
933 934
934 935 def configdate(self, section, name, default=_unset, untrusted=False):
935 936 """parse a configuration element as a tuple of ints
936 937
937 938 >>> u = ui(); s = b'foo'
938 939 >>> u.setconfig(s, b'date', b'0 0')
939 940 >>> u.configdate(s, b'date')
940 941 (0, 0)
941 942 """
942 943 if self.config(section, name, default, untrusted):
943 944 return self.configwith(
944 945 dateutil.parsedate, section, name, default, b'date', untrusted
945 946 )
946 947 if default is _unset:
947 948 return None
948 949 return default
949 950
950 951 def configdefault(self, section, name):
951 952 """returns the default value of the config item"""
952 953 item = self._knownconfig.get(section, {}).get(name)
953 954 itemdefault = None
954 955 if item is not None:
955 956 if callable(item.default):
956 957 itemdefault = item.default()
957 958 else:
958 959 itemdefault = item.default
959 960 return itemdefault
960 961
961 962 def hasconfig(self, section, name, untrusted=False):
962 963 return self._data(untrusted).hasitem(section, name)
963 964
964 965 def has_section(self, section, untrusted=False):
965 966 '''tell whether section exists in config.'''
966 967 return section in self._data(untrusted)
967 968
968 969 def configitems(self, section, untrusted=False, ignoresub=False):
969 970 items = self._data(untrusted).items(section)
970 971 if ignoresub:
971 972 items = [i for i in items if b':' not in i[0]]
972 973 if self.debugflag and not untrusted and self._reportuntrusted:
973 974 for k, v in self._ucfg.items(section):
974 975 if self._tcfg.get(section, k) != v:
975 976 self.debug(
976 977 b"ignoring untrusted configuration option "
977 978 b"%s.%s = %s\n" % (section, k, v)
978 979 )
979 980 return items
980 981
    def walkconfig(self, untrusted=False, all_known=False):
        """Yield (section, name, value) triples for configuration options.

        With ``all_known`` set, registered-but-unset items are merged in
        (carrying their defaults) alongside the explicitly defined ones.
        The merge assumes both sub-iterators yield in (section, name)
        order -- _walk_known() sorts explicitly; presumably _walk_config()
        does too (confirm against the config store's section ordering).
        """
        defined = self._walk_config(untrusted)
        if not all_known:
            for d in defined:
                yield d
            return
        known = self._walk_known()
        current_defined = next(defined, None)
        current_known = next(known, None)
        # classic two-stream sorted merge on the (section, name) prefix;
        # on a tie the explicitly defined value wins over the known default
        while current_defined is not None or current_known is not None:
            if current_defined is None:
                yield current_known
                current_known = next(known, None)
            elif current_known is None:
                yield current_defined
                current_defined = next(defined, None)
            elif current_known[0:2] == current_defined[0:2]:
                yield current_defined
                current_defined = next(defined, None)
                current_known = next(known, None)
            elif current_known[0:2] < current_defined[0:2]:
                yield current_known
                current_known = next(known, None)
            else:
                yield current_defined
                current_defined = next(defined, None)
1007 1008
1008 1009 def _walk_known(self):
1009 1010 for section, items in sorted(self._knownconfig.items()):
1010 1011 for k, i in sorted(items.items()):
1011 1012 # We don't have a way to display generic well, so skip them
1012 1013 if i.generic:
1013 1014 continue
1014 1015 if callable(i.default):
1015 1016 default = i.default()
1016 1017 elif i.default is configitems.dynamicdefault:
1017 1018 default = b'<DYNAMIC>'
1018 1019 else:
1019 1020 default = i.default
1020 1021 yield section, i.name, default
1021 1022
1022 1023 def _walk_config(self, untrusted):
1023 1024 cfg = self._data(untrusted)
1024 1025 for section in cfg.sections():
1025 1026 for name, value in self.configitems(section, untrusted):
1026 1027 yield section, name, value
1027 1028
1028 1029 def plain(self, feature: Optional[bytes] = None) -> bool:
1029 1030 """is plain mode active?
1030 1031
1031 1032 Plain mode means that all configuration variables which affect
1032 1033 the behavior and output of Mercurial should be
1033 1034 ignored. Additionally, the output should be stable,
1034 1035 reproducible and suitable for use in scripts or applications.
1035 1036
1036 1037 The only way to trigger plain mode is by setting either the
1037 1038 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
1038 1039
1039 1040 The return value can either be
1040 1041 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
1041 1042 - False if feature is disabled by default and not included in HGPLAIN
1042 1043 - True otherwise
1043 1044 """
1044 1045 if (
1045 1046 b'HGPLAIN' not in encoding.environ
1046 1047 and b'HGPLAINEXCEPT' not in encoding.environ
1047 1048 ):
1048 1049 return False
1049 1050 exceptions = (
1050 1051 encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
1051 1052 )
1052 1053 # TODO: add support for HGPLAIN=+feature,-feature syntax
1053 1054 if b'+strictflags' not in encoding.environ.get(b'HGPLAIN', b'').split(
1054 1055 b','
1055 1056 ):
1056 1057 exceptions.append(b'strictflags')
1057 1058 if feature and exceptions:
1058 1059 return feature not in exceptions
1059 1060 return True
1060 1061
    def username(self, acceptempty=False):
        """Return default username to be used in commits.

        Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
        and stop searching if one of these is set.
        If not found and acceptempty is True, returns None.
        If not found and ui.askusername is True, ask the user, else use
        ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
        If no username could be found, raise an Abort error.
        """
        user = encoding.environ.get(b"HGUSER")
        if user is None:
            user = self.config(b"ui", b"username")
            if user is not None:
                # config values may embed $VARS; expand them
                user = os.path.expandvars(user)
        if user is None:
            user = encoding.environ.get(b"EMAIL")
        if user is None and acceptempty:
            return user
        if user is None and self.configbool(b"ui", b"askusername"):
            user = self.prompt(_(b"enter a commit username:"), default=None)
        if user is None and not self.interactive():
            try:
                # last resort: synthesize user@fqdn and warn about it
                user = b'%s@%s' % (
                    procutil.getuser(),
                    encoding.strtolocal(socket.getfqdn()),
                )
                self.warn(_(b"no username found, using '%s' instead\n") % user)
            except KeyError:
                pass
        if not user:
            raise error.Abort(
                _(b'no username supplied'),
                hint=_(b"use 'hg config --edit' " b'to set your username'),
            )
        if b"\n" in user:
            # a newline would corrupt the changelog entry format
            raise error.Abort(
                _(b"username %r contains a newline\n") % pycompat.bytestr(user)
            )
        return user
1101 1102
1102 1103 def shortuser(self, user: bytes) -> bytes:
1103 1104 """Return a short representation of a user name or email address."""
1104 1105 if not self.verbose:
1105 1106 user = stringutil.shortuser(user)
1106 1107 return user
1107 1108
    @util.propertycache
    def paths(self):
        """Configured path definitions, built lazily and cached per instance."""
        return urlutil.paths(self)
1111 1112
    @property
    def fout(self):
        # main output stream; replace via the setter so caches/destinations
        # stay consistent
        return self._fout
1115 1116
1116 1117 @util.propertycache
1117 1118 def _fout_is_a_tty(self):
1118 1119 self._isatty(self._fout)
1119 1120
1120 1121 @fout.setter
1121 1122 def fout(self, f):
1122 1123 self._fout = f
1123 1124 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1124 1125 if '_fout_is_a_tty' in vars(self):
1125 1126 del self._fout_is_a_tty
1126 1127
    @property
    def ferr(self):
        # error output stream; replace via the setter so message
        # destinations stay consistent
        return self._ferr
1130 1131
    @ferr.setter
    def ferr(self, f):
        self._ferr = f
        # message destinations may point at the stream just replaced;
        # recompute them from the new streams
        self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1135 1136
    @property
    def fin(self):
        # main input stream
        return self._fin
1139 1140
    @fin.setter
    def fin(self, f):
        # no derived state depends on the input stream; plain assignment
        self._fin = f
1143 1144
    @property
    def fmsg(self):
        """Stream dedicated for status/error messages; may be None if
        fout/ferr are used"""
        return self._fmsg
1149 1150
    @fmsg.setter
    def fmsg(self, f):
        self._fmsg = f
        # recompute message destinations, which depend on whether a
        # dedicated message stream exists
        self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1154 1155
    @contextlib.contextmanager
    def silent(
        self, error: bool = False, subproc: bool = False, labeled: bool = False
    ):
        """Context manager that captures and discards ui output.

        Flags have the same meaning as in pushbuffer(); the buffered
        output is dropped on exit.
        """
        self.pushbuffer(error=error, subproc=subproc, labeled=labeled)
        try:
            yield
        finally:
            self.popbuffer()
1164 1165
1165 1166 def pushbuffer(
1166 1167 self, error: bool = False, subproc: bool = False, labeled: bool = False
1167 1168 ) -> None:
1168 1169 """install a buffer to capture standard output of the ui object
1169 1170
1170 1171 If error is True, the error output will be captured too.
1171 1172
1172 1173 If subproc is True, output from subprocesses (typically hooks) will be
1173 1174 captured too.
1174 1175
1175 1176 If labeled is True, any labels associated with buffered
1176 1177 output will be handled. By default, this has no effect
1177 1178 on the output returned, but extensions and GUI tools may
1178 1179 handle this argument and returned styled output. If output
1179 1180 is being buffered so it can be captured and parsed or
1180 1181 processed, labeled should not be set to True.
1181 1182 """
1182 1183 self._buffers.append([])
1183 1184 self._bufferstates.append((error, subproc, labeled))
1184 1185 self._bufferapplylabels = labeled
1185 1186
1186 1187 def popbuffer(self) -> bytes:
1187 1188 '''pop the last buffer and return the buffered output'''
1188 1189 self._bufferstates.pop()
1189 1190 if self._bufferstates:
1190 1191 self._bufferapplylabels = self._bufferstates[-1][2]
1191 1192 else:
1192 1193 self._bufferapplylabels = None
1193 1194
1194 1195 return b"".join(self._buffers.pop())
1195 1196
1196 1197 def _isbuffered(self, dest) -> bool:
1197 1198 if dest is self._fout:
1198 1199 return bool(self._buffers)
1199 1200 if dest is self._ferr:
1200 1201 return bool(self._bufferstates and self._bufferstates[-1][0])
1201 1202 return False
1202 1203
1203 1204 def canwritewithoutlabels(self) -> bool:
1204 1205 '''check if write skips the label'''
1205 1206 if self._buffers and not self._bufferapplylabels:
1206 1207 return True
1207 1208 return self._colormode is None
1208 1209
1209 1210 def canbatchlabeledwrites(self) -> bool:
1210 1211 '''check if write calls with labels are batchable'''
1211 1212 # Windows color printing is special, see ``write``.
1212 1213 return self._colormode != b'win32'
1213 1214
    def write(self, *args: bytes, **opts: _MsgOpts) -> None:
        """write args to output

        By default, this method simply writes to the buffer or stdout.
        Color mode can be set on the UI class to have the output decorated
        with color modifier before being written to stdout.

        The color used is controlled by an optional keyword argument, "label".
        This should be a string containing label names separated by space.
        Label names take the form of "topic.type". For example, ui.debug()
        issues a label of "ui.debug".

        Progress reports via stderr are normally cleared before writing as
        stdout and stderr go to the same terminal. This can be skipped with
        the optional keyword argument "keepprogressbar". The progress bar
        will continue to occupy a partial line on stderr in that case.
        This functionality is intended when Mercurial acts as data source
        in a pipe.

        When labeling output for a specific command, a label of
        "cmdname.type" is recommended. For example, status issues
        a label of "status.modified" for modified files.
        """
        dest = self._fout

        # inlined _write() for speed
        if self._buffers:
            label = opts.get('label', b'')
            if label and self._bufferapplylabels:
                self._buffers[-1].extend(self.label(a, label) for a in args)
            else:
                self._buffers[-1].extend(args)
            return

        # inlined _writenobuf() for speed
        if not opts.get('keepprogressbar', self._fout_is_a_tty):
            self._progclear()
        msg = b''.join(args)

        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            if self._colormode == b'win32':
                # windows color printing is its own can of crab, defer to
                # the color module and that is it.
                color.win32print(self, dest.write, msg, **opts)
            else:
                if self._colormode is not None:
                    label = opts.get('label', b'')
                    msg = self.label(msg, label)
                dest.write(msg)
        except IOError as err:
            # surface stream failures as a Mercurial-level error
            raise error.StdioError(err)
        finally:
            self._blockedtimes[b'stdio_blocked'] += (
                util.timer() - starttime
            ) * 1000
1271 1272
1272 1273 def write_err(self, *args: bytes, **opts: _MsgOpts) -> None:
1273 1274 self._write(self._ferr, *args, **opts)
1274 1275
1275 1276 def _write(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
1276 1277 # update write() as well if you touch this code
1277 1278 if self._isbuffered(dest):
1278 1279 label = opts.get('label', b'')
1279 1280 if label and self._bufferapplylabels:
1280 1281 self._buffers[-1].extend(self.label(a, label) for a in args)
1281 1282 else:
1282 1283 self._buffers[-1].extend(args)
1283 1284 else:
1284 1285 self._writenobuf(dest, *args, **opts)
1285 1286
    def _writenobuf(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
        """Write directly to ``dest``, bypassing any active buffers."""
        # update write() as well if you touch this code
        if not opts.get('keepprogressbar', self._fout_is_a_tty):
            self._progclear()
        msg = b''.join(args)

        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            # writing to stderr: flush stdout first so previously written
            # output is emitted before this message (presumably to keep
            # interleaved output ordered)
            if dest is self._ferr and not getattr(self._fout, 'closed', False):
                self._fout.flush()
            if getattr(dest, 'structured', False):
                # channel for machine-readable output with metadata, where
                # no extra colorization is necessary.
                dest.write(msg, **opts)
            elif self._colormode == b'win32':
                # windows color printing is its own can of crab, defer to
                # the color module and that is it.
                color.win32print(self, dest.write, msg, **opts)
            else:
                if self._colormode is not None:
                    label = opts.get('label', b'')
                    msg = self.label(msg, label)
                dest.write(msg)
            # stderr may be buffered under win32 when redirected to files,
            # including stdout.
            if dest is self._ferr and not getattr(dest, 'closed', False):
                dest.flush()
        except IOError as err:
            if dest is self._ferr and err.errno in (
                errno.EPIPE,
                errno.EIO,
                errno.EBADF,
            ):
                # no way to report the error, so ignore it
                return
            raise error.StdioError(err)
        finally:
            self._blockedtimes[b'stdio_blocked'] += (
                util.timer() - starttime
            ) * 1000
1327 1328
1328 1329 def _writemsg(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
1329 1330 timestamp = self.showtimestamp and opts.get('type') in {
1330 1331 b'debug',
1331 1332 b'error',
1332 1333 b'note',
1333 1334 b'status',
1334 1335 b'warning',
1335 1336 }
1336 1337 if timestamp:
1337 1338 args = (
1338 1339 b'[%s] '
1339 1340 % pycompat.bytestr(datetime.datetime.now().isoformat()),
1340 1341 ) + args
1341 1342 _writemsgwith(self._write, dest, *args, **opts)
1342 1343 if timestamp:
1343 1344 dest.flush()
1344 1345
    def _writemsgnobuf(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
        """Like _writemsg(), but write through the unbuffered path."""
        _writemsgwith(self._writenobuf, dest, *args, **opts)
1347 1348
    def flush(self) -> None:
        """Flush fout, then ferr, even if flushing fout failed.

        EPIPE/EIO/EBADF are silently ignored; other IOErrors are raised
        as error.StdioError.  Time spent is charged to 'stdio_blocked'.
        """
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            try:
                self._fout.flush()
            except IOError as err:
                if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                    raise error.StdioError(err)
            finally:
                try:
                    self._ferr.flush()
                except IOError as err:
                    if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                        raise error.StdioError(err)
        finally:
            self._blockedtimes[b'stdio_blocked'] += (
                util.timer() - starttime
            ) * 1000
1367 1368
1368 1369 def _isatty(self, fh) -> bool:
1369 1370 if self.configbool(b'ui', b'nontty'):
1370 1371 return False
1371 1372 return procutil.isatty(fh)
1372 1373
    def protectfinout(self):
        """Duplicate ui streams and redirect original if they are stdio

        Returns (fin, fout) which point to the original ui fds, but may be
        copy of them. The returned streams can be considered "owned" in that
        print(), exec(), etc. never reach to them.
        """
        if self._finoutredirected:
            # if already redirected, protectstdio() would just create another
            # nullfd pair, which is equivalent to returning self._fin/_fout.
            return self._fin, self._fout
        fin, fout = procutil.protectstdio(self._fin, self._fout)
        # remember whether anything was actually duplicated, so that
        # restorefinout() can tell a real redirection from a no-op
        self._finoutredirected = (fin, fout) != (self._fin, self._fout)
        return fin, fout
1387 1388
    def restorefinout(self, fin, fout):
        """Restore ui streams from possibly duplicated (fin, fout)"""
        if (fin, fout) == (self._fin, self._fout):
            # nothing was duplicated by protectfinout(), nothing to undo
            return
        procutil.restorestdio(self._fin, self._fout, fin, fout)
        # protectfinout() won't create more than one duplicated streams,
        # so we can just turn the redirection flag off.
        self._finoutredirected = False
1396 1397
1397 1398 @contextlib.contextmanager
1398 1399 def protectedfinout(self):
1399 1400 """Run code block with protected standard streams"""
1400 1401 fin, fout = self.protectfinout()
1401 1402 try:
1402 1403 yield fin, fout
1403 1404 finally:
1404 1405 self.restorefinout(fin, fout)
1405 1406
    def disablepager(self) -> None:
        """Permanently disable the pager for this ui instance."""
        self._disablepager = True
1408 1409
    def pager(self, command: bytes) -> None:
        """Start a pager for subsequent command output.

        Commands which produce a long stream of output should call
        this function to activate the user's preferred pagination
        mechanism (which may be no pager). Calling this function
        precludes any future use of interactive functionality, such as
        prompting the user or activating curses.

        Args:
            command: The full, non-aliased name of the command. That is, "log"
                not "history, "summary" not "summ", etc.
        """
        if self._disablepager or self.pageractive:
            # how pager should do is already determined
            return

        if not command.startswith(b'internal-always-') and (
            # explicit --pager=on (= 'internal-always-' prefix) should
            # take precedence over disabling factors below
            command in self.configlist(b'pager', b'ignore')
            or not self.configbool(b'ui', b'paginate')
            or not self.configbool(b'pager', b'attend-' + command, True)
            or encoding.environ.get(b'TERM') == b'dumb'
            # TODO: if we want to allow HGPLAINEXCEPT=pager,
            # formatted() will need some adjustment.
            or not self.formatted()
            or self.plain()
            or self._buffers
            # TODO: expose debugger-enabled on the UI object
            or b'--debugger' in pycompat.sysargv
        ):
            # We only want to paginate if the ui appears to be
            # interactive, the user didn't say HGPLAIN or
            # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
            return

        # py2exe doesn't appear to be able to use legacy I/O, and nothing is
        # output to the pager for paged commands. Piping to `more` in cmd.exe
        # works, but is easy to forget. Just disable pager for py2exe, but
        # leave it working for pyoxidizer and exewrapper builds.
        if pycompat.iswindows and getattr(sys, "frozen", None) == "console_exe":
            self.debug(b"pager is unavailable with py2exe packaging\n")
            return

        pagercmd = self.config(b'pager', b'pager', rcutil.fallbackpager)
        if not pagercmd:
            return

        # only supply default pager environment variables the user
        # has not already set themselves
        pagerenv = {}
        for name, value in rcutil.defaultpagerenv().items():
            if name not in encoding.environ:
                pagerenv[name] = value

        self.debug(
            b'starting pager for command %s\n' % stringutil.pprint(command)
        )
        self.flush()

        wasformatted = self.formatted()
        if hasattr(signal, "SIGPIPE"):
            signal.signal(signal.SIGPIPE, _catchterm)
        if self._runpager(pagercmd, pagerenv):
            self.pageractive = True
            # Preserve the formatted-ness of the UI. This is important
            # because we mess with stdout, which might confuse
            # auto-detection of things being formatted.
            self.setconfig(b'ui', b'formatted', wasformatted, b'pager')
            self.setconfig(b'ui', b'interactive', False, b'pager')

            # If pagermode differs from color.mode, reconfigure color now that
            # pageractive is set.
            cm = self._colormode
            if cm != self.config(b'color', b'pagermode', cm):
                color.setup(self)
        else:
            # If the pager can't be spawned in dispatch when --pager=on is
            # given, don't try again when the command runs, to avoid a duplicate
            # warning about a missing pager command.
            self.disablepager()
1489 1490
    def _runpager(self, command: bytes, env=None) -> bool:
        """Actually start the pager and set up file descriptors.

        This is separate in part so that extensions (like chg) can
        override how a pager is invoked.

        Returns True when a pager process was started and stdout (and,
        when stderr is a tty, stderr) have been rerouted into it; False
        when pagination was skipped.
        """
        if command == b'cat':
            # Save ourselves some work.
            return False
        # If the command doesn't contain any of these characters, we
        # assume it's a binary and exec it directly. This means for
        # simple pager command configurations, we can degrade
        # gracefully and tell the user about their broken pager.
        shell = any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")

        if pycompat.iswindows and not shell:
            # Window's built-in `more` cannot be invoked with shell=False, but
            # its `more.com` can. Hide this implementation detail from the
            # user so we can also get sane bad PAGER behavior. MSYS has
            # `more.exe`, so do a cmd.exe style resolution of the executable to
            # determine which one to use.
            fullcmd = procutil.findexe(command)
            if not fullcmd:
                self.warn(
                    _(b"missing pager command '%s', skipping pager\n") % command
                )
                return False

            command = fullcmd

        try:
            pager = subprocess.Popen(
                procutil.tonativestr(command),
                shell=shell,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdin=subprocess.PIPE,
                stdout=procutil.stdout,
                stderr=procutil.stderr,
                env=procutil.tonativeenv(procutil.shellenviron(env)),
            )
        except FileNotFoundError:
            if not shell:
                self.warn(
                    _(b"missing pager command '%s', skipping pager\n") % command
                )
                return False
            raise

        # back up original file descriptors
        # NOTE(review): on OpenVMS the originals are not duplicated here, so
        # killpager() below kills the pager instead of restoring stdio
        if pycompat.sysplatform != b'OpenVMS':
            stdoutfd = os.dup(procutil.stdout.fileno())
            stderrfd = os.dup(procutil.stderr.fileno())

        os.dup2(pager.stdin.fileno(), procutil.stdout.fileno())
        if self._isatty(procutil.stderr):
            os.dup2(pager.stdin.fileno(), procutil.stderr.fileno())

        @self.atexit
        def killpager():
            if hasattr(signal, "SIGINT"):
                signal.signal(signal.SIGINT, signal.SIG_IGN)
            # restore original fds, closing pager.stdin copies in the process
            if pycompat.sysplatform == b'OpenVMS':
                pager.kill()
            os.dup2(stdoutfd, procutil.stdout.fileno())
            os.dup2(stderrfd, procutil.stderr.fileno())
            pager.stdin.close()
            pager.wait()

        return True
1561 1562
    @property
    def _exithandlers(self):
        # process-global list of (func, args, kwargs) handlers shared by
        # every ui instance; appended to by atexit() below
        return _reqexithandlers
1565 1566
    def atexit(self, func, *args, **kwargs):
        """register a function to run after dispatching a request

        Handlers do not stay registered across request boundaries.

        Returns *func* unchanged, so this can be used as a decorator.
        """
        self._exithandlers.append((func, args, kwargs))
        return func
1572 1573
    def interface(self, feature: bytes) -> bytes:
        """what interface to use for interactive console features?

        The interface is controlled by the value of `ui.interface` but also by
        the value of feature-specific configuration. For example:

        ui.interface.histedit = text
        ui.interface.chunkselector = curses

        Here the features are "histedit" and "chunkselector".

        The configuration above means that the default interfaces for commands
        is curses, the interface for histedit is text and the interface for
        selecting chunk is crecord (the best curses interface available).

        Consider the following example:
        ui.interface = curses
        ui.interface.histedit = text

        Then histedit will use the text interface and chunkselector will use
        the default curses interface (crecord at the moment).
        """
        alldefaults = frozenset([b"text", b"curses"])

        featureinterfaces = {
            b"chunkselector": [
                b"text",
                b"curses",
            ],
            b"histedit": [
                b"text",
                b"curses",
            ],
        }

        # Feature-specific interface
        if feature not in featureinterfaces.keys():
            # Programming error, not user error
            raise ValueError(b"Unknown feature requested %s" % feature)

        availableinterfaces = frozenset(featureinterfaces[feature])
        if alldefaults > availableinterfaces:
            # Programming error, not user error. We need a use case to
            # define the right thing to do here.
            raise ValueError(
                b"Feature %s does not handle all default interfaces" % feature
            )

        if self.plain() or encoding.environ.get(b'TERM') == b'dumb':
            return b"text"

        # Default interface for all the features
        defaultinterface = b"text"
        i = self.config(b"ui", b"interface")
        if i in alldefaults:
            defaultinterface = cast(bytes, i)  # cast to help pytype

        choseninterface: bytes = defaultinterface
        f = self.config(b"ui", b"interface.%s" % feature)
        if f in availableinterfaces:
            choseninterface = cast(bytes, f)  # cast to help pytype

        # a bogus value in ui.interface or ui.interface.<feature> is
        # reported to the user but otherwise ignored
        if i is not None and defaultinterface != i:
            if f is not None:
                self.warn(_(b"invalid value for ui.interface: %s\n") % (i,))
            else:
                self.warn(
                    _(b"invalid value for ui.interface: %s (using %s)\n")
                    % (i, choseninterface)
                )
        if f is not None and choseninterface != f:
            self.warn(
                _(b"invalid value for ui.interface.%s: %s (using %s)\n")
                % (feature, f, choseninterface)
            )

        return choseninterface
1650 1651
1651 1652 def interactive(self):
1652 1653 """is interactive input allowed?
1653 1654
1654 1655 An interactive session is a session where input can be reasonably read
1655 1656 from `sys.stdin'. If this function returns false, any attempt to read
1656 1657 from stdin should fail with an error, unless a sensible default has been
1657 1658 specified.
1658 1659
1659 1660 Interactiveness is triggered by the value of the `ui.interactive'
1660 1661 configuration variable or - if it is unset - when `sys.stdin' points
1661 1662 to a terminal device.
1662 1663
1663 1664 This function refers to input only; for output, see `ui.formatted()'.
1664 1665 """
1665 1666 i = self.configbool(b"ui", b"interactive")
1666 1667 if i is None:
1667 1668 # some environments replace stdin without implementing isatty
1668 1669 # usually those are non-interactive
1669 1670 return self._isatty(self._fin)
1670 1671
1671 1672 return i
1672 1673
1673 1674 def termwidth(self) -> int:
1674 1675 """how wide is the terminal in columns?"""
1675 1676 if b'COLUMNS' in encoding.environ:
1676 1677 try:
1677 1678 return int(encoding.environ[b'COLUMNS'])
1678 1679 except ValueError:
1679 1680 pass
1680 1681 return scmutil.termsize(self)[0]
1681 1682
1682 1683 def formatted(self):
1683 1684 """should formatted output be used?
1684 1685
1685 1686 It is often desirable to format the output to suite the output medium.
1686 1687 Examples of this are truncating long lines or colorizing messages.
1687 1688 However, this is not often not desirable when piping output into other
1688 1689 utilities, e.g. `grep'.
1689 1690
1690 1691 Formatted output is triggered by the value of the `ui.formatted'
1691 1692 configuration variable or - if it is unset - when `sys.stdout' points
1692 1693 to a terminal device. Please note that `ui.formatted' should be
1693 1694 considered an implementation detail; it is not intended for use outside
1694 1695 Mercurial or its extensions.
1695 1696
1696 1697 This function refers to output only; for input, see `ui.interactive()'.
1697 1698 This function always returns false when in plain mode, see `ui.plain()'.
1698 1699 """
1699 1700 if self.plain():
1700 1701 return False
1701 1702
1702 1703 i = self.configbool(b"ui", b"formatted")
1703 1704 if i is None:
1704 1705 # some environments replace stdout without implementing isatty
1705 1706 # usually those are non-interactive
1706 1707 return self._isatty(self._fout)
1707 1708
1708 1709 return i
1709 1710
    def _readline(
        self,
        prompt: bytes = b' ',
        promptopts: Optional[Dict[str, _MsgOpts]] = None,
    ) -> bytes:
        """Read one line of user input, with readline editing when possible.

        Raises EOFError when no input is available.
        """
        # Replacing stdin/stdout temporarily is a hard problem on Python 3
        # because they have to be text streams with *no buffering*. Instead,
        # we use rawinput() only if call_readline() will be invoked by
        # PyOS_Readline(), so no I/O will be made at Python layer.
        usereadline = (
            self._isatty(self._fin)
            and self._isatty(self._fout)
            and procutil.isstdin(self._fin)
            and procutil.isstdout(self._fout)
        )
        if usereadline:
            try:
                # magically add command line editing support, where
                # available
                import readline

                # force demandimport to really load the module
                readline.read_history_file
                # windows sometimes raises something other than ImportError
            except Exception:
                usereadline = False

        if self._colormode == b'win32' or not usereadline:
            if not promptopts:
                promptopts = {}
            self._writemsgnobuf(
                self._fmsgout, prompt, type=b'prompt', **promptopts
            )
            self.flush()
            prompt = b' '
        else:
            prompt = self.label(prompt, b'ui.prompt') + b' '

        # prompt ' ' must exist; otherwise readline may delete entire line
        # - http://bugs.python.org/issue12833
        with self.timeblockedsection(b'stdio'):
            if usereadline:
                self.flush()
                prompt = encoding.strfromlocal(prompt)
                line = encoding.strtolocal(input(prompt))
                # When stdin is in binary mode on Windows, it can cause
                # input() to emit an extra trailing carriage return
                if pycompat.oslinesep == b'\r\n' and line.endswith(b'\r'):
                    line = line[:-1]
            else:
                self._fout.write(pycompat.bytestr(prompt))
                self._fout.flush()
                line = self._fin.readline()
                if not line:
                    raise EOFError
                line = line.rstrip(pycompat.oslinesep)

        return line
1768 1769
    # the overloads tell type checkers that the return type follows the
    # default: a bytes default yields bytes, a None default may yield None
    if typing.TYPE_CHECKING:

        @overload
        def prompt(self, msg: bytes, default: bytes) -> bytes:
            pass

        @overload
        def prompt(self, msg: bytes, default: None) -> Optional[bytes]:
            pass

    def prompt(self, msg, default=b"y"):
        """Prompt user with msg, read response.
        If ui is not interactive, the default is returned.
        """
        return self._prompt(msg, default=default)
1784 1785
    # overloads mirror prompt(): return type depends on the default given
    if typing.TYPE_CHECKING:

        @overload
        def _prompt(
            self, msg: bytes, default: bytes, **opts: _MsgOpts
        ) -> bytes:
            pass

        @overload
        def _prompt(
            self, msg: bytes, default: None, **opts: _MsgOpts
        ) -> Optional[bytes]:
            pass

    def _prompt(self, msg, default=b'y', **opts):
        """Show *msg* and read a response.

        Returns *default* when the ui is not interactive or when the user
        enters an empty line; raises error.ResponseExpected on EOF.
        """
        opts = {**opts, 'default': default}
        if not self.interactive():
            self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
            self._writemsg(
                self._fmsgout, default or b'', b"\n", type=b'promptecho'
            )
            return default
        try:
            r = self._readline(prompt=msg, promptopts=opts)
            if not r:
                r = default
            if self.configbool(b'ui', b'promptecho'):
                self._writemsg(
                    self._fmsgout, r or b'', b"\n", type=b'promptecho'
                )
            return r
        except EOFError:
            raise error.ResponseExpected()
1818 1819
1819 1820 @staticmethod
1820 1821 def extractchoices(prompt: bytes) -> Tuple[bytes, List[_PromptChoice]]:
1821 1822 """Extract prompt message and list of choices from specified prompt.
1822 1823
1823 1824 This returns tuple "(message, choices)", and "choices" is the
1824 1825 list of tuple "(response character, text without &)".
1825 1826
1826 1827 >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
1827 1828 ('awake? ', [('y', 'Yes'), ('n', 'No')])
1828 1829 >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
1829 1830 ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
1830 1831 >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
1831 1832 ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
1832 1833 """
1833 1834
1834 1835 # Sadly, the prompt string may have been built with a filename
1835 1836 # containing "$$" so let's try to find the first valid-looking
1836 1837 # prompt to start parsing. Sadly, we also can't rely on
1837 1838 # choices containing spaces, ASCII, or basically anything
1838 1839 # except an ampersand followed by a character.
1839 1840 m = re.match(br'(?s)(.+?)\$\$([^$]*&[^ $].*)', prompt)
1840 1841
1841 1842 assert m is not None # help pytype
1842 1843
1843 1844 msg = m.group(1)
1844 1845 choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
1845 1846
1846 1847 def choicetuple(s):
1847 1848 ampidx = s.index(b'&')
1848 1849 return s[ampidx + 1 : ampidx + 2].lower(), s.replace(b'&', b'', 1)
1849 1850
1850 1851 return (msg, [choicetuple(s) for s in choices])
1851 1852
    def promptchoice(self, prompt: bytes, default: int = 0) -> int:
        """Prompt user with a message, read response, and ensure it matches
        one of the provided choices. The prompt is formatted as follows:

        "would you like fries with that (Yn)? $$ &Yes $$ &No"

        The index of the choice is returned. Responses are case
        insensitive. If ui is not interactive, the default is
        returned.
        """

        msg, choices = self.extractchoices(prompt)
        resps = [r for r, t in choices]
        # re-prompt until the response matches one of the choice keys
        while True:
            r = self._prompt(msg, default=resps[default], choices=choices)
            if r.lower() in resps:
                return resps.index(r.lower())
            # TODO: shouldn't it be a warning?
            self._writemsg(self._fmsgout, _(b"unrecognized response\n"))
1871 1872
    def getpass(
        self, prompt: Optional[bytes] = None, default: Optional[bytes] = None
    ) -> Optional[bytes]:
        """Prompt for a password; returns *default* when not interactive.

        Raises error.ResponseExpected on EOF.
        """
        if not self.interactive():
            return default
        try:
            self._writemsg(
                self._fmsgerr,
                prompt or _(b'password: '),
                type=b'prompt',
                password=True,
            )
            # disable getpass() only if explicitly specified. it's still valid
            # to interact with tty even if fin is not a tty.
            with self.timeblockedsection(b'stdio'):
                if self.configbool(b'ui', b'nontty'):
                    l = self._fin.readline()
                    if not l:
                        raise EOFError
                    return l.rstrip(b'\n')
                else:
                    return util.get_password()
        except EOFError:
            raise error.ResponseExpected()
1896 1897
1897 1898 def status(self, *msg: bytes, **opts: _MsgOpts) -> None:
1898 1899 """write status message to output (if ui.quiet is False)
1899 1900
1900 1901 This adds an output label of "ui.status".
1901 1902 """
1902 1903 if not self.quiet:
1903 1904 self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
1904 1905
    def warn(self, *msg: bytes, **opts: _MsgOpts) -> None:
        """write warning message to output (stderr)

        This adds an output label of "ui.warning".
        """
        # _fmsgerr is the message channel routed to stderr
        self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
1911 1912
    def error(self, *msg: bytes, **opts: _MsgOpts) -> None:
        """write error message to output (stderr)

        This adds an output label of "ui.error".
        """
        # _fmsgerr is the message channel routed to stderr
        self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
1918 1919
1919 1920 def note(self, *msg: bytes, **opts: _MsgOpts) -> None:
1920 1921 """write note to output (if ui.verbose is True)
1921 1922
1922 1923 This adds an output label of "ui.note".
1923 1924 """
1924 1925 if self.verbose:
1925 1926 self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
1926 1927
1927 1928 def debug(self, *msg: bytes, **opts: _MsgOpts) -> None:
1928 1929 """write debug message to output (if ui.debugflag is True)
1929 1930
1930 1931 This adds an output label of "ui.debug".
1931 1932 """
1932 1933 if self.debugflag:
1933 1934 self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
1934 1935 self.log(b'debug', b'%s', b''.join(msg))
1935 1936
    # Aliases to defeat check-code (for callers that intentionally pass
    # untranslated messages; presumably these bypass the i18n lint check)
    statusnoi18n = status
    notenoi18n = note
    warnnoi18n = warn
    writenoi18n = write
1941 1942
    def edit(
        self,
        text: bytes,
        user: bytes,
        extra: Optional[Dict[bytes, Any]] = None,  # TODO: value type of bytes?
        editform=None,
        pending=None,
        repopath: Optional[bytes] = None,
        action: Optional[bytes] = None,
    ) -> bytes:
        """Open the user's editor on *text* and return the edited result.

        *text* is written (with native EOLs) to a temp file, the configured
        editor is run on it with HGUSER and related HG* variables exported,
        and the file's content is read back and unlinked.  self.system()
        raises error.CanceledError when the editor exits non-zero.
        """
        if action is None:
            self.develwarn(
                b'action is None but will soon be a required '
                b'parameter to ui.edit()'
            )
        extra_defaults = {
            b'prefix': b'editor',
            b'suffix': b'.txt',
        }
        if extra is not None:
            if extra.get(b'suffix') is not None:
                self.develwarn(
                    b'extra.suffix is not None but will soon be '
                    b'ignored by ui.edit()'
                )
            extra_defaults.update(extra)
        extra = extra_defaults

        # pick a temp-file suffix reflecting what is being edited
        if action == b'diff':
            suffix = b'.diff'
        elif action:
            suffix = b'.%s.hg.txt' % action
        else:
            suffix = extra[b'suffix']

        rdir = None
        if self.configbool(b'experimental', b'editortmpinhg'):
            rdir = repopath
        (fd, name) = pycompat.mkstemp(
            prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
        )
        try:
            with os.fdopen(fd, 'wb') as f:
                f.write(util.tonativeeol(text))

            environ = {b'HGUSER': user}
            if b'transplant_source' in extra:
                environ.update(
                    {b'HGREVISION': hex(extra[b'transplant_source'])}
                )
            for label in (b'intermediate-source', b'source', b'rebase_source'):
                if label in extra:
                    environ.update({b'HGREVISION': extra[label]})
                    break
            if editform:
                environ.update({b'HGEDITFORM': editform})
            if pending:
                environ.update({b'HG_PENDING': pending})

            editor = self.geteditor()

            self.system(
                b"%s \"%s\"" % (editor, name),
                environ=environ,
                onerr=error.CanceledError,
                errprefix=_(b"edit failed"),
                blockedtag=b'editor',
            )

            with open(name, 'rb') as f:
                t = util.fromnativeeol(f.read())
        finally:
            os.unlink(name)

        return t
2017 2018
    def system(
        self,
        cmd: bytes,
        environ=None,
        cwd: Optional[bytes] = None,
        onerr: Optional[Callable[[bytes], Exception]] = None,
        errprefix: Optional[bytes] = None,
        blockedtag: Optional[bytes] = None,
    ) -> int:
        """execute shell command with appropriate output stream. command
        output will be redirected if fout is not stdout.

        if command fails and onerr is None, return status, else raise onerr
        object as exception.

        blockedtag labels the time-blocked section; when not given it is
        derived from the tail of the command itself.
        """
        if blockedtag is None:
            # Long cmds tend to be because of an absolute path on cmd. Keep
            # the tail end instead
            cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
            blockedtag = b'unknown_system_' + cmdsuffix
        out = self._fout
        if any(s[1] for s in self._bufferstates):
            out = self
        with self.timeblockedsection(blockedtag):
            rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
        if rc and onerr:
            errmsg = b'%s %s' % (
                procutil.shellsplit(cmd)[0],
                procutil.explainexit(rc),
            )
            if errprefix:
                errmsg = b'%s: %s' % (errprefix, errmsg)
            raise onerr(errmsg)
        return rc
2052 2053
    def _runsystem(self, cmd: bytes, environ, cwd: Optional[bytes], out) -> int:
        """actually execute the given shell command (can be overridden by
        extensions like chg); returns the command's exit status"""
        return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
2057 2058
    def traceback(self, exc=None, force: bool = False):
        """print exception traceback if traceback printing enabled or forced.
        only to call in exception handler. returns true if traceback
        printed."""
        if self.tracebackflag or force:
            if exc is None:
                exc = sys.exc_info()
            # some exceptions carry a manually chained 'cause' attribute
            # holding an exc_info-style (type, value, tb) tuple
            cause = getattr(exc[1], 'cause', None)

            if cause is not None:
                causetb = traceback.format_tb(cause[2])
                exctb = traceback.format_tb(exc[2])
                exconly = traceback.format_exception_only(cause[0], cause[1])

                # exclude frame where 'exc' was chained and rethrown from exctb
                self.write_err(
                    b'Traceback (most recent call last):\n',
                    encoding.strtolocal(''.join(exctb[:-1])),
                    encoding.strtolocal(''.join(causetb)),
                    encoding.strtolocal(''.join(exconly)),
                )
            else:
                output = traceback.format_exception(exc[0], exc[1], exc[2])
                self.write_err(encoding.strtolocal(''.join(output)))
        return self.tracebackflag or force
2083 2084
2084 2085 def geteditor(self):
2085 2086 '''return editor to use'''
2086 2087 if pycompat.sysplatform == b'plan9':
2087 2088 # vi is the MIPS instruction simulator on Plan 9. We
2088 2089 # instead default to E to plumb commit messages to
2089 2090 # avoid confusion.
2090 2091 editor = b'E'
2091 2092 elif pycompat.isdarwin:
2092 2093 # vi on darwin is POSIX compatible to a fault, and that includes
2093 2094 # exiting non-zero if you make any mistake when running an ex
2094 2095 # command. Proof: `vi -c ':unknown' -c ':qa'; echo $?` produces 1,
2095 2096 # while s/vi/vim/ doesn't.
2096 2097 editor = b'vim'
2097 2098 else:
2098 2099 editor = b'vi'
2099 2100 return encoding.environ.get(b"HGEDITOR") or self.config(
2100 2101 b"ui", b"editor", editor
2101 2102 )
2102 2103
2103 2104 @util.propertycache
2104 2105 def _progbar(self) -> Optional[progress.progbar]:
2105 2106 """setup the progbar singleton to the ui object"""
2106 2107 if (
2107 2108 self.quiet
2108 2109 or self.debugflag
2109 2110 or self.configbool(b'progress', b'disable')
2110 2111 or not progress.shouldprint(self)
2111 2112 ):
2112 2113 return None
2113 2114 return getprogbar(self)
2114 2115
2115 2116 def _progclear(self) -> None:
2116 2117 """clear progress bar output if any. use it before any output"""
2117 2118 if not haveprogbar(): # nothing loaded yet
2118 2119 return
2119 2120 if self._progbar is not None and self._progbar.printed:
2120 2121 self._progbar.clear()
2121 2122
    def makeprogress(
        self, topic: bytes, unit: bytes = b"", total: Optional[int] = None
    ) -> scmutil.progress:
        """Create a progress helper for the specified topic"""
        if getattr(self._fmsgerr, 'structured', False):
            # channel for machine-readable output with metadata, just send
            # raw information
            # TODO: consider porting some useful information (e.g. estimated
            # time) from progbar. we might want to support update delay to
            # reduce the cost of transferring progress messages.
            def updatebar(topic, pos, item, unit, total):
                self._fmsgerr.write(
                    None,
                    type=b'progress',
                    topic=topic,
                    pos=pos,
                    item=item,
                    unit=unit,
                    total=total,
                )

        elif self._progbar is not None:
            updatebar = self._progbar.progress
        else:
            # no progress output configured: use a no-op updater

            def updatebar(topic, pos, item, unit, total):
                pass

        return scmutil.progress(self, updatebar, topic, unit, total)
2151 2152
    def getlogger(self, name):
        """Returns a logger of the given name; or None if not registered"""
        return self._loggers.get(name)
2155 2156
    def setlogger(self, name, logger) -> None:
        """Install logger which can be identified later by the given name

        More than one loggers can be registered. Use extension or module
        name to uniquely identify the logger instance.

        Registering under an existing name replaces the previous logger.
        """
        self._loggers[name] = logger
2163 2164
2164 2165 def log(self, event, msgfmt, *msgargs, **opts) -> None:
2165 2166 """hook for logging facility extensions
2166 2167
2167 2168 event should be a readily-identifiable subsystem, which will
2168 2169 allow filtering.
2169 2170
2170 2171 msgfmt should be a newline-terminated format string to log, and
2171 2172 *msgargs are %-formatted into it.
2172 2173
2173 2174 **opts currently has no defined meanings.
2174 2175 """
2175 2176 if not self._loggers:
2176 2177 return
2177 2178 activeloggers = [l for l in self._loggers.values() if l.tracked(event)]
2178 2179 if not activeloggers:
2179 2180 return
2180 2181 msg = msgfmt % msgargs
2181 2182 opts = pycompat.byteskwargs(opts)
2182 2183 # guard against recursion from e.g. ui.debug()
2183 2184 registeredloggers = self._loggers
2184 2185 self._loggers = {}
2185 2186 try:
2186 2187 for logger in activeloggers:
2187 2188 logger.log(self, event, msg, opts)
2188 2189 finally:
2189 2190 self._loggers = registeredloggers
2190 2191
2191 2192 def label(self, msg: bytes, label: bytes) -> bytes:
2192 2193 """style msg based on supplied label
2193 2194
2194 2195 If some color mode is enabled, this will add the necessary control
2195 2196 characters to apply such color. In addition, 'debug' color mode adds
2196 2197 markup showing which label affects a piece of text.
2197 2198
2198 2199 ui.write(s, 'label') is equivalent to
2199 2200 ui.write(ui.label(s, 'label')).
2200 2201 """
2201 2202 if self._colormode is not None:
2202 2203 return color.colorlabel(self, msg, label)
2203 2204 return msg
2204 2205
    def develwarn(
        self, msg: bytes, stacklevel: int = 1, config: Optional[bytes] = None
    ) -> None:
        """issue a developer warning message

        Use 'stacklevel' to report the offender some layers further up in the
        stack.

        If 'config' is given, the warning is only emitted when either
        devel.all-warnings or the named devel.<config> knob is enabled.
        """
        # warnings are opt-in: bail out unless devel.all-warnings or the
        # caller-supplied devel.<config> option is set
        if not self.configbool(b'devel', b'all-warnings'):
            if config is None or not self.configbool(b'devel', config):
                return
        msg = b'devel-warn: ' + msg
        stacklevel += 1  # get in develwarn
        if self.tracebackflag:
            # --traceback mode: dump the full stack alongside the message
            util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
            self.log(
                b'develwarn',
                b'%s at:\n%s'
                % (msg, b''.join(util.getstackframes(stacklevel))),
            )
        else:
            # otherwise report only the offending call site, found by
            # walking 'stacklevel' frames up from the current one
            curframe = inspect.currentframe()
            calframe = inspect.getouterframes(curframe, 2)
            fname, lineno, fmsg = calframe[stacklevel][1:4]
            fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
            self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
            self.log(
                b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
            )

            # avoid cycles: frame objects reference their locals, which in
            # turn reference the frame objects
            del curframe
            del calframe
2238 2239
2239 2240 def deprecwarn(
2240 2241 self,
2241 2242 msg: bytes,
2242 2243 version: bytes,
2243 2244 stacklevel: int = 2,
2244 2245 ) -> None:
2245 2246 """issue a deprecation warning
2246 2247
2247 2248 - msg: message explaining what is deprecated and how to upgrade,
2248 2249 - version: last version where the API will be supported,
2249 2250 """
2250 2251 if not (
2251 2252 self.configbool(b'devel', b'all-warnings')
2252 2253 or self.configbool(b'devel', b'deprec-warn')
2253 2254 ):
2254 2255 return
2255 2256 msg += (
2256 2257 b"\n(compatibility will be dropped after Mercurial-%s,"
2257 2258 b" update your code.)"
2258 2259 ) % version
2259 2260 self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
2260 2261
2261 2262 def exportableenviron(self):
2262 2263 """The environment variables that are safe to export, e.g. through
2263 2264 hgweb.
2264 2265 """
2265 2266 return self._exportableenviron
2266 2267
2267 2268 @contextlib.contextmanager
2268 2269 def configoverride(self, overrides: _ConfigItems, source: bytes = b""):
2269 2270 """Context manager for temporary config overrides
2270 2271 `overrides` must be a dict of the following structure:
2271 2272 {(section, name) : value}"""
2272 2273 backups = {}
2273 2274 try:
2274 2275 for (section, name), value in overrides.items():
2275 2276 backups[(section, name)] = self.backupconfig(section, name)
2276 2277 self.setconfig(section, name, value, source)
2277 2278 yield
2278 2279 finally:
2279 2280 for __, backup in backups.items():
2280 2281 self.restoreconfig(backup)
2281 2282 # just restoring ui.quiet config to the previous value is not enough
2282 2283 # as it does not update ui.quiet class member
2283 2284 if (b'ui', b'quiet') in overrides:
2284 2285 self.fixconfig(section=b'ui')
2285 2286
2286 2287 def estimatememory(self) -> Optional[int]:
2287 2288 """Provide an estimate for the available system memory in Bytes.
2288 2289
2289 2290 This can be overriden via ui.available-memory. It returns None, if
2290 2291 no estimate can be computed.
2291 2292 """
2292 2293 value = self.config(b'ui', b'available-memory')
2293 2294 if value is not None:
2294 2295 try:
2295 2296 return util.sizetoint(value)
2296 2297 except error.ParseError:
2297 2298 raise error.ConfigError(
2298 2299 _(b"ui.available-memory value is invalid ('%s')") % value
2299 2300 )
2300 2301 return util._estimatememory()
2301 2302
2302 2303
# we instantiate one globally shared progress bar to avoid
# competing progress bars when multiple UI objects get created
# (lazily created by getprogbar(); queried via haveprogbar())
_progresssingleton: Optional[progress.progbar] = None
2306 2307
2307 2308
def getprogbar(ui: ui) -> progress.progbar:
    """Return the process-wide progress bar, creating it on first use."""
    global _progresssingleton
    if _progresssingleton is not None:
        return _progresssingleton
    # handing the creating 'ui' object to the shared singleton is fishy;
    # this is how the extension used to work but feel free to rework it.
    _progresssingleton = progress.progbar(ui)
    return _progresssingleton
2315 2316
2316 2317
def haveprogbar() -> bool:
    """Report whether the shared progress bar has been created yet."""
    bar = _progresssingleton
    return bar is not None
2319 2320
2320 2321
def _selectmsgdests(ui: ui):
    """Pick the (status, error) destinations per ui.message-output."""
    name = ui.config(b'ui', b'message-output')
    if name == b'channel':
        # use the structured message channel when it is ready; otherwise
        # fall back to ferr so status/error messages can still be printed
        dest = ui.fmsg if ui.fmsg else ui.ferr
        return dest, dest
    if name == b'stdio':
        return ui.fout, ui.ferr
    if name == b'stderr':
        return ui.ferr, ui.ferr
    raise error.Abort(b'invalid ui.message-output destination: %s' % name)
2335 2336
2336 2337
def _writemsgwith(write, dest, *args: bytes, **opts: _MsgOpts) -> None:
    """Forward a ui message to ``write`` after label translation.

    When ``dest`` is not a structured channel, a 'type' option is folded
    into the 'label' option as 'ui.<type>' so the message gets colorized.
    """
    # TODO: maybe change 'type' to a mandatory option
    structured = getattr(dest, 'structured', False)
    if not structured and 'type' in opts:
        msgtype = opts.pop('type')
        label = opts.get('label', b'')
        opts['label'] = label + b' ui.%s' % msgtype
    write(dest, *args, **opts)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now