##// END OF EJS Templates
rewriting: add an option for rewrite commands to use the archived phase...
Boris Feld -
r41961:64de5f44 default
parent child Browse files
Show More
@@ -1,1452 +1,1455 b''
1 1 # configitems.py - centralized declaration of configuration option
2 2 #
3 3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import functools
11 11 import re
12 12
13 13 from . import (
14 14 encoding,
15 15 error,
16 16 )
17 17
def loadconfigtable(ui, extname, configtable):
    """update config item known to the ui with the extension ones"""
    for section, items in sorted(configtable.items()):
        # merge into (or lazily create) the per-section register on the ui
        knownitems = ui._knownconfig.setdefault(section, itemregister())
        # any key the extension redefines over an already-known item gets a
        # developer warning before being overwritten below
        duplicated = set(knownitems) & set(items)
        for key in sorted(duplicated):
            msg = ("extension '%s' overwrite config item '%s.%s'"
                   % (extname, section, key))
            ui.develwarn(msg, config='warn-config')

        knownitems.update(items)
30 30
class configitem(object):
    """represent a known config item

    :section: the official config section where to find this item,
    :name: the official name within the section,
    :default: default value for this item,
    :alias: optional list of tuples as alternatives,
    :generic: this is a generic definition, match name using regular expression.
    """

    def __init__(self, section, name, default=None, alias=(),
                 generic=False, priority=0):
        self.section = section
        self.name = name
        self.default = default
        self.alias = list(alias)
        self.generic = generic
        self.priority = priority
        # generic items match candidate keys through a compiled pattern;
        # exact items never need one
        self._re = re.compile(self.name) if generic else None
52 52
class itemregister(dict):
    """A specialized dictionary that can handle wild-card selection"""

    def __init__(self):
        super(itemregister, self).__init__()
        # the subset of stored items flagged as generic (pattern-matching)
        self._generics = set()

    def update(self, other):
        super(itemregister, self).update(other)
        self._generics.update(other._generics)

    def __setitem__(self, key, item):
        super(itemregister, self).__setitem__(key, item)
        if item.generic:
            self._generics.add(item)

    def get(self, key):
        # an exact, non-generic entry wins outright
        found = super(itemregister, self).get(key)
        if found is not None and not found.generic:
            return found

        # otherwise try the generic items, lowest (priority, name) first
        ordered = sorted(self._generics, key=lambda g: (g.priority, g.name))
        for candidate in ordered:
            # we use 'match' instead of 'search' to make the matching simpler
            # for people unfamiliar with regular expression. Having the match
            # rooted to the start of the string will produce less surprising
            # result for user writing simple regex for sub-attribute.
            #
            # For example using "color\..*" match produces an unsurprising
            # result, while using search could suddenly match apparently
            # unrelated configuration that happens to contains "color."
            # anywhere. This is a tradeoff where we favor requiring ".*" on
            # some match to avoid the need to prefix most pattern with "^".
            # The "^" seems more error prone.
            if candidate._re.match(key):
                return candidate

        return None
92 92
# mapping of section name -> itemregister holding every core config item
coreitems = {}

def _register(configtable, *args, **kwargs):
    """record a new configitem into ``configtable``, refusing duplicates

    ``*args``/``**kwargs`` are forwarded to the ``configitem`` constructor.
    Raises ``error.ProgrammingError`` when the same section/name pair is
    registered twice.
    """
    item = configitem(*args, **kwargs)
    section = configtable.setdefault(item.section, itemregister())
    if item.name in section:
        msg = "duplicated config item registration for '%s.%s'"
        raise error.ProgrammingError(msg % (item.section, item.name))
    section[item.name] = item
102 102
# special value for case where the default is derived from other values
dynamicdefault = object()

# Registering actual config items

def getitemregister(configtable):
    """return a registration helper bound to ``configtable``

    The returned callable has the same signature as ``configitem`` (minus the
    table argument) and records each item into ``configtable``.
    """
    f = functools.partial(_register, configtable)
    # export pseudo enum as configitem.*
    f.dynamicdefault = dynamicdefault
    return f
113 113
# the registration helper used for all of Mercurial's own config items below
coreconfigitem = getitemregister(coreitems)

def _registerdiffopts(section, configprefix=''):
    """declare the standard diff options under ``section``/``configprefix``"""
    # 'unified' defaults to None (no explicit context amount); every other
    # diff knob is a plain boolean defaulting to off. Registration order is
    # preserved from the historical one-call-per-option form.
    diffopts = [
        ('nodates', False),
        ('showfunc', False),
        ('unified', None),
        ('git', False),
        ('ignorews', False),
        ('ignorewsamount', False),
        ('ignoreblanklines', False),
        ('ignorewseol', False),
        ('nobinary', False),
        ('noprefix', False),
        ('word-diff', False),
    ]
    for name, default in diffopts:
        coreconfigitem(section, configprefix + name,
                       default=default,
                       )
150 150
151 151 coreconfigitem('alias', '.*',
152 152 default=dynamicdefault,
153 153 generic=True,
154 154 )
155 155 coreconfigitem('auth', 'cookiefile',
156 156 default=None,
157 157 )
158 158 _registerdiffopts(section='annotate')
159 159 # bookmarks.pushing: internal hack for discovery
160 160 coreconfigitem('bookmarks', 'pushing',
161 161 default=list,
162 162 )
163 163 # bundle.mainreporoot: internal hack for bundlerepo
164 164 coreconfigitem('bundle', 'mainreporoot',
165 165 default='',
166 166 )
167 167 coreconfigitem('censor', 'policy',
168 168 default='abort',
169 169 )
170 170 coreconfigitem('chgserver', 'idletimeout',
171 171 default=3600,
172 172 )
173 173 coreconfigitem('chgserver', 'skiphash',
174 174 default=False,
175 175 )
176 176 coreconfigitem('cmdserver', 'log',
177 177 default=None,
178 178 )
179 179 coreconfigitem('cmdserver', 'max-log-files',
180 180 default=7,
181 181 )
182 182 coreconfigitem('cmdserver', 'max-log-size',
183 183 default='1 MB',
184 184 )
185 185 coreconfigitem('cmdserver', 'max-repo-cache',
186 186 default=0,
187 187 )
188 188 coreconfigitem('cmdserver', 'message-encodings',
189 189 default=list,
190 190 )
191 191 coreconfigitem('cmdserver', 'track-log',
192 192 default=lambda: ['chgserver', 'cmdserver', 'repocache'],
193 193 )
194 194 coreconfigitem('color', '.*',
195 195 default=None,
196 196 generic=True,
197 197 )
198 198 coreconfigitem('color', 'mode',
199 199 default='auto',
200 200 )
201 201 coreconfigitem('color', 'pagermode',
202 202 default=dynamicdefault,
203 203 )
204 204 _registerdiffopts(section='commands', configprefix='commit.interactive.')
205 205 coreconfigitem('commands', 'grep.all-files',
206 206 default=False,
207 207 )
208 208 coreconfigitem('commands', 'resolve.confirm',
209 209 default=False,
210 210 )
211 211 coreconfigitem('commands', 'resolve.explicit-re-merge',
212 212 default=False,
213 213 )
214 214 coreconfigitem('commands', 'resolve.mark-check',
215 215 default='none',
216 216 )
217 217 _registerdiffopts(section='commands', configprefix='revert.interactive.')
218 218 coreconfigitem('commands', 'show.aliasprefix',
219 219 default=list,
220 220 )
221 221 coreconfigitem('commands', 'status.relative',
222 222 default=False,
223 223 )
224 224 coreconfigitem('commands', 'status.skipstates',
225 225 default=[],
226 226 )
227 227 coreconfigitem('commands', 'status.terse',
228 228 default='',
229 229 )
230 230 coreconfigitem('commands', 'status.verbose',
231 231 default=False,
232 232 )
233 233 coreconfigitem('commands', 'update.check',
234 234 default=None,
235 235 )
236 236 coreconfigitem('commands', 'update.requiredest',
237 237 default=False,
238 238 )
239 239 coreconfigitem('committemplate', '.*',
240 240 default=None,
241 241 generic=True,
242 242 )
243 243 coreconfigitem('convert', 'bzr.saverev',
244 244 default=True,
245 245 )
246 246 coreconfigitem('convert', 'cvsps.cache',
247 247 default=True,
248 248 )
249 249 coreconfigitem('convert', 'cvsps.fuzz',
250 250 default=60,
251 251 )
252 252 coreconfigitem('convert', 'cvsps.logencoding',
253 253 default=None,
254 254 )
255 255 coreconfigitem('convert', 'cvsps.mergefrom',
256 256 default=None,
257 257 )
258 258 coreconfigitem('convert', 'cvsps.mergeto',
259 259 default=None,
260 260 )
261 261 coreconfigitem('convert', 'git.committeractions',
262 262 default=lambda: ['messagedifferent'],
263 263 )
264 264 coreconfigitem('convert', 'git.extrakeys',
265 265 default=list,
266 266 )
267 267 coreconfigitem('convert', 'git.findcopiesharder',
268 268 default=False,
269 269 )
270 270 coreconfigitem('convert', 'git.remoteprefix',
271 271 default='remote',
272 272 )
273 273 coreconfigitem('convert', 'git.renamelimit',
274 274 default=400,
275 275 )
276 276 coreconfigitem('convert', 'git.saverev',
277 277 default=True,
278 278 )
279 279 coreconfigitem('convert', 'git.similarity',
280 280 default=50,
281 281 )
282 282 coreconfigitem('convert', 'git.skipsubmodules',
283 283 default=False,
284 284 )
285 285 coreconfigitem('convert', 'hg.clonebranches',
286 286 default=False,
287 287 )
288 288 coreconfigitem('convert', 'hg.ignoreerrors',
289 289 default=False,
290 290 )
291 291 coreconfigitem('convert', 'hg.revs',
292 292 default=None,
293 293 )
294 294 coreconfigitem('convert', 'hg.saverev',
295 295 default=False,
296 296 )
297 297 coreconfigitem('convert', 'hg.sourcename',
298 298 default=None,
299 299 )
300 300 coreconfigitem('convert', 'hg.startrev',
301 301 default=None,
302 302 )
303 303 coreconfigitem('convert', 'hg.tagsbranch',
304 304 default='default',
305 305 )
306 306 coreconfigitem('convert', 'hg.usebranchnames',
307 307 default=True,
308 308 )
309 309 coreconfigitem('convert', 'ignoreancestorcheck',
310 310 default=False,
311 311 )
312 312 coreconfigitem('convert', 'localtimezone',
313 313 default=False,
314 314 )
315 315 coreconfigitem('convert', 'p4.encoding',
316 316 default=dynamicdefault,
317 317 )
318 318 coreconfigitem('convert', 'p4.startrev',
319 319 default=0,
320 320 )
321 321 coreconfigitem('convert', 'skiptags',
322 322 default=False,
323 323 )
324 324 coreconfigitem('convert', 'svn.debugsvnlog',
325 325 default=True,
326 326 )
327 327 coreconfigitem('convert', 'svn.trunk',
328 328 default=None,
329 329 )
330 330 coreconfigitem('convert', 'svn.tags',
331 331 default=None,
332 332 )
333 333 coreconfigitem('convert', 'svn.branches',
334 334 default=None,
335 335 )
336 336 coreconfigitem('convert', 'svn.startrev',
337 337 default=0,
338 338 )
339 339 coreconfigitem('debug', 'dirstate.delaywrite',
340 340 default=0,
341 341 )
342 342 coreconfigitem('defaults', '.*',
343 343 default=None,
344 344 generic=True,
345 345 )
346 346 coreconfigitem('devel', 'all-warnings',
347 347 default=False,
348 348 )
349 349 coreconfigitem('devel', 'bundle2.debug',
350 350 default=False,
351 351 )
352 352 coreconfigitem('devel', 'bundle.delta',
353 353 default='',
354 354 )
355 355 coreconfigitem('devel', 'cache-vfs',
356 356 default=None,
357 357 )
358 358 coreconfigitem('devel', 'check-locks',
359 359 default=False,
360 360 )
361 361 coreconfigitem('devel', 'check-relroot',
362 362 default=False,
363 363 )
364 364 coreconfigitem('devel', 'default-date',
365 365 default=None,
366 366 )
367 367 coreconfigitem('devel', 'deprec-warn',
368 368 default=False,
369 369 )
370 370 coreconfigitem('devel', 'disableloaddefaultcerts',
371 371 default=False,
372 372 )
373 373 coreconfigitem('devel', 'warn-empty-changegroup',
374 374 default=False,
375 375 )
376 376 coreconfigitem('devel', 'legacy.exchange',
377 377 default=list,
378 378 )
379 379 coreconfigitem('devel', 'servercafile',
380 380 default='',
381 381 )
382 382 coreconfigitem('devel', 'serverexactprotocol',
383 383 default='',
384 384 )
385 385 coreconfigitem('devel', 'serverrequirecert',
386 386 default=False,
387 387 )
388 388 coreconfigitem('devel', 'strip-obsmarkers',
389 389 default=True,
390 390 )
391 391 coreconfigitem('devel', 'warn-config',
392 392 default=None,
393 393 )
394 394 coreconfigitem('devel', 'warn-config-default',
395 395 default=None,
396 396 )
397 397 coreconfigitem('devel', 'user.obsmarker',
398 398 default=None,
399 399 )
400 400 coreconfigitem('devel', 'warn-config-unknown',
401 401 default=None,
402 402 )
403 403 coreconfigitem('devel', 'debug.copies',
404 404 default=False,
405 405 )
406 406 coreconfigitem('devel', 'debug.extensions',
407 407 default=False,
408 408 )
409 409 coreconfigitem('devel', 'debug.peer-request',
410 410 default=False,
411 411 )
412 412 _registerdiffopts(section='diff')
413 413 coreconfigitem('email', 'bcc',
414 414 default=None,
415 415 )
416 416 coreconfigitem('email', 'cc',
417 417 default=None,
418 418 )
419 419 coreconfigitem('email', 'charsets',
420 420 default=list,
421 421 )
422 422 coreconfigitem('email', 'from',
423 423 default=None,
424 424 )
425 425 coreconfigitem('email', 'method',
426 426 default='smtp',
427 427 )
428 428 coreconfigitem('email', 'reply-to',
429 429 default=None,
430 430 )
431 431 coreconfigitem('email', 'to',
432 432 default=None,
433 433 )
434 434 coreconfigitem('experimental', 'archivemetatemplate',
435 435 default=dynamicdefault,
436 436 )
437 437 coreconfigitem('experimental', 'auto-publish',
438 438 default='publish',
439 439 )
440 440 coreconfigitem('experimental', 'bundle-phases',
441 441 default=False,
442 442 )
443 443 coreconfigitem('experimental', 'bundle2-advertise',
444 444 default=True,
445 445 )
446 446 coreconfigitem('experimental', 'bundle2-output-capture',
447 447 default=False,
448 448 )
449 449 coreconfigitem('experimental', 'bundle2.pushback',
450 450 default=False,
451 451 )
452 452 coreconfigitem('experimental', 'bundle2lazylocking',
453 453 default=False,
454 454 )
455 455 coreconfigitem('experimental', 'bundlecomplevel',
456 456 default=None,
457 457 )
458 458 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
459 459 default=None,
460 460 )
461 461 coreconfigitem('experimental', 'bundlecomplevel.gzip',
462 462 default=None,
463 463 )
464 464 coreconfigitem('experimental', 'bundlecomplevel.none',
465 465 default=None,
466 466 )
467 467 coreconfigitem('experimental', 'bundlecomplevel.zstd',
468 468 default=None,
469 469 )
470 470 coreconfigitem('experimental', 'changegroup3',
471 471 default=False,
472 472 )
473 coreconfigitem('experimental', 'cleanup-as-archived',
474 default=False,
475 )
473 476 coreconfigitem('experimental', 'clientcompressionengines',
474 477 default=list,
475 478 )
476 479 coreconfigitem('experimental', 'copytrace',
477 480 default='on',
478 481 )
479 482 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
480 483 default=100,
481 484 )
482 485 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
483 486 default=100,
484 487 )
485 488 coreconfigitem('experimental', 'copies.read-from',
486 489 default="filelog-only",
487 490 )
488 491 coreconfigitem('experimental', 'crecordtest',
489 492 default=None,
490 493 )
491 494 coreconfigitem('experimental', 'directaccess',
492 495 default=False,
493 496 )
494 497 coreconfigitem('experimental', 'directaccess.revnums',
495 498 default=False,
496 499 )
497 500 coreconfigitem('experimental', 'editortmpinhg',
498 501 default=False,
499 502 )
500 503 coreconfigitem('experimental', 'evolution',
501 504 default=list,
502 505 )
503 506 coreconfigitem('experimental', 'evolution.allowdivergence',
504 507 default=False,
505 508 alias=[('experimental', 'allowdivergence')]
506 509 )
507 510 coreconfigitem('experimental', 'evolution.allowunstable',
508 511 default=None,
509 512 )
510 513 coreconfigitem('experimental', 'evolution.createmarkers',
511 514 default=None,
512 515 )
513 516 coreconfigitem('experimental', 'evolution.effect-flags',
514 517 default=True,
515 518 alias=[('experimental', 'effect-flags')]
516 519 )
517 520 coreconfigitem('experimental', 'evolution.exchange',
518 521 default=None,
519 522 )
520 523 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
521 524 default=False,
522 525 )
523 526 coreconfigitem('experimental', 'evolution.report-instabilities',
524 527 default=True,
525 528 )
526 529 coreconfigitem('experimental', 'evolution.track-operation',
527 530 default=True,
528 531 )
529 532 coreconfigitem('experimental', 'maxdeltachainspan',
530 533 default=-1,
531 534 )
532 535 coreconfigitem('experimental', 'mergetempdirprefix',
533 536 default=None,
534 537 )
535 538 coreconfigitem('experimental', 'mmapindexthreshold',
536 539 default=None,
537 540 )
538 541 coreconfigitem('experimental', 'narrow',
539 542 default=False,
540 543 )
541 544 coreconfigitem('experimental', 'nonnormalparanoidcheck',
542 545 default=False,
543 546 )
544 547 coreconfigitem('experimental', 'exportableenviron',
545 548 default=list,
546 549 )
547 550 coreconfigitem('experimental', 'extendedheader.index',
548 551 default=None,
549 552 )
550 553 coreconfigitem('experimental', 'extendedheader.similarity',
551 554 default=False,
552 555 )
553 556 coreconfigitem('experimental', 'format.compression',
554 557 default='zlib',
555 558 )
556 559 coreconfigitem('experimental', 'graphshorten',
557 560 default=False,
558 561 )
559 562 coreconfigitem('experimental', 'graphstyle.parent',
560 563 default=dynamicdefault,
561 564 )
562 565 coreconfigitem('experimental', 'graphstyle.missing',
563 566 default=dynamicdefault,
564 567 )
565 568 coreconfigitem('experimental', 'graphstyle.grandparent',
566 569 default=dynamicdefault,
567 570 )
568 571 coreconfigitem('experimental', 'hook-track-tags',
569 572 default=False,
570 573 )
571 574 coreconfigitem('experimental', 'httppeer.advertise-v2',
572 575 default=False,
573 576 )
574 577 coreconfigitem('experimental', 'httppeer.v2-encoder-order',
575 578 default=None,
576 579 )
577 580 coreconfigitem('experimental', 'httppostargs',
578 581 default=False,
579 582 )
580 583 coreconfigitem('experimental', 'mergedriver',
581 584 default=None,
582 585 )
583 586 coreconfigitem('experimental', 'nointerrupt', default=False)
584 587 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
585 588
586 589 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
587 590 default=False,
588 591 )
589 592 coreconfigitem('experimental', 'remotenames',
590 593 default=False,
591 594 )
592 595 coreconfigitem('experimental', 'removeemptydirs',
593 596 default=True,
594 597 )
595 598 coreconfigitem('experimental', 'revisions.prefixhexnode',
596 599 default=False,
597 600 )
598 601 coreconfigitem('experimental', 'revlogv2',
599 602 default=None,
600 603 )
601 604 coreconfigitem('experimental', 'revisions.disambiguatewithin',
602 605 default=None,
603 606 )
604 607 coreconfigitem('experimental', 'server.filesdata.recommended-batch-size',
605 608 default=50000,
606 609 )
607 610 coreconfigitem('experimental', 'server.manifestdata.recommended-batch-size',
608 611 default=100000,
609 612 )
610 613 coreconfigitem('experimental', 'server.stream-narrow-clones',
611 614 default=False,
612 615 )
613 616 coreconfigitem('experimental', 'single-head-per-branch',
614 617 default=False,
615 618 )
616 619 coreconfigitem('experimental', 'sshserver.support-v2',
617 620 default=False,
618 621 )
619 622 coreconfigitem('experimental', 'sparse-read',
620 623 default=False,
621 624 )
622 625 coreconfigitem('experimental', 'sparse-read.density-threshold',
623 626 default=0.50,
624 627 )
625 628 coreconfigitem('experimental', 'sparse-read.min-gap-size',
626 629 default='65K',
627 630 )
628 631 coreconfigitem('experimental', 'treemanifest',
629 632 default=False,
630 633 )
631 634 coreconfigitem('experimental', 'update.atomic-file',
632 635 default=False,
633 636 )
634 637 coreconfigitem('experimental', 'sshpeer.advertise-v2',
635 638 default=False,
636 639 )
637 640 coreconfigitem('experimental', 'web.apiserver',
638 641 default=False,
639 642 )
640 643 coreconfigitem('experimental', 'web.api.http-v2',
641 644 default=False,
642 645 )
643 646 coreconfigitem('experimental', 'web.api.debugreflect',
644 647 default=False,
645 648 )
646 649 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
647 650 default=False,
648 651 )
649 652 coreconfigitem('experimental', 'xdiff',
650 653 default=False,
651 654 )
652 655 coreconfigitem('extensions', '.*',
653 656 default=None,
654 657 generic=True,
655 658 )
656 659 coreconfigitem('extdata', '.*',
657 660 default=None,
658 661 generic=True,
659 662 )
660 663 coreconfigitem('format', 'chunkcachesize',
661 664 default=None,
662 665 )
663 666 coreconfigitem('format', 'dotencode',
664 667 default=True,
665 668 )
666 669 coreconfigitem('format', 'generaldelta',
667 670 default=False,
668 671 )
669 672 coreconfigitem('format', 'manifestcachesize',
670 673 default=None,
671 674 )
672 675 coreconfigitem('format', 'maxchainlen',
673 676 default=dynamicdefault,
674 677 )
675 678 coreconfigitem('format', 'obsstore-version',
676 679 default=None,
677 680 )
678 681 coreconfigitem('format', 'sparse-revlog',
679 682 default=True,
680 683 )
681 684 coreconfigitem('format', 'usefncache',
682 685 default=True,
683 686 )
684 687 coreconfigitem('format', 'usegeneraldelta',
685 688 default=True,
686 689 )
687 690 coreconfigitem('format', 'usestore',
688 691 default=True,
689 692 )
690 693 coreconfigitem('format', 'internal-phase',
691 694 default=False,
692 695 )
693 696 coreconfigitem('fsmonitor', 'warn_when_unused',
694 697 default=True,
695 698 )
696 699 coreconfigitem('fsmonitor', 'warn_update_file_count',
697 700 default=50000,
698 701 )
699 702 coreconfigitem('help', br'hidden-command\..*',
700 703 default=False,
701 704 generic=True,
702 705 )
703 706 coreconfigitem('help', br'hidden-topic\..*',
704 707 default=False,
705 708 generic=True,
706 709 )
707 710 coreconfigitem('hooks', '.*',
708 711 default=dynamicdefault,
709 712 generic=True,
710 713 )
711 714 coreconfigitem('hgweb-paths', '.*',
712 715 default=list,
713 716 generic=True,
714 717 )
715 718 coreconfigitem('hostfingerprints', '.*',
716 719 default=list,
717 720 generic=True,
718 721 )
719 722 coreconfigitem('hostsecurity', 'ciphers',
720 723 default=None,
721 724 )
722 725 coreconfigitem('hostsecurity', 'disabletls10warning',
723 726 default=False,
724 727 )
725 728 coreconfigitem('hostsecurity', 'minimumprotocol',
726 729 default=dynamicdefault,
727 730 )
728 731 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
729 732 default=dynamicdefault,
730 733 generic=True,
731 734 )
732 735 coreconfigitem('hostsecurity', '.*:ciphers$',
733 736 default=dynamicdefault,
734 737 generic=True,
735 738 )
736 739 coreconfigitem('hostsecurity', '.*:fingerprints$',
737 740 default=list,
738 741 generic=True,
739 742 )
740 743 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
741 744 default=None,
742 745 generic=True,
743 746 )
744 747
745 748 coreconfigitem('http_proxy', 'always',
746 749 default=False,
747 750 )
748 751 coreconfigitem('http_proxy', 'host',
749 752 default=None,
750 753 )
751 754 coreconfigitem('http_proxy', 'no',
752 755 default=list,
753 756 )
754 757 coreconfigitem('http_proxy', 'passwd',
755 758 default=None,
756 759 )
757 760 coreconfigitem('http_proxy', 'user',
758 761 default=None,
759 762 )
760 763
761 764 coreconfigitem('http', 'timeout',
762 765 default=None,
763 766 )
764 767
765 768 coreconfigitem('logtoprocess', 'commandexception',
766 769 default=None,
767 770 )
768 771 coreconfigitem('logtoprocess', 'commandfinish',
769 772 default=None,
770 773 )
771 774 coreconfigitem('logtoprocess', 'command',
772 775 default=None,
773 776 )
774 777 coreconfigitem('logtoprocess', 'develwarn',
775 778 default=None,
776 779 )
777 780 coreconfigitem('logtoprocess', 'uiblocked',
778 781 default=None,
779 782 )
780 783 coreconfigitem('merge', 'checkunknown',
781 784 default='abort',
782 785 )
783 786 coreconfigitem('merge', 'checkignored',
784 787 default='abort',
785 788 )
786 789 coreconfigitem('experimental', 'merge.checkpathconflicts',
787 790 default=False,
788 791 )
789 792 coreconfigitem('merge', 'followcopies',
790 793 default=True,
791 794 )
792 795 coreconfigitem('merge', 'on-failure',
793 796 default='continue',
794 797 )
795 798 coreconfigitem('merge', 'preferancestor',
796 799 default=lambda: ['*'],
797 800 )
798 801 coreconfigitem('merge', 'strict-capability-check',
799 802 default=False,
800 803 )
801 804 coreconfigitem('merge-tools', '.*',
802 805 default=None,
803 806 generic=True,
804 807 )
805 808 coreconfigitem('merge-tools', br'.*\.args$',
806 809 default="$local $base $other",
807 810 generic=True,
808 811 priority=-1,
809 812 )
810 813 coreconfigitem('merge-tools', br'.*\.binary$',
811 814 default=False,
812 815 generic=True,
813 816 priority=-1,
814 817 )
815 818 coreconfigitem('merge-tools', br'.*\.check$',
816 819 default=list,
817 820 generic=True,
818 821 priority=-1,
819 822 )
820 823 coreconfigitem('merge-tools', br'.*\.checkchanged$',
821 824 default=False,
822 825 generic=True,
823 826 priority=-1,
824 827 )
825 828 coreconfigitem('merge-tools', br'.*\.executable$',
826 829 default=dynamicdefault,
827 830 generic=True,
828 831 priority=-1,
829 832 )
830 833 coreconfigitem('merge-tools', br'.*\.fixeol$',
831 834 default=False,
832 835 generic=True,
833 836 priority=-1,
834 837 )
835 838 coreconfigitem('merge-tools', br'.*\.gui$',
836 839 default=False,
837 840 generic=True,
838 841 priority=-1,
839 842 )
840 843 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
841 844 default='basic',
842 845 generic=True,
843 846 priority=-1,
844 847 )
845 848 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
846 849 default=dynamicdefault, # take from ui.mergemarkertemplate
847 850 generic=True,
848 851 priority=-1,
849 852 )
850 853 coreconfigitem('merge-tools', br'.*\.priority$',
851 854 default=0,
852 855 generic=True,
853 856 priority=-1,
854 857 )
855 858 coreconfigitem('merge-tools', br'.*\.premerge$',
856 859 default=dynamicdefault,
857 860 generic=True,
858 861 priority=-1,
859 862 )
860 863 coreconfigitem('merge-tools', br'.*\.symlink$',
861 864 default=False,
862 865 generic=True,
863 866 priority=-1,
864 867 )
865 868 coreconfigitem('pager', 'attend-.*',
866 869 default=dynamicdefault,
867 870 generic=True,
868 871 )
869 872 coreconfigitem('pager', 'ignore',
870 873 default=list,
871 874 )
872 875 coreconfigitem('pager', 'pager',
873 876 default=dynamicdefault,
874 877 )
875 878 coreconfigitem('patch', 'eol',
876 879 default='strict',
877 880 )
878 881 coreconfigitem('patch', 'fuzz',
879 882 default=2,
880 883 )
881 884 coreconfigitem('paths', 'default',
882 885 default=None,
883 886 )
884 887 coreconfigitem('paths', 'default-push',
885 888 default=None,
886 889 )
887 890 coreconfigitem('paths', '.*',
888 891 default=None,
889 892 generic=True,
890 893 )
891 894 coreconfigitem('phases', 'checksubrepos',
892 895 default='follow',
893 896 )
894 897 coreconfigitem('phases', 'new-commit',
895 898 default='draft',
896 899 )
897 900 coreconfigitem('phases', 'publish',
898 901 default=True,
899 902 )
900 903 coreconfigitem('profiling', 'enabled',
901 904 default=False,
902 905 )
903 906 coreconfigitem('profiling', 'format',
904 907 default='text',
905 908 )
906 909 coreconfigitem('profiling', 'freq',
907 910 default=1000,
908 911 )
909 912 coreconfigitem('profiling', 'limit',
910 913 default=30,
911 914 )
912 915 coreconfigitem('profiling', 'nested',
913 916 default=0,
914 917 )
915 918 coreconfigitem('profiling', 'output',
916 919 default=None,
917 920 )
918 921 coreconfigitem('profiling', 'showmax',
919 922 default=0.999,
920 923 )
921 924 coreconfigitem('profiling', 'showmin',
922 925 default=dynamicdefault,
923 926 )
924 927 coreconfigitem('profiling', 'sort',
925 928 default='inlinetime',
926 929 )
927 930 coreconfigitem('profiling', 'statformat',
928 931 default='hotpath',
929 932 )
930 933 coreconfigitem('profiling', 'time-track',
931 934 default=dynamicdefault,
932 935 )
933 936 coreconfigitem('profiling', 'type',
934 937 default='stat',
935 938 )
936 939 coreconfigitem('progress', 'assume-tty',
937 940 default=False,
938 941 )
939 942 coreconfigitem('progress', 'changedelay',
940 943 default=1,
941 944 )
942 945 coreconfigitem('progress', 'clear-complete',
943 946 default=True,
944 947 )
945 948 coreconfigitem('progress', 'debug',
946 949 default=False,
947 950 )
948 951 coreconfigitem('progress', 'delay',
949 952 default=3,
950 953 )
951 954 coreconfigitem('progress', 'disable',
952 955 default=False,
953 956 )
954 957 coreconfigitem('progress', 'estimateinterval',
955 958 default=60.0,
956 959 )
957 960 coreconfigitem('progress', 'format',
958 961 default=lambda: ['topic', 'bar', 'number', 'estimate'],
959 962 )
960 963 coreconfigitem('progress', 'refresh',
961 964 default=0.1,
962 965 )
963 966 coreconfigitem('progress', 'width',
964 967 default=dynamicdefault,
965 968 )
966 969 coreconfigitem('push', 'pushvars.server',
967 970 default=False,
968 971 )
969 972 coreconfigitem('rewrite', 'backup-bundle',
970 973 default=True,
971 974 alias=[('ui', 'history-editing-backup')],
972 975 )
973 976 coreconfigitem('rewrite', 'update-timestamp',
974 977 default=False,
975 978 )
976 979 coreconfigitem('storage', 'new-repo-backend',
977 980 default='revlogv1',
978 981 )
979 982 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
980 983 default=True,
981 984 alias=[('format', 'aggressivemergedeltas')],
982 985 )
983 986 coreconfigitem('server', 'bookmarks-pushkey-compat',
984 987 default=True,
985 988 )
986 989 coreconfigitem('server', 'bundle1',
987 990 default=True,
988 991 )
989 992 coreconfigitem('server', 'bundle1gd',
990 993 default=None,
991 994 )
992 995 coreconfigitem('server', 'bundle1.pull',
993 996 default=None,
994 997 )
995 998 coreconfigitem('server', 'bundle1gd.pull',
996 999 default=None,
997 1000 )
998 1001 coreconfigitem('server', 'bundle1.push',
999 1002 default=None,
1000 1003 )
1001 1004 coreconfigitem('server', 'bundle1gd.push',
1002 1005 default=None,
1003 1006 )
1004 1007 coreconfigitem('server', 'bundle2.stream',
1005 1008 default=True,
1006 1009 alias=[('experimental', 'bundle2.stream')]
1007 1010 )
1008 1011 coreconfigitem('server', 'compressionengines',
1009 1012 default=list,
1010 1013 )
1011 1014 coreconfigitem('server', 'concurrent-push-mode',
1012 1015 default='strict',
1013 1016 )
1014 1017 coreconfigitem('server', 'disablefullbundle',
1015 1018 default=False,
1016 1019 )
1017 1020 coreconfigitem('server', 'maxhttpheaderlen',
1018 1021 default=1024,
1019 1022 )
1020 1023 coreconfigitem('server', 'pullbundle',
1021 1024 default=False,
1022 1025 )
1023 1026 coreconfigitem('server', 'preferuncompressed',
1024 1027 default=False,
1025 1028 )
1026 1029 coreconfigitem('server', 'streamunbundle',
1027 1030 default=False,
1028 1031 )
1029 1032 coreconfigitem('server', 'uncompressed',
1030 1033 default=True,
1031 1034 )
1032 1035 coreconfigitem('server', 'uncompressedallowsecret',
1033 1036 default=False,
1034 1037 )
1035 1038 coreconfigitem('server', 'validate',
1036 1039 default=False,
1037 1040 )
1038 1041 coreconfigitem('server', 'zliblevel',
1039 1042 default=-1,
1040 1043 )
1041 1044 coreconfigitem('server', 'zstdlevel',
1042 1045 default=3,
1043 1046 )
1044 1047 coreconfigitem('share', 'pool',
1045 1048 default=None,
1046 1049 )
1047 1050 coreconfigitem('share', 'poolnaming',
1048 1051 default='identity',
1049 1052 )
1050 1053 coreconfigitem('smtp', 'host',
1051 1054 default=None,
1052 1055 )
1053 1056 coreconfigitem('smtp', 'local_hostname',
1054 1057 default=None,
1055 1058 )
1056 1059 coreconfigitem('smtp', 'password',
1057 1060 default=None,
1058 1061 )
1059 1062 coreconfigitem('smtp', 'port',
1060 1063 default=dynamicdefault,
1061 1064 )
1062 1065 coreconfigitem('smtp', 'tls',
1063 1066 default='none',
1064 1067 )
1065 1068 coreconfigitem('smtp', 'username',
1066 1069 default=None,
1067 1070 )
1068 1071 coreconfigitem('sparse', 'missingwarning',
1069 1072 default=True,
1070 1073 )
1071 1074 coreconfigitem('subrepos', 'allowed',
1072 1075 default=dynamicdefault, # to make backporting simpler
1073 1076 )
1074 1077 coreconfigitem('subrepos', 'hg:allowed',
1075 1078 default=dynamicdefault,
1076 1079 )
1077 1080 coreconfigitem('subrepos', 'git:allowed',
1078 1081 default=dynamicdefault,
1079 1082 )
1080 1083 coreconfigitem('subrepos', 'svn:allowed',
1081 1084 default=dynamicdefault,
1082 1085 )
1083 1086 coreconfigitem('templates', '.*',
1084 1087 default=None,
1085 1088 generic=True,
1086 1089 )
1087 1090 coreconfigitem('templateconfig', '.*',
1088 1091 default=dynamicdefault,
1089 1092 generic=True,
1090 1093 )
1091 1094 coreconfigitem('trusted', 'groups',
1092 1095 default=list,
1093 1096 )
1094 1097 coreconfigitem('trusted', 'users',
1095 1098 default=list,
1096 1099 )
1097 1100 coreconfigitem('ui', '_usedassubrepo',
1098 1101 default=False,
1099 1102 )
1100 1103 coreconfigitem('ui', 'allowemptycommit',
1101 1104 default=False,
1102 1105 )
1103 1106 coreconfigitem('ui', 'archivemeta',
1104 1107 default=True,
1105 1108 )
1106 1109 coreconfigitem('ui', 'askusername',
1107 1110 default=False,
1108 1111 )
1109 1112 coreconfigitem('ui', 'clonebundlefallback',
1110 1113 default=False,
1111 1114 )
1112 1115 coreconfigitem('ui', 'clonebundleprefers',
1113 1116 default=list,
1114 1117 )
1115 1118 coreconfigitem('ui', 'clonebundles',
1116 1119 default=True,
1117 1120 )
1118 1121 coreconfigitem('ui', 'color',
1119 1122 default='auto',
1120 1123 )
1121 1124 coreconfigitem('ui', 'commitsubrepos',
1122 1125 default=False,
1123 1126 )
1124 1127 coreconfigitem('ui', 'debug',
1125 1128 default=False,
1126 1129 )
1127 1130 coreconfigitem('ui', 'debugger',
1128 1131 default=None,
1129 1132 )
1130 1133 coreconfigitem('ui', 'editor',
1131 1134 default=dynamicdefault,
1132 1135 )
1133 1136 coreconfigitem('ui', 'fallbackencoding',
1134 1137 default=None,
1135 1138 )
1136 1139 coreconfigitem('ui', 'forcecwd',
1137 1140 default=None,
1138 1141 )
1139 1142 coreconfigitem('ui', 'forcemerge',
1140 1143 default=None,
1141 1144 )
1142 1145 coreconfigitem('ui', 'formatdebug',
1143 1146 default=False,
1144 1147 )
1145 1148 coreconfigitem('ui', 'formatjson',
1146 1149 default=False,
1147 1150 )
1148 1151 coreconfigitem('ui', 'formatted',
1149 1152 default=None,
1150 1153 )
1151 1154 coreconfigitem('ui', 'graphnodetemplate',
1152 1155 default=None,
1153 1156 )
1154 1157 coreconfigitem('ui', 'interactive',
1155 1158 default=None,
1156 1159 )
1157 1160 coreconfigitem('ui', 'interface',
1158 1161 default=None,
1159 1162 )
1160 1163 coreconfigitem('ui', 'interface.chunkselector',
1161 1164 default=None,
1162 1165 )
1163 1166 coreconfigitem('ui', 'large-file-limit',
1164 1167 default=10000000,
1165 1168 )
1166 1169 coreconfigitem('ui', 'logblockedtimes',
1167 1170 default=False,
1168 1171 )
1169 1172 coreconfigitem('ui', 'logtemplate',
1170 1173 default=None,
1171 1174 )
1172 1175 coreconfigitem('ui', 'merge',
1173 1176 default=None,
1174 1177 )
1175 1178 coreconfigitem('ui', 'mergemarkers',
1176 1179 default='basic',
1177 1180 )
1178 1181 coreconfigitem('ui', 'mergemarkertemplate',
1179 1182 default=('{node|short} '
1180 1183 '{ifeq(tags, "tip", "", '
1181 1184 'ifeq(tags, "", "", "{tags} "))}'
1182 1185 '{if(bookmarks, "{bookmarks} ")}'
1183 1186 '{ifeq(branch, "default", "", "{branch} ")}'
1184 1187 '- {author|user}: {desc|firstline}')
1185 1188 )
1186 1189 coreconfigitem('ui', 'message-output',
1187 1190 default='stdio',
1188 1191 )
1189 1192 coreconfigitem('ui', 'nontty',
1190 1193 default=False,
1191 1194 )
1192 1195 coreconfigitem('ui', 'origbackuppath',
1193 1196 default=None,
1194 1197 )
1195 1198 coreconfigitem('ui', 'paginate',
1196 1199 default=True,
1197 1200 )
1198 1201 coreconfigitem('ui', 'patch',
1199 1202 default=None,
1200 1203 )
1201 1204 coreconfigitem('ui', 'pre-merge-tool-output-template',
1202 1205 default=None,
1203 1206 )
1204 1207 coreconfigitem('ui', 'portablefilenames',
1205 1208 default='warn',
1206 1209 )
1207 1210 coreconfigitem('ui', 'promptecho',
1208 1211 default=False,
1209 1212 )
1210 1213 coreconfigitem('ui', 'quiet',
1211 1214 default=False,
1212 1215 )
1213 1216 coreconfigitem('ui', 'quietbookmarkmove',
1214 1217 default=False,
1215 1218 )
1216 1219 coreconfigitem('ui', 'relative-paths',
1217 1220 default='legacy',
1218 1221 )
1219 1222 coreconfigitem('ui', 'remotecmd',
1220 1223 default='hg',
1221 1224 )
1222 1225 coreconfigitem('ui', 'report_untrusted',
1223 1226 default=True,
1224 1227 )
1225 1228 coreconfigitem('ui', 'rollback',
1226 1229 default=True,
1227 1230 )
1228 1231 coreconfigitem('ui', 'signal-safe-lock',
1229 1232 default=True,
1230 1233 )
1231 1234 coreconfigitem('ui', 'slash',
1232 1235 default=False,
1233 1236 )
1234 1237 coreconfigitem('ui', 'ssh',
1235 1238 default='ssh',
1236 1239 )
1237 1240 coreconfigitem('ui', 'ssherrorhint',
1238 1241 default=None,
1239 1242 )
1240 1243 coreconfigitem('ui', 'statuscopies',
1241 1244 default=False,
1242 1245 )
1243 1246 coreconfigitem('ui', 'strict',
1244 1247 default=False,
1245 1248 )
1246 1249 coreconfigitem('ui', 'style',
1247 1250 default='',
1248 1251 )
1249 1252 coreconfigitem('ui', 'supportcontact',
1250 1253 default=None,
1251 1254 )
1252 1255 coreconfigitem('ui', 'textwidth',
1253 1256 default=78,
1254 1257 )
1255 1258 coreconfigitem('ui', 'timeout',
1256 1259 default='600',
1257 1260 )
1258 1261 coreconfigitem('ui', 'timeout.warn',
1259 1262 default=0,
1260 1263 )
1261 1264 coreconfigitem('ui', 'traceback',
1262 1265 default=False,
1263 1266 )
1264 1267 coreconfigitem('ui', 'tweakdefaults',
1265 1268 default=False,
1266 1269 )
1267 1270 coreconfigitem('ui', 'username',
1268 1271 alias=[('ui', 'user')]
1269 1272 )
1270 1273 coreconfigitem('ui', 'verbose',
1271 1274 default=False,
1272 1275 )
1273 1276 coreconfigitem('verify', 'skipflags',
1274 1277 default=None,
1275 1278 )
1276 1279 coreconfigitem('web', 'allowbz2',
1277 1280 default=False,
1278 1281 )
1279 1282 coreconfigitem('web', 'allowgz',
1280 1283 default=False,
1281 1284 )
1282 1285 coreconfigitem('web', 'allow-pull',
1283 1286 alias=[('web', 'allowpull')],
1284 1287 default=True,
1285 1288 )
1286 1289 coreconfigitem('web', 'allow-push',
1287 1290 alias=[('web', 'allow_push')],
1288 1291 default=list,
1289 1292 )
1290 1293 coreconfigitem('web', 'allowzip',
1291 1294 default=False,
1292 1295 )
1293 1296 coreconfigitem('web', 'archivesubrepos',
1294 1297 default=False,
1295 1298 )
1296 1299 coreconfigitem('web', 'cache',
1297 1300 default=True,
1298 1301 )
1299 1302 coreconfigitem('web', 'comparisoncontext',
1300 1303 default=5,
1301 1304 )
1302 1305 coreconfigitem('web', 'contact',
1303 1306 default=None,
1304 1307 )
1305 1308 coreconfigitem('web', 'deny_push',
1306 1309 default=list,
1307 1310 )
1308 1311 coreconfigitem('web', 'guessmime',
1309 1312 default=False,
1310 1313 )
1311 1314 coreconfigitem('web', 'hidden',
1312 1315 default=False,
1313 1316 )
1314 1317 coreconfigitem('web', 'labels',
1315 1318 default=list,
1316 1319 )
1317 1320 coreconfigitem('web', 'logoimg',
1318 1321 default='hglogo.png',
1319 1322 )
1320 1323 coreconfigitem('web', 'logourl',
1321 1324 default='https://mercurial-scm.org/',
1322 1325 )
1323 1326 coreconfigitem('web', 'accesslog',
1324 1327 default='-',
1325 1328 )
1326 1329 coreconfigitem('web', 'address',
1327 1330 default='',
1328 1331 )
1329 1332 coreconfigitem('web', 'allow-archive',
1330 1333 alias=[('web', 'allow_archive')],
1331 1334 default=list,
1332 1335 )
1333 1336 coreconfigitem('web', 'allow_read',
1334 1337 default=list,
1335 1338 )
1336 1339 coreconfigitem('web', 'baseurl',
1337 1340 default=None,
1338 1341 )
1339 1342 coreconfigitem('web', 'cacerts',
1340 1343 default=None,
1341 1344 )
1342 1345 coreconfigitem('web', 'certificate',
1343 1346 default=None,
1344 1347 )
1345 1348 coreconfigitem('web', 'collapse',
1346 1349 default=False,
1347 1350 )
1348 1351 coreconfigitem('web', 'csp',
1349 1352 default=None,
1350 1353 )
1351 1354 coreconfigitem('web', 'deny_read',
1352 1355 default=list,
1353 1356 )
1354 1357 coreconfigitem('web', 'descend',
1355 1358 default=True,
1356 1359 )
1357 1360 coreconfigitem('web', 'description',
1358 1361 default="",
1359 1362 )
1360 1363 coreconfigitem('web', 'encoding',
1361 1364 default=lambda: encoding.encoding,
1362 1365 )
1363 1366 coreconfigitem('web', 'errorlog',
1364 1367 default='-',
1365 1368 )
1366 1369 coreconfigitem('web', 'ipv6',
1367 1370 default=False,
1368 1371 )
1369 1372 coreconfigitem('web', 'maxchanges',
1370 1373 default=10,
1371 1374 )
1372 1375 coreconfigitem('web', 'maxfiles',
1373 1376 default=10,
1374 1377 )
1375 1378 coreconfigitem('web', 'maxshortchanges',
1376 1379 default=60,
1377 1380 )
1378 1381 coreconfigitem('web', 'motd',
1379 1382 default='',
1380 1383 )
1381 1384 coreconfigitem('web', 'name',
1382 1385 default=dynamicdefault,
1383 1386 )
1384 1387 coreconfigitem('web', 'port',
1385 1388 default=8000,
1386 1389 )
1387 1390 coreconfigitem('web', 'prefix',
1388 1391 default='',
1389 1392 )
1390 1393 coreconfigitem('web', 'push_ssl',
1391 1394 default=True,
1392 1395 )
1393 1396 coreconfigitem('web', 'refreshinterval',
1394 1397 default=20,
1395 1398 )
1396 1399 coreconfigitem('web', 'server-header',
1397 1400 default=None,
1398 1401 )
1399 1402 coreconfigitem('web', 'static',
1400 1403 default=None,
1401 1404 )
1402 1405 coreconfigitem('web', 'staticurl',
1403 1406 default=None,
1404 1407 )
1405 1408 coreconfigitem('web', 'stripes',
1406 1409 default=1,
1407 1410 )
1408 1411 coreconfigitem('web', 'style',
1409 1412 default='paper',
1410 1413 )
1411 1414 coreconfigitem('web', 'templates',
1412 1415 default=None,
1413 1416 )
1414 1417 coreconfigitem('web', 'view',
1415 1418 default='served',
1416 1419 )
1417 1420 coreconfigitem('worker', 'backgroundclose',
1418 1421 default=dynamicdefault,
1419 1422 )
1420 1423 # Windows defaults to a limit of 512 open files. A buffer of 128
1421 1424 # should give us enough headway.
1422 1425 coreconfigitem('worker', 'backgroundclosemaxqueue',
1423 1426 default=384,
1424 1427 )
1425 1428 coreconfigitem('worker', 'backgroundcloseminfilecount',
1426 1429 default=2048,
1427 1430 )
1428 1431 coreconfigitem('worker', 'backgroundclosethreadcount',
1429 1432 default=4,
1430 1433 )
1431 1434 coreconfigitem('worker', 'enabled',
1432 1435 default=True,
1433 1436 )
1434 1437 coreconfigitem('worker', 'numcpus',
1435 1438 default=None,
1436 1439 )
1437 1440
1438 1441 # Rebase related configuration moved to core because other extension are doing
1439 1442 # strange things. For example, shelve import the extensions to reuse some bit
1440 1443 # without formally loading it.
1441 1444 coreconfigitem('commands', 'rebase.requiredest',
1442 1445 default=False,
1443 1446 )
1444 1447 coreconfigitem('experimental', 'rebaseskipobsolete',
1445 1448 default=True,
1446 1449 )
1447 1450 coreconfigitem('rebase', 'singletransaction',
1448 1451 default=False,
1449 1452 )
1450 1453 coreconfigitem('rebase', 'experimental.inmemory',
1451 1454 default=False,
1452 1455 )
@@ -1,1891 +1,1903 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 bin,
22 22 hex,
23 23 nullid,
24 24 nullrev,
25 25 short,
26 26 wdirid,
27 27 wdirrev,
28 28 )
29 29
30 30 from . import (
31 31 encoding,
32 32 error,
33 33 match as matchmod,
34 34 obsolete,
35 35 obsutil,
36 36 pathutil,
37 37 phases,
38 38 policy,
39 39 pycompat,
40 40 revsetlang,
41 41 similar,
42 42 smartset,
43 43 url,
44 44 util,
45 45 vfs,
46 46 )
47 47
48 48 from .utils import (
49 49 procutil,
50 50 stringutil,
51 51 )
52 52
53 53 if pycompat.iswindows:
54 54 from . import scmwindows as scmplatform
55 55 else:
56 56 from . import scmposix as scmplatform
57 57
58 58 parsers = policy.importmod(r'parsers')
59 59
60 60 termsize = scmplatform.termsize
61 61
62 62 class status(tuple):
63 63 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
64 64 and 'ignored' properties are only relevant to the working copy.
65 65 '''
66 66
67 67 __slots__ = ()
68 68
69 69 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
70 70 clean):
71 71 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
72 72 ignored, clean))
73 73
74 74 @property
75 75 def modified(self):
76 76 '''files that have been modified'''
77 77 return self[0]
78 78
79 79 @property
80 80 def added(self):
81 81 '''files that have been added'''
82 82 return self[1]
83 83
84 84 @property
85 85 def removed(self):
86 86 '''files that have been removed'''
87 87 return self[2]
88 88
89 89 @property
90 90 def deleted(self):
91 91 '''files that are in the dirstate, but have been deleted from the
92 92 working copy (aka "missing")
93 93 '''
94 94 return self[3]
95 95
96 96 @property
97 97 def unknown(self):
98 98 '''files not in the dirstate that are not ignored'''
99 99 return self[4]
100 100
101 101 @property
102 102 def ignored(self):
103 103 '''files not in the dirstate that are ignored (by _dirignore())'''
104 104 return self[5]
105 105
106 106 @property
107 107 def clean(self):
108 108 '''files that have not been modified'''
109 109 return self[6]
110 110
111 111 def __repr__(self, *args, **kwargs):
112 112 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
113 113 r'unknown=%s, ignored=%s, clean=%s>') %
114 114 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115 115
116 116 def itersubrepos(ctx1, ctx2):
117 117 """find subrepos in ctx1 or ctx2"""
118 118 # Create a (subpath, ctx) mapping where we prefer subpaths from
119 119 # ctx1. The subpaths from ctx2 are important when the .hgsub file
120 120 # has been modified (in ctx2) but not yet committed (in ctx1).
121 121 subpaths = dict.fromkeys(ctx2.substate, ctx2)
122 122 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
123 123
124 124 missing = set()
125 125
126 126 for subpath in ctx2.substate:
127 127 if subpath not in ctx1.substate:
128 128 del subpaths[subpath]
129 129 missing.add(subpath)
130 130
131 131 for subpath, ctx in sorted(subpaths.iteritems()):
132 132 yield subpath, ctx.sub(subpath)
133 133
134 134 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
135 135 # status and diff will have an accurate result when it does
136 136 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
137 137 # against itself.
138 138 for subpath in missing:
139 139 yield subpath, ctx2.nullsub(subpath, ctx1)
140 140
141 141 def nochangesfound(ui, repo, excluded=None):
142 142 '''Report no changes for push/pull, excluded is None or a list of
143 143 nodes excluded from the push/pull.
144 144 '''
145 145 secretlist = []
146 146 if excluded:
147 147 for n in excluded:
148 148 ctx = repo[n]
149 149 if ctx.phase() >= phases.secret and not ctx.extinct():
150 150 secretlist.append(n)
151 151
152 152 if secretlist:
153 153 ui.status(_("no changes found (ignored %d secret changesets)\n")
154 154 % len(secretlist))
155 155 else:
156 156 ui.status(_("no changes found\n"))
157 157
158 158 def callcatch(ui, func):
159 159 """call func() with global exception handling
160 160
161 161 return func() if no exception happens. otherwise do some error handling
162 162 and return an exit code accordingly. does not handle all exceptions.
163 163 """
164 164 try:
165 165 try:
166 166 return func()
167 167 except: # re-raises
168 168 ui.traceback()
169 169 raise
170 170 # Global exception handling, alphabetically
171 171 # Mercurial-specific first, followed by built-in and library exceptions
172 172 except error.LockHeld as inst:
173 173 if inst.errno == errno.ETIMEDOUT:
174 174 reason = _('timed out waiting for lock held by %r') % (
175 175 pycompat.bytestr(inst.locker))
176 176 else:
177 177 reason = _('lock held by %r') % inst.locker
178 178 ui.error(_("abort: %s: %s\n") % (
179 179 inst.desc or stringutil.forcebytestr(inst.filename), reason))
180 180 if not inst.locker:
181 181 ui.error(_("(lock might be very busy)\n"))
182 182 except error.LockUnavailable as inst:
183 183 ui.error(_("abort: could not lock %s: %s\n") %
184 184 (inst.desc or stringutil.forcebytestr(inst.filename),
185 185 encoding.strtolocal(inst.strerror)))
186 186 except error.OutOfBandError as inst:
187 187 if inst.args:
188 188 msg = _("abort: remote error:\n")
189 189 else:
190 190 msg = _("abort: remote error\n")
191 191 ui.error(msg)
192 192 if inst.args:
193 193 ui.error(''.join(inst.args))
194 194 if inst.hint:
195 195 ui.error('(%s)\n' % inst.hint)
196 196 except error.RepoError as inst:
197 197 ui.error(_("abort: %s!\n") % inst)
198 198 if inst.hint:
199 199 ui.error(_("(%s)\n") % inst.hint)
200 200 except error.ResponseError as inst:
201 201 ui.error(_("abort: %s") % inst.args[0])
202 202 msg = inst.args[1]
203 203 if isinstance(msg, type(u'')):
204 204 msg = pycompat.sysbytes(msg)
205 205 if not isinstance(msg, bytes):
206 206 ui.error(" %r\n" % (msg,))
207 207 elif not msg:
208 208 ui.error(_(" empty string\n"))
209 209 else:
210 210 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
211 211 except error.CensoredNodeError as inst:
212 212 ui.error(_("abort: file censored %s!\n") % inst)
213 213 except error.StorageError as inst:
214 214 ui.error(_("abort: %s!\n") % inst)
215 215 if inst.hint:
216 216 ui.error(_("(%s)\n") % inst.hint)
217 217 except error.InterventionRequired as inst:
218 218 ui.error("%s\n" % inst)
219 219 if inst.hint:
220 220 ui.error(_("(%s)\n") % inst.hint)
221 221 return 1
222 222 except error.WdirUnsupported:
223 223 ui.error(_("abort: working directory revision cannot be specified\n"))
224 224 except error.Abort as inst:
225 225 ui.error(_("abort: %s\n") % inst)
226 226 if inst.hint:
227 227 ui.error(_("(%s)\n") % inst.hint)
228 228 except ImportError as inst:
229 229 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
230 230 m = stringutil.forcebytestr(inst).split()[-1]
231 231 if m in "mpatch bdiff".split():
232 232 ui.error(_("(did you forget to compile extensions?)\n"))
233 233 elif m in "zlib".split():
234 234 ui.error(_("(is your Python install correct?)\n"))
235 235 except (IOError, OSError) as inst:
236 236 if util.safehasattr(inst, "code"): # HTTPError
237 237 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
238 238 elif util.safehasattr(inst, "reason"): # URLError or SSLError
239 239 try: # usually it is in the form (errno, strerror)
240 240 reason = inst.reason.args[1]
241 241 except (AttributeError, IndexError):
242 242 # it might be anything, for example a string
243 243 reason = inst.reason
244 244 if isinstance(reason, pycompat.unicode):
245 245 # SSLError of Python 2.7.9 contains a unicode
246 246 reason = encoding.unitolocal(reason)
247 247 ui.error(_("abort: error: %s\n") % reason)
248 248 elif (util.safehasattr(inst, "args")
249 249 and inst.args and inst.args[0] == errno.EPIPE):
250 250 pass
251 251 elif getattr(inst, "strerror", None): # common IOError or OSError
252 252 if getattr(inst, "filename", None) is not None:
253 253 ui.error(_("abort: %s: '%s'\n") % (
254 254 encoding.strtolocal(inst.strerror),
255 255 stringutil.forcebytestr(inst.filename)))
256 256 else:
257 257 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
258 258 else: # suspicious IOError
259 259 raise
260 260 except MemoryError:
261 261 ui.error(_("abort: out of memory\n"))
262 262 except SystemExit as inst:
263 263 # Commands shouldn't sys.exit directly, but give a return code.
264 264 # Just in case catch this and and pass exit code to caller.
265 265 return inst.code
266 266
267 267 return -1
268 268
269 269 def checknewlabel(repo, lbl, kind):
270 270 # Do not use the "kind" parameter in ui output.
271 271 # It makes strings difficult to translate.
272 272 if lbl in ['tip', '.', 'null']:
273 273 raise error.Abort(_("the name '%s' is reserved") % lbl)
274 274 for c in (':', '\0', '\n', '\r'):
275 275 if c in lbl:
276 276 raise error.Abort(
277 277 _("%r cannot be used in a name") % pycompat.bytestr(c))
278 278 try:
279 279 int(lbl)
280 280 raise error.Abort(_("cannot use an integer as a name"))
281 281 except ValueError:
282 282 pass
283 283 if lbl.strip() != lbl:
284 284 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285 285
286 286 def checkfilename(f):
287 287 '''Check that the filename f is an acceptable filename for a tracked file'''
288 288 if '\r' in f or '\n' in f:
289 289 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
290 290 % pycompat.bytestr(f))
291 291
292 292 def checkportable(ui, f):
293 293 '''Check if filename f is portable and warn or abort depending on config'''
294 294 checkfilename(f)
295 295 abort, warn = checkportabilityalert(ui)
296 296 if abort or warn:
297 297 msg = util.checkwinfilename(f)
298 298 if msg:
299 299 msg = "%s: %s" % (msg, procutil.shellquote(f))
300 300 if abort:
301 301 raise error.Abort(msg)
302 302 ui.warn(_("warning: %s\n") % msg)
303 303
304 304 def checkportabilityalert(ui):
305 305 '''check if the user's config requests nothing, a warning, or abort for
306 306 non-portable filenames'''
307 307 val = ui.config('ui', 'portablefilenames')
308 308 lval = val.lower()
309 309 bval = stringutil.parsebool(val)
310 310 abort = pycompat.iswindows or lval == 'abort'
311 311 warn = bval or lval == 'warn'
312 312 if bval is None and not (warn or abort or lval == 'ignore'):
313 313 raise error.ConfigError(
314 314 _("ui.portablefilenames value is invalid ('%s')") % val)
315 315 return abort, warn
316 316
317 317 class casecollisionauditor(object):
318 318 def __init__(self, ui, abort, dirstate):
319 319 self._ui = ui
320 320 self._abort = abort
321 321 allfiles = '\0'.join(dirstate._map)
322 322 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
323 323 self._dirstate = dirstate
324 324 # The purpose of _newfiles is so that we don't complain about
325 325 # case collisions if someone were to call this object with the
326 326 # same filename twice.
327 327 self._newfiles = set()
328 328
329 329 def __call__(self, f):
330 330 if f in self._newfiles:
331 331 return
332 332 fl = encoding.lower(f)
333 333 if fl in self._loweredfiles and f not in self._dirstate:
334 334 msg = _('possible case-folding collision for %s') % f
335 335 if self._abort:
336 336 raise error.Abort(msg)
337 337 self._ui.warn(_("warning: %s\n") % msg)
338 338 self._loweredfiles.add(fl)
339 339 self._newfiles.add(f)
340 340
341 341 def filteredhash(repo, maxrev):
342 342 """build hash of filtered revisions in the current repoview.
343 343
344 344 Multiple caches perform up-to-date validation by checking that the
345 345 tiprev and tipnode stored in the cache file match the current repository.
346 346 However, this is not sufficient for validating repoviews because the set
347 347 of revisions in the view may change without the repository tiprev and
348 348 tipnode changing.
349 349
350 350 This function hashes all the revs filtered from the view and returns
351 351 that SHA-1 digest.
352 352 """
353 353 cl = repo.changelog
354 354 if not cl.filteredrevs:
355 355 return None
356 356 key = None
357 357 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
358 358 if revs:
359 359 s = hashlib.sha1()
360 360 for rev in revs:
361 361 s.update('%d;' % rev)
362 362 key = s.digest()
363 363 return key
364 364
365 365 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
366 366 '''yield every hg repository under path, always recursively.
367 367 The recurse flag will only control recursion into repo working dirs'''
368 368 def errhandler(err):
369 369 if err.filename == path:
370 370 raise err
371 371 samestat = getattr(os.path, 'samestat', None)
372 372 if followsym and samestat is not None:
373 373 def adddir(dirlst, dirname):
374 374 dirstat = os.stat(dirname)
375 375 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
376 376 if not match:
377 377 dirlst.append(dirstat)
378 378 return not match
379 379 else:
380 380 followsym = False
381 381
382 382 if (seen_dirs is None) and followsym:
383 383 seen_dirs = []
384 384 adddir(seen_dirs, path)
385 385 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
386 386 dirs.sort()
387 387 if '.hg' in dirs:
388 388 yield root # found a repository
389 389 qroot = os.path.join(root, '.hg', 'patches')
390 390 if os.path.isdir(os.path.join(qroot, '.hg')):
391 391 yield qroot # we have a patch queue repo here
392 392 if recurse:
393 393 # avoid recursing inside the .hg directory
394 394 dirs.remove('.hg')
395 395 else:
396 396 dirs[:] = [] # don't descend further
397 397 elif followsym:
398 398 newdirs = []
399 399 for d in dirs:
400 400 fname = os.path.join(root, d)
401 401 if adddir(seen_dirs, fname):
402 402 if os.path.islink(fname):
403 403 for hgname in walkrepos(fname, True, seen_dirs):
404 404 yield hgname
405 405 else:
406 406 newdirs.append(d)
407 407 dirs[:] = newdirs
408 408
409 409 def binnode(ctx):
410 410 """Return binary node id for a given basectx"""
411 411 node = ctx.node()
412 412 if node is None:
413 413 return wdirid
414 414 return node
415 415
416 416 def intrev(ctx):
417 417 """Return integer for a given basectx that can be used in comparison or
418 418 arithmetic operation"""
419 419 rev = ctx.rev()
420 420 if rev is None:
421 421 return wdirrev
422 422 return rev
423 423
424 424 def formatchangeid(ctx):
425 425 """Format changectx as '{rev}:{node|formatnode}', which is the default
426 426 template provided by logcmdutil.changesettemplater"""
427 427 repo = ctx.repo()
428 428 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
429 429
430 430 def formatrevnode(ui, rev, node):
431 431 """Format given revision and node depending on the current verbosity"""
432 432 if ui.debugflag:
433 433 hexfunc = hex
434 434 else:
435 435 hexfunc = short
436 436 return '%d:%s' % (rev, hexfunc(node))
437 437
438 438 def resolvehexnodeidprefix(repo, prefix):
439 439 if (prefix.startswith('x') and
440 440 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
441 441 prefix = prefix[1:]
442 442 try:
443 443 # Uses unfiltered repo because it's faster when prefix is ambiguous/
444 444 # This matches the shortesthexnodeidprefix() function below.
445 445 node = repo.unfiltered().changelog._partialmatch(prefix)
446 446 except error.AmbiguousPrefixLookupError:
447 447 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
448 448 if revset:
449 449 # Clear config to avoid infinite recursion
450 450 configoverrides = {('experimental',
451 451 'revisions.disambiguatewithin'): None}
452 452 with repo.ui.configoverride(configoverrides):
453 453 revs = repo.anyrevs([revset], user=True)
454 454 matches = []
455 455 for rev in revs:
456 456 node = repo.changelog.node(rev)
457 457 if hex(node).startswith(prefix):
458 458 matches.append(node)
459 459 if len(matches) == 1:
460 460 return matches[0]
461 461 raise
462 462 if node is None:
463 463 return
464 464 repo.changelog.rev(node) # make sure node isn't filtered
465 465 return node
466 466
467 467 def mayberevnum(repo, prefix):
468 468 """Checks if the given prefix may be mistaken for a revision number"""
469 469 try:
470 470 i = int(prefix)
471 471 # if we are a pure int, then starting with zero will not be
472 472 # confused as a rev; or, obviously, if the int is larger
473 473 # than the value of the tip rev. We still need to disambiguate if
474 474 # prefix == '0', since that *is* a valid revnum.
475 475 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
476 476 return False
477 477 return True
478 478 except ValueError:
479 479 return False
480 480
481 481 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
482 482 """Find the shortest unambiguous prefix that matches hexnode.
483 483
484 484 If "cache" is not None, it must be a dictionary that can be used for
485 485 caching between calls to this method.
486 486 """
487 487 # _partialmatch() of filtered changelog could take O(len(repo)) time,
488 488 # which would be unacceptably slow. so we look for hash collision in
489 489 # unfiltered space, which means some hashes may be slightly longer.
490 490
491 491 minlength=max(minlength, 1)
492 492
493 493 def disambiguate(prefix):
494 494 """Disambiguate against revnums."""
495 495 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
496 496 if mayberevnum(repo, prefix):
497 497 return 'x' + prefix
498 498 else:
499 499 return prefix
500 500
501 501 hexnode = hex(node)
502 502 for length in range(len(prefix), len(hexnode) + 1):
503 503 prefix = hexnode[:length]
504 504 if not mayberevnum(repo, prefix):
505 505 return prefix
506 506
507 507 cl = repo.unfiltered().changelog
508 508 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
509 509 if revset:
510 510 revs = None
511 511 if cache is not None:
512 512 revs = cache.get('disambiguationrevset')
513 513 if revs is None:
514 514 revs = repo.anyrevs([revset], user=True)
515 515 if cache is not None:
516 516 cache['disambiguationrevset'] = revs
517 517 if cl.rev(node) in revs:
518 518 hexnode = hex(node)
519 519 nodetree = None
520 520 if cache is not None:
521 521 nodetree = cache.get('disambiguationnodetree')
522 522 if not nodetree:
523 523 try:
524 524 nodetree = parsers.nodetree(cl.index, len(revs))
525 525 except AttributeError:
526 526 # no native nodetree
527 527 pass
528 528 else:
529 529 for r in revs:
530 530 nodetree.insert(r)
531 531 if cache is not None:
532 532 cache['disambiguationnodetree'] = nodetree
533 533 if nodetree is not None:
534 534 length = max(nodetree.shortest(node), minlength)
535 535 prefix = hexnode[:length]
536 536 return disambiguate(prefix)
537 537 for length in range(minlength, len(hexnode) + 1):
538 538 matches = []
539 539 prefix = hexnode[:length]
540 540 for rev in revs:
541 541 otherhexnode = repo[rev].hex()
542 542 if prefix == otherhexnode[:length]:
543 543 matches.append(otherhexnode)
544 544 if len(matches) == 1:
545 545 return disambiguate(prefix)
546 546
547 547 try:
548 548 return disambiguate(cl.shortest(node, minlength))
549 549 except error.LookupError:
550 550 raise error.RepoLookupError()
551 551
552 552 def isrevsymbol(repo, symbol):
553 553 """Checks if a symbol exists in the repo.
554 554
555 555 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
556 556 symbol is an ambiguous nodeid prefix.
557 557 """
558 558 try:
559 559 revsymbol(repo, symbol)
560 560 return True
561 561 except error.RepoLookupError:
562 562 return False
563 563
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Raises error.RepoLookupError (or one of its Filtered* variants) when the
    symbol cannot be resolved.
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        # fast path for the common special names
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # try an integer revision number, possibly negative (counted from tip)
        try:
            r = int(symbol)
            # reject strings that int() accepts but that are not a plain
            # decimal revision number (e.g. '010', ' 1')
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # hidden revision: let the outer handler build a useful message
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # try a full 40-hex-digit node id
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # finally, try a unique hex node id prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # the symbol referred to the working directory pseudo-revision
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
624 624
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants.

    Returns (does not raise) an error.FilteredRepoLookupError."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    # any other filter level (e.g. 'served'): generic message, no hint
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)
649 649
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve ``revspec`` to one changectx, or ``default`` when empty.

    Revision 0 counts as a real spec; only '' / None fall back to the
    default. When the spec matches several revisions, the last one wins.
    """
    # careful: 0 is falsy but is a legitimate revision number
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
658 658
def _pairspec(revspec):
    """Return a truthy value when ``revspec`` is a top-level range expression."""
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
662 662
def revpair(repo, revs):
    """Resolve a list of revision specs into a (first, second) context pair.

    With no specs at all, the pair is ('.', working directory). When the
    specs collapse to a single revision and are not a range expression, the
    second member is the working directory context.
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    # multiple specs collapsed to one revision while at least one of them
    # resolved to an empty set: refuse to guess which side was meant
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
684 684
def revrange(repo, specs, localalias=None):
    """Execute one or more revsets and return the union of their results.

    This is the preferred entry point for running user-supplied revsets,
    since it honors user configuration such as revset aliases.

    The revsets in ``specs`` are combined with a chained ``OR``; an empty
    ``specs`` yields an empty result. Plain integers are accepted and
    treated as revision numbers. Any argument interpolation must already
    have been done (see ``revsetlang.formatspec()``).

    Returns a ``revset.abstractsmartset``, a list-like container of
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('%d', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
712 712
def meaningfulparents(repo, ctx):
    """Return the parent revisions of ``ctx`` worth showing to the user.

    Both parents of a merge are always meaningful. For a linear changeset
    the single parent is reported only when it is not simply the preceding
    revision; in debug mode the null revision is appended as well (so all
    parents are always shown).
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a real merge: both sides matter
        return parents
    onlyparent = parents[0]
    if repo.ui.debugflag:
        return [onlyparent, repo[nullrev]]
    # omit the parent when it is just the immediately preceding revision
    return [] if onlyparent.rev() >= intrev(ctx) - 1 else parents
728 728
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produced paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    # precedence: forcerelativevalue > ui.relative-paths > legacy fallback
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        # bind cwd and pathto once so the returned closure is cheap to call
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    elif repo.ui.configbool('ui', 'slash'):
        # repo-relative paths already use '/' separators; pass through
        return lambda f: f
    else:
        # convert to the platform's native separators
        return util.localpath
763 763
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    def prefixed(f):
        # rebase the file onto the subrepo path before delegating
        return uipathfn(posixpath.join(subpath, f))
    return prefixed
767 767
def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    if pats:
        return True
    return bool(opts.get('include') or opts.get('exclude'))
775 775
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        # only expand patterns without an explicit kind prefix ('glob:', ...)
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid glob syntax: keep the pattern literally
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # keep the original (kinded, or glob-less) pattern untouched
        ret.append(kindpat)
    return ret
794 794
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    # expand bare globs (windows) unless the caller already did
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
    def bad(f, msg):
        # default bad-file callback: warn on the repo's ui
        ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # match-everything matcher: report that no explicit patterns apply
        pats = []
    return m, pats
818 818
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.

    Thin wrapper around matchandpats() that discards the pattern list.'''
    matcher, discarded = matchandpats(ctx, pats, opts, globbed, default,
                                      badfn=badfn)
    return matcher
823 823
def matchall(repo):
    '''Return a matcher that will efficiently match everything.

    The ``repo`` argument is unused; it is kept for signature symmetry with
    the other match* helpers.'''
    return matchmod.always()
827 827
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.

    ``badfn`` is forwarded to the exact matcher as the bad-file callback.'''
    return matchmod.exact(files, badfn=badfn)
831 831
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    A plain path (no pattern kind) is canonicalized directly. An actual
    pattern must match exactly one file in `rev`; otherwise
    error.ParseError(msg) is raised.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]
845 845
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    backupdir = ui.config('ui', 'origbackuppath')
    if backupdir:
        return vfs.vfs(repo.wvfs.join(backupdir))
    return None
854 854
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the deepest conflicting file needs removal
                break

        origvfs.makedirs(origbackupdir)

    # a directory (but not a symlink) in the way of the backup file itself
    # is removed so the backup can be written
    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
889 889
890 890 class _containsnode(object):
891 891 """proxy __contains__(node) to container.__contains__ which accepts revs"""
892 892
893 893 def __init__(self, repo, revcontainer):
894 894 self._torev = repo.changelog.rev
895 895 self._revcontains = revcontainer.__contains__
896 896
897 897 def __contains__(self, node):
898 898 return self._revcontains(self._torev(node))
899 899
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase is only meaningful when we are also fixing phases
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            # explicit moves passed by the caller take precedence
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # map each new node to the old nodes it replaces
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process new nodes parents-first so phase() below sees the
        # already-decided phase of a new parent
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                # keep the highest phase among the replaced nodes
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # NOTE: 'phase' here shadows the phase() helper defined above; the
        # helper is no longer needed at this point
        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        elif phases.supportinternal(repo) and mayusearchived:
            # hide the replaced nodes by moving them to the archived phase
            # instead of stripping them
            # this assume we do not have "unstable" nodes above the cleaned ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair # avoid import cycle
                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(repo, allreplaced, allreplaced, node,
                                    operation)
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
1042 1054
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    """Schedule unknown files for addition and missing files for removal.

    Recurses into subrepos when requested, prints what it is doing (unless
    exact matches in quiet mode), records renames detected by similarity,
    and - unless dry_run - applies the changes to the dirstate.

    Returns 1 when any explicitly-named file was rejected or a subrepo
    addremove failed, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # convert the user-facing percentage into a 0..1 ratio
    similarity /= 100.0

    ret = 0

    # recurse into requested/matched subrepositories first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % uipathfn(subpath))

    # collect files the matcher flags as bad; only explicitly named ones
    # cause a non-zero return at the end
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % uipathfn(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % uipathfn(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
1105 1117
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # 'rejected' is referenced by the badfn closure before being assigned;
    # the lambda is only invoked later, during _interestingfiles()
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file that was explicitly requested means failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
1138 1150
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of file lists:
    (added, unknown, deleted, removed, forgotten).'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # st is the stat result (or None if the file is gone from disk);
    # dstate is the dirstate code: '?' untracked, 'r' removed, 'a' added
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and a legitimate path: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing on disk: candidate for removal
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1168 1180
def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping of detected renames; empty when the
    similarity threshold is zero or below.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (uipathfn(old), uipathfn(new),
                            score * 100))
        renames[new] = old
    return renames
1183 1195
1184 1196 def _markchanges(repo, unknown, deleted, renames):
1185 1197 '''Marks the files in unknown as added, the files in deleted as removed,
1186 1198 and the files in renames as copied.'''
1187 1199 wctx = repo[None]
1188 1200 with repo.wlock():
1189 1201 wctx.forget(deleted)
1190 1202 wctx.add(unknown)
1191 1203 for new, old in renames.iteritems():
1192 1204 wctx.copy(old, new)
1193 1205
def getrenamedfn(repo, endrev=None):
    """Return a getrenamed(fn, rev) function with a per-file rename cache.

    The cache covers linkrevs up to ``endrev`` (default: all of the repo)."""
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first request for this file: scan its whole filelog once
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed
1224 1236
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # undo the copy record rather than marking dst as copied to itself
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added, not committed: no usable copy data
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1243 1255
def writerequires(opener, requirements):
    """Write ``requirements`` to the 'requires' file, sorted one per line.

    ``opener`` is a vfs-style callable; the file is opened with
    atomictemp=True."""
    with opener('requires', 'w', atomictemp=True) as fp:
        fp.write(''.join("%s\n" % entry for entry in sorted(requirements)))
1248 1260
class filecachesubentry(object):
    """Cache-validity tracker for a single file path.

    Stores a stat snapshot of ``path`` and reports through changed()
    whether the file has been modified since the last refresh."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-snapshot the file; pointless when it is known uncacheable
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            # remember the new snapshot so repeated calls don't re-report
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1303 1315
class filecacheentry(object):
    """Aggregate several filecachesubentry objects into one cache token."""

    def __init__(self, paths, stat=True):
        # one subentry per tracked path
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1320 1332
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name,
        # both as a native str (sname) and as bytes (name, _filecache key)
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        # once computed, the value lives in obj.__dict__ and shadows this
        # non-data descriptor, so __get__ should never run again for obj
        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # file changed on disk since the entry was created: recompute
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x
1405 1417
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # split "<revspec> <value>"; value is optional
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    # only check the exit status after cleanup succeeded
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1462 1474
1463 1475 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1464 1476 if lock is None:
1465 1477 raise error.LockInheritanceContractViolation(
1466 1478 'lock can only be inherited while held')
1467 1479 if environ is None:
1468 1480 environ = {}
1469 1481 with lock.inherit() as locker:
1470 1482 environ[envvar] = locker
1471 1483 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1472 1484
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1481 1493
class progress(object):
    """Track and render progress for a long-running operation.

    Wraps a ui-provided ``updatebar`` callback, keeping position/total
    state. Usable as a context manager; the bar is completed (position
    reset to None) on exit.
    """
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0            # current position; None once complete()d
        self.topic = topic
        self.unit = unit
        self.total = total
        # when progress.debug is set, every update is also written via
        # ui.debug()
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Move the bar to ``pos``; optionally update the expected total."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        """Advance the bar by ``step`` units."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Signal completion: clear state and notify the bar one last time."""
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # bugfix: 'unit' must be initialized up front -- previously it was
        # only bound when self.unit was non-empty, so enabling
        # progress.debug with a unit-less bar raised UnboundLocalError.
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1528 1540
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1535 1547
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    optimise = ui.configbool('format', 'generaldelta')
    return optimise
1541 1553
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain newline characters"""

    # reserved pseudo-key used to carry the file's free-form first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # NOTE(review): 'keys' is accepted but never used here -- presumably
        # kept for API compatibility; confirm against callers before removing.
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            # a stored __firstline key would collide with the pseudo-key above
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # raised by dict() when a line has no '=' (split yields 1 item)
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            # validate up front so a bad mapping never leaves a partial file
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp: readers never observe a partially-written file
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1610 1622
# transaction-name prefixes after which obsoleted changesets are reported
# (consumed by registersummarycallback via txmatch)
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction-name prefixes after which newly-added changesets are reported
_reportnewcssource = [
    'pull',
    'unbundle',
]
1623 1635
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1636 1648
# a list of (repo, revs, match) prefetch functions, run by prefetchfiles()
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1642 1654
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Depending on 'txnname', registers post-close callbacks on 'otr' that
    report obsoleted changesets, new instabilities, newly-added changesets
    and phase changes.
    """
    def txmatch(sources):
        # 'sources' holds transaction-name prefixes (see the
        # _report*source lists above)
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (display name, revset name) for each instability we track
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count visible unstable changesets per instability type
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot counts now; the callback diffs against them post-close
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # only count pre-existing changesets that became public
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1771 1783
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    if delta <= 0:
        return None
    return _('%i new %s changesets\n') % (delta, instability)
1779 1791
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a short textual summary of a list of node ids.

    All nodes are listed when there are at most maxnumnodes of them or the
    ui is verbose; otherwise only the first maxnumnodes are shown, followed
    by a count of the remainder.
    """
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1785 1797
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    # strip/repair may legitimately create transient extra heads
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1800 1812
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions wrap this hook
    return sink
1806 1818
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access to hidden changesets is an opt-in experimental feature
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    # only these filters can be narrowed by pinning extra revisions
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1849 1861
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    # revision numbers are only honored when this experimental knob is set
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try to interpret the symbol as a revision number
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    # hidden == present in the unfiltered changelog but
                    # filtered out of the visible one
                    if n not in cl:
                        revs.add(n)
                continue
        except ValueError:
            pass

        # otherwise treat it as a (possibly abbreviated) hex node id
        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
1883 1895
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    # ancestors of the bookmark, minus anything reachable only through
    # other heads or other bookmarks
    expr = ("ancestors(bookmark(%s)) - "
            "ancestors(head() and not bookmark(%s)) - "
            "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(expr, mark, mark, mark)
@@ -1,77 +1,143 b''
1 1 =========================================================
2 2 Test features and behaviors related to the archived phase
3 3 =========================================================
4 4
5 5 $ cat << EOF >> $HGRCPATH
6 6 > [format]
7 7 > internal-phase=yes
8 8 > [extensions]
9 9 > strip=
10 10 > [experimental]
11 11 > EOF
12 12
13 13 $ hg init repo
14 14 $ cd repo
15 15 $ echo root > a
16 16 $ hg add a
17 17 $ hg ci -m 'root'
18 18
19 19 Test that bundle can unarchive a changeset
20 20 ------------------------------------------
21 21
22 22 $ echo foo >> a
23 23 $ hg st
24 24 M a
25 25 $ hg ci -m 'unbundletesting'
26 26 $ hg log -G
27 27 @ changeset: 1:883aadbbf309
28 28 | tag: tip
29 29 | user: test
30 30 | date: Thu Jan 01 00:00:00 1970 +0000
31 31 | summary: unbundletesting
32 32 |
33 33 o changeset: 0:c1863a3840c6
34 34 user: test
35 35 date: Thu Jan 01 00:00:00 1970 +0000
36 36 summary: root
37 37
38 38 $ hg strip --soft --rev '.'
39 39 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 40 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/883aadbbf309-efc55adc-backup.hg
41 41 $ hg log -G
42 42 @ changeset: 0:c1863a3840c6
43 43 tag: tip
44 44 user: test
45 45 date: Thu Jan 01 00:00:00 1970 +0000
46 46 summary: root
47 47
48 48 $ hg log -G --hidden
49 49 o changeset: 1:883aadbbf309
50 50 | tag: tip
51 51 | user: test
52 52 | date: Thu Jan 01 00:00:00 1970 +0000
53 53 | summary: unbundletesting
54 54 |
55 55 @ changeset: 0:c1863a3840c6
56 56 user: test
57 57 date: Thu Jan 01 00:00:00 1970 +0000
58 58 summary: root
59 59
60 60 $ hg unbundle .hg/strip-backup/883aadbbf309-efc55adc-backup.hg
61 61 adding changesets
62 62 adding manifests
63 63 adding file changes
64 64 added 0 changesets with 0 changes to 1 files
65 65 (run 'hg update' to get a working copy)
66 66 $ hg log -G
67 67 o changeset: 1:883aadbbf309
68 68 | tag: tip
69 69 | user: test
70 70 | date: Thu Jan 01 00:00:00 1970 +0000
71 71 | summary: unbundletesting
72 72 |
73 73 @ changeset: 0:c1863a3840c6
74 74 user: test
75 75 date: Thu Jan 01 00:00:00 1970 +0000
76 76 summary: root
77 77
78
79 Test that history rewriting command can use the archived phase when allowed to
80 ------------------------------------------------------------------------------
81
82 $ hg up 'desc(unbundletesting)'
83 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 $ echo bar >> a
85 $ hg commit --amend --config experimental.cleanup-as-archived=yes
86 $ hg log -G
87 @ changeset: 2:d1e73e428f29
88 | tag: tip
89 | parent: 0:c1863a3840c6
90 | user: test
91 | date: Thu Jan 01 00:00:00 1970 +0000
92 | summary: unbundletesting
93 |
94 o changeset: 0:c1863a3840c6
95 user: test
96 date: Thu Jan 01 00:00:00 1970 +0000
97 summary: root
98
99 $ hg log -G --hidden
100 @ changeset: 2:d1e73e428f29
101 | tag: tip
102 | parent: 0:c1863a3840c6
103 | user: test
104 | date: Thu Jan 01 00:00:00 1970 +0000
105 | summary: unbundletesting
106 |
107 | o changeset: 1:883aadbbf309
108 |/ user: test
109 | date: Thu Jan 01 00:00:00 1970 +0000
110 | summary: unbundletesting
111 |
112 o changeset: 0:c1863a3840c6
113 user: test
114 date: Thu Jan 01 00:00:00 1970 +0000
115 summary: root
116
117 $ ls -1 .hg/strip-backup/
118 883aadbbf309-efc55adc-amend.hg
119 883aadbbf309-efc55adc-backup.hg
120 $ hg unbundle .hg/strip-backup/883aadbbf309*amend.hg
121 adding changesets
122 adding manifests
123 adding file changes
124 added 0 changesets with 0 changes to 1 files
125 (run 'hg update' to get a working copy)
126 $ hg log -G
127 @ changeset: 2:d1e73e428f29
128 | tag: tip
129 | parent: 0:c1863a3840c6
130 | user: test
131 | date: Thu Jan 01 00:00:00 1970 +0000
132 | summary: unbundletesting
133 |
134 | o changeset: 1:883aadbbf309
135 |/ user: test
136 | date: Thu Jan 01 00:00:00 1970 +0000
137 | summary: unbundletesting
138 |
139 o changeset: 0:c1863a3840c6
140 user: test
141 date: Thu Jan 01 00:00:00 1970 +0000
142 summary: root
143
General Comments 0
You need to be logged in to leave comments. Login now