ui: ensure `getpass()` returns bytes...
Matt Harbison
r47013:07b0a687 stable
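
The commit titled above concerns ui's `getpass()` handing back bytes rather than str on Python 3. A minimal sketch of that idea, assuming Mercurial's `encoding.strfromlocal`/`encoding.strtolocal` helpers, is shown below; the helper name `getpass_bytes` is hypothetical and this is not the actual patch.

    # Sketch only: normalize getpass() output to bytes.
    import getpass
    from mercurial import encoding

    def getpass_bytes(prompt=b'password: '):
        # getpass.getpass() takes and returns str on Python 3
        value = getpass.getpass(encoding.strfromlocal(prompt))
        if not isinstance(value, bytes):
            # str -> bytes in the local encoding, as Mercurial APIs expect
            value = encoding.strtolocal(value)
        return value
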
@@ -1,4578 +1,4582 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 revlog,
73 73 revset,
74 74 revsetlang,
75 75 scmutil,
76 76 setdiscovery,
77 77 simplemerge,
78 78 sshpeer,
79 79 sslutil,
80 80 streamclone,
81 81 tags as tagsmod,
82 82 templater,
83 83 treediscovery,
84 84 upgrade,
85 85 url as urlmod,
86 86 util,
87 87 vfs as vfsmod,
88 88 wireprotoframing,
89 89 wireprotoserver,
90 90 wireprotov2peer,
91 91 )
92 92 from .utils import (
93 93 cborutil,
94 94 compression,
95 95 dateutil,
96 96 procutil,
97 97 stringutil,
98 98 )
99 99
100 100 from .revlogutils import (
101 101 deltas as deltautil,
102 102 nodemap,
103 103 sidedata,
104 104 )
105 105
106 106 release = lockmod.release
107 107
108 108 command = registrar.command()
109 109
110 110
111 111 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
112 112 def debugancestor(ui, repo, *args):
113 113 """find the ancestor revision of two revisions in a given index"""
114 114 if len(args) == 3:
115 115 index, rev1, rev2 = args
116 116 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
117 117 lookup = r.lookup
118 118 elif len(args) == 2:
119 119 if not repo:
120 120 raise error.Abort(
121 121 _(b'there is no Mercurial repository here (.hg not found)')
122 122 )
123 123 rev1, rev2 = args
124 124 r = repo.changelog
125 125 lookup = repo.lookup
126 126 else:
127 127 raise error.Abort(_(b'either two or three arguments required'))
128 128 a = r.ancestor(lookup(rev1), lookup(rev2))
129 129 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
130 130
131 131
132 132 @command(b'debugantivirusrunning', [])
133 133 def debugantivirusrunning(ui, repo):
134 134 """attempt to trigger an antivirus scanner to see if one is active"""
135 135 with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
136 136 f.write(
137 137 util.b85decode(
138 138 # This is a base85-armored version of the EICAR test file. See
139 139 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
140 140 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
141 141 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
142 142 )
143 143 )
144 144 # Give an AV engine time to scan the file.
145 145 time.sleep(2)
146 146 util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
147 147
148 148
149 149 @command(b'debugapplystreamclonebundle', [], b'FILE')
150 150 def debugapplystreamclonebundle(ui, repo, fname):
151 151 """apply a stream clone bundle file"""
152 152 f = hg.openpath(ui, fname)
153 153 gen = exchange.readbundle(ui, f, fname)
154 154 gen.apply(repo)
155 155
156 156
157 157 @command(
158 158 b'debugbuilddag',
159 159 [
160 160 (
161 161 b'm',
162 162 b'mergeable-file',
163 163 None,
164 164 _(b'add single file mergeable changes'),
165 165 ),
166 166 (
167 167 b'o',
168 168 b'overwritten-file',
169 169 None,
170 170 _(b'add single file all revs overwrite'),
171 171 ),
172 172 (b'n', b'new-file', None, _(b'add new file at each rev')),
173 173 ],
174 174 _(b'[OPTION]... [TEXT]'),
175 175 )
176 176 def debugbuilddag(
177 177 ui,
178 178 repo,
179 179 text=None,
180 180 mergeable_file=False,
181 181 overwritten_file=False,
182 182 new_file=False,
183 183 ):
184 184 """builds a repo with a given DAG from scratch in the current empty repo
185 185
186 186 The description of the DAG is read from stdin if not given on the
187 187 command line.
188 188
189 189 Elements:
190 190
191 191 - "+n" is a linear run of n nodes based on the current default parent
192 192 - "." is a single node based on the current default parent
193 193 - "$" resets the default parent to null (implied at the start);
194 194 otherwise the default parent is always the last node created
195 195 - "<p" sets the default parent to the backref p
196 196 - "*p" is a fork at parent p, which is a backref
197 197 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
198 198 - "/p2" is a merge of the preceding node and p2
199 199 - ":tag" defines a local tag for the preceding node
200 200 - "@branch" sets the named branch for subsequent nodes
201 201 - "#...\\n" is a comment up to the end of the line
202 202
203 203 Whitespace between the above elements is ignored.
204 204
205 205 A backref is either
206 206
207 207 - a number n, which references the node curr-n, where curr is the current
208 208 node, or
209 209 - the name of a local tag you placed earlier using ":tag", or
210 210 - empty to denote the default parent.
211 211
212 212 All string-valued elements are either strictly alphanumeric, or must
213 213 be enclosed in double quotes ("..."), with "\\" as escape character.
214 214 """
215 215
216 216 if text is None:
217 217 ui.status(_(b"reading DAG from stdin\n"))
218 218 text = ui.fin.read()
219 219
220 220 cl = repo.changelog
221 221 if len(cl) > 0:
222 222 raise error.Abort(_(b'repository is not empty'))
223 223
224 224 # determine number of revs in DAG
225 225 total = 0
226 226 for type, data in dagparser.parsedag(text):
227 227 if type == b'n':
228 228 total += 1
229 229
230 230 if mergeable_file:
231 231 linesperrev = 2
232 232 # make a file with k lines per rev
233 233 initialmergedlines = [
234 234 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
235 235 ]
236 236 initialmergedlines.append(b"")
237 237
238 238 tags = []
239 239 progress = ui.makeprogress(
240 240 _(b'building'), unit=_(b'revisions'), total=total
241 241 )
242 242 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
243 243 at = -1
244 244 atbranch = b'default'
245 245 nodeids = []
246 246 id = 0
247 247 progress.update(id)
248 248 for type, data in dagparser.parsedag(text):
249 249 if type == b'n':
250 250 ui.note((b'node %s\n' % pycompat.bytestr(data)))
251 251 id, ps = data
252 252
253 253 files = []
254 254 filecontent = {}
255 255
256 256 p2 = None
257 257 if mergeable_file:
258 258 fn = b"mf"
259 259 p1 = repo[ps[0]]
260 260 if len(ps) > 1:
261 261 p2 = repo[ps[1]]
262 262 pa = p1.ancestor(p2)
263 263 base, local, other = [
264 264 x[fn].data() for x in (pa, p1, p2)
265 265 ]
266 266 m3 = simplemerge.Merge3Text(base, local, other)
267 267 ml = [l.strip() for l in m3.merge_lines()]
268 268 ml.append(b"")
269 269 elif at > 0:
270 270 ml = p1[fn].data().split(b"\n")
271 271 else:
272 272 ml = initialmergedlines
273 273 ml[id * linesperrev] += b" r%i" % id
274 274 mergedtext = b"\n".join(ml)
275 275 files.append(fn)
276 276 filecontent[fn] = mergedtext
277 277
278 278 if overwritten_file:
279 279 fn = b"of"
280 280 files.append(fn)
281 281 filecontent[fn] = b"r%i\n" % id
282 282
283 283 if new_file:
284 284 fn = b"nf%i" % id
285 285 files.append(fn)
286 286 filecontent[fn] = b"r%i\n" % id
287 287 if len(ps) > 1:
288 288 if not p2:
289 289 p2 = repo[ps[1]]
290 290 for fn in p2:
291 291 if fn.startswith(b"nf"):
292 292 files.append(fn)
293 293 filecontent[fn] = p2[fn].data()
294 294
295 295 def fctxfn(repo, cx, path):
296 296 if path in filecontent:
297 297 return context.memfilectx(
298 298 repo, cx, path, filecontent[path]
299 299 )
300 300 return None
301 301
302 302 if len(ps) == 0 or ps[0] < 0:
303 303 pars = [None, None]
304 304 elif len(ps) == 1:
305 305 pars = [nodeids[ps[0]], None]
306 306 else:
307 307 pars = [nodeids[p] for p in ps]
308 308 cx = context.memctx(
309 309 repo,
310 310 pars,
311 311 b"r%i" % id,
312 312 files,
313 313 fctxfn,
314 314 date=(id, 0),
315 315 user=b"debugbuilddag",
316 316 extra={b'branch': atbranch},
317 317 )
318 318 nodeid = repo.commitctx(cx)
319 319 nodeids.append(nodeid)
320 320 at = id
321 321 elif type == b'l':
322 322 id, name = data
323 323 ui.note((b'tag %s\n' % name))
324 324 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
325 325 elif type == b'a':
326 326 ui.note((b'branch %s\n' % data))
327 327 atbranch = data
328 328 progress.update(id)
329 329
330 330 if tags:
331 331 repo.vfs.write(b"localtags", b"".join(tags))
332 332
333 333
334 334 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
335 335 indent_string = b' ' * indent
336 336 if all:
337 337 ui.writenoi18n(
338 338 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
339 339 % indent_string
340 340 )
341 341
342 342 def showchunks(named):
343 343 ui.write(b"\n%s%s\n" % (indent_string, named))
344 344 for deltadata in gen.deltaiter():
345 345 node, p1, p2, cs, deltabase, delta, flags = deltadata
346 346 ui.write(
347 347 b"%s%s %s %s %s %s %d\n"
348 348 % (
349 349 indent_string,
350 350 hex(node),
351 351 hex(p1),
352 352 hex(p2),
353 353 hex(cs),
354 354 hex(deltabase),
355 355 len(delta),
356 356 )
357 357 )
358 358
359 359 gen.changelogheader()
360 360 showchunks(b"changelog")
361 361 gen.manifestheader()
362 362 showchunks(b"manifest")
363 363 for chunkdata in iter(gen.filelogheader, {}):
364 364 fname = chunkdata[b'filename']
365 365 showchunks(fname)
366 366 else:
367 367 if isinstance(gen, bundle2.unbundle20):
368 368 raise error.Abort(_(b'use debugbundle2 for this file'))
369 369 gen.changelogheader()
370 370 for deltadata in gen.deltaiter():
371 371 node, p1, p2, cs, deltabase, delta, flags = deltadata
372 372 ui.write(b"%s%s\n" % (indent_string, hex(node)))
373 373
374 374
375 375 def _debugobsmarkers(ui, part, indent=0, **opts):
376 376 """display version and markers contained in 'data'"""
377 377 opts = pycompat.byteskwargs(opts)
378 378 data = part.read()
379 379 indent_string = b' ' * indent
380 380 try:
381 381 version, markers = obsolete._readmarkers(data)
382 382 except error.UnknownVersion as exc:
383 383 msg = b"%sunsupported version: %s (%d bytes)\n"
384 384 msg %= indent_string, exc.version, len(data)
385 385 ui.write(msg)
386 386 else:
387 387 msg = b"%sversion: %d (%d bytes)\n"
388 388 msg %= indent_string, version, len(data)
389 389 ui.write(msg)
390 390 fm = ui.formatter(b'debugobsolete', opts)
391 391 for rawmarker in sorted(markers):
392 392 m = obsutil.marker(None, rawmarker)
393 393 fm.startitem()
394 394 fm.plain(indent_string)
395 395 cmdutil.showmarker(fm, m)
396 396 fm.end()
397 397
398 398
399 399 def _debugphaseheads(ui, data, indent=0):
400 400 """display version and markers contained in 'data'"""
401 401 indent_string = b' ' * indent
402 402 headsbyphase = phases.binarydecode(data)
403 403 for phase in phases.allphases:
404 404 for head in headsbyphase[phase]:
405 405 ui.write(indent_string)
406 406 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
407 407
408 408
409 409 def _quasirepr(thing):
410 410 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
411 411 return b'{%s}' % (
412 412 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
413 413 )
414 414 return pycompat.bytestr(repr(thing))
415 415
416 416
417 417 def _debugbundle2(ui, gen, all=None, **opts):
418 418 """lists the contents of a bundle2"""
419 419 if not isinstance(gen, bundle2.unbundle20):
420 420 raise error.Abort(_(b'not a bundle2 file'))
421 421 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
422 422 parttypes = opts.get('part_type', [])
423 423 for part in gen.iterparts():
424 424 if parttypes and part.type not in parttypes:
425 425 continue
426 426 msg = b'%s -- %s (mandatory: %r)\n'
427 427 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
428 428 if part.type == b'changegroup':
429 429 version = part.params.get(b'version', b'01')
430 430 cg = changegroup.getunbundler(version, part, b'UN')
431 431 if not ui.quiet:
432 432 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
433 433 if part.type == b'obsmarkers':
434 434 if not ui.quiet:
435 435 _debugobsmarkers(ui, part, indent=4, **opts)
436 436 if part.type == b'phase-heads':
437 437 if not ui.quiet:
438 438 _debugphaseheads(ui, part, indent=4)
439 439
440 440
441 441 @command(
442 442 b'debugbundle',
443 443 [
444 444 (b'a', b'all', None, _(b'show all details')),
445 445 (b'', b'part-type', [], _(b'show only the named part type')),
446 446 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
447 447 ],
448 448 _(b'FILE'),
449 449 norepo=True,
450 450 )
451 451 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
452 452 """lists the contents of a bundle"""
453 453 with hg.openpath(ui, bundlepath) as f:
454 454 if spec:
455 455 spec = exchange.getbundlespec(ui, f)
456 456 ui.write(b'%s\n' % spec)
457 457 return
458 458
459 459 gen = exchange.readbundle(ui, f, bundlepath)
460 460 if isinstance(gen, bundle2.unbundle20):
461 461 return _debugbundle2(ui, gen, all=all, **opts)
462 462 _debugchangegroup(ui, gen, all=all, **opts)
463 463
464 464
465 465 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
466 466 def debugcapabilities(ui, path, **opts):
467 467 """lists the capabilities of a remote peer"""
468 468 opts = pycompat.byteskwargs(opts)
469 469 peer = hg.peer(ui, opts, path)
470 470 caps = peer.capabilities()
471 471 ui.writenoi18n(b'Main capabilities:\n')
472 472 for c in sorted(caps):
473 473 ui.write(b' %s\n' % c)
474 474 b2caps = bundle2.bundle2caps(peer)
475 475 if b2caps:
476 476 ui.writenoi18n(b'Bundle2 capabilities:\n')
477 477 for key, values in sorted(pycompat.iteritems(b2caps)):
478 478 ui.write(b' %s\n' % key)
479 479 for v in values:
480 480 ui.write(b' %s\n' % v)
481 481
482 482
483 483 @command(b'debugchangedfiles', [], b'REV')
484 484 def debugchangedfiles(ui, repo, rev):
485 485 """list the stored files changes for a revision"""
486 486 ctx = scmutil.revsingle(repo, rev, None)
487 487 sd = repo.changelog.sidedata(ctx.rev())
488 488 files_block = sd.get(sidedata.SD_FILES)
489 489 if files_block is not None:
490 490 files = metadata.decode_files_sidedata(sd)
491 491 for f in sorted(files.touched):
492 492 if f in files.added:
493 493 action = b"added"
494 494 elif f in files.removed:
495 495 action = b"removed"
496 496 elif f in files.merged:
497 497 action = b"merged"
498 498 elif f in files.salvaged:
499 499 action = b"salvaged"
500 500 else:
501 501 action = b"touched"
502 502
503 503 copy_parent = b""
504 504 copy_source = b""
505 505 if f in files.copied_from_p1:
506 506 copy_parent = b"p1"
507 507 copy_source = files.copied_from_p1[f]
508 508 elif f in files.copied_from_p2:
509 509 copy_parent = b"p2"
510 510 copy_source = files.copied_from_p2[f]
511 511
512 512 data = (action, copy_parent, f, copy_source)
513 513 template = b"%-8s %2s: %s, %s;\n"
514 514 ui.write(template % data)
515 515
516 516
517 517 @command(b'debugcheckstate', [], b'')
518 518 def debugcheckstate(ui, repo):
519 519 """validate the correctness of the current dirstate"""
520 520 parent1, parent2 = repo.dirstate.parents()
521 521 m1 = repo[parent1].manifest()
522 522 m2 = repo[parent2].manifest()
523 523 errors = 0
524 524 for f in repo.dirstate:
525 525 state = repo.dirstate[f]
526 526 if state in b"nr" and f not in m1:
527 527 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
528 528 errors += 1
529 529 if state in b"a" and f in m1:
530 530 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
531 531 errors += 1
532 532 if state in b"m" and f not in m1 and f not in m2:
533 533 ui.warn(
534 534 _(b"%s in state %s, but not in either manifest\n") % (f, state)
535 535 )
536 536 errors += 1
537 537 for f in m1:
538 538 state = repo.dirstate[f]
539 539 if state not in b"nrm":
540 540 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
541 541 errors += 1
542 542 if errors:
543 543 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
544 544 raise error.Abort(errstr)
545 545
546 546
547 547 @command(
548 548 b'debugcolor',
549 549 [(b'', b'style', None, _(b'show all configured styles'))],
550 550 b'hg debugcolor',
551 551 )
552 552 def debugcolor(ui, repo, **opts):
553 553 """show available color, effects or style"""
554 554 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
555 555 if opts.get('style'):
556 556 return _debugdisplaystyle(ui)
557 557 else:
558 558 return _debugdisplaycolor(ui)
559 559
560 560
561 561 def _debugdisplaycolor(ui):
562 562 ui = ui.copy()
563 563 ui._styles.clear()
564 564 for effect in color._activeeffects(ui).keys():
565 565 ui._styles[effect] = effect
566 566 if ui._terminfoparams:
567 567 for k, v in ui.configitems(b'color'):
568 568 if k.startswith(b'color.'):
569 569 ui._styles[k] = k[6:]
570 570 elif k.startswith(b'terminfo.'):
571 571 ui._styles[k] = k[9:]
572 572 ui.write(_(b'available colors:\n'))
573 573 # sort label with a '_' after the other to group '_background' entry.
574 574 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
575 575 for colorname, label in items:
576 576 ui.write(b'%s\n' % colorname, label=label)
577 577
578 578
579 579 def _debugdisplaystyle(ui):
580 580 ui.write(_(b'available style:\n'))
581 581 if not ui._styles:
582 582 return
583 583 width = max(len(s) for s in ui._styles)
584 584 for label, effects in sorted(ui._styles.items()):
585 585 ui.write(b'%s' % label, label=label)
586 586 if effects:
587 587 # 50
588 588 ui.write(b': ')
589 589 ui.write(b' ' * (max(0, width - len(label))))
590 590 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
591 591 ui.write(b'\n')
592 592
593 593
594 594 @command(b'debugcreatestreamclonebundle', [], b'FILE')
595 595 def debugcreatestreamclonebundle(ui, repo, fname):
596 596 """create a stream clone bundle file
597 597
598 598 Stream bundles are special bundles that are essentially archives of
599 599 revlog files. They are commonly used for cloning very quickly.
600 600 """
601 601 # TODO we may want to turn this into an abort when this functionality
602 602 # is moved into `hg bundle`.
603 603 if phases.hassecret(repo):
604 604 ui.warn(
605 605 _(
606 606 b'(warning: stream clone bundle will contain secret '
607 607 b'revisions)\n'
608 608 )
609 609 )
610 610
611 611 requirements, gen = streamclone.generatebundlev1(repo)
612 612 changegroup.writechunks(ui, gen, fname)
613 613
614 614 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
615 615
616 616
617 617 @command(
618 618 b'debugdag',
619 619 [
620 620 (b't', b'tags', None, _(b'use tags as labels')),
621 621 (b'b', b'branches', None, _(b'annotate with branch names')),
622 622 (b'', b'dots', None, _(b'use dots for runs')),
623 623 (b's', b'spaces', None, _(b'separate elements by spaces')),
624 624 ],
625 625 _(b'[OPTION]... [FILE [REV]...]'),
626 626 optionalrepo=True,
627 627 )
628 628 def debugdag(ui, repo, file_=None, *revs, **opts):
629 629 """format the changelog or an index DAG as a concise textual description
630 630
631 631 If you pass a revlog index, the revlog's DAG is emitted. If you list
632 632 revision numbers, they get labeled in the output as rN.
633 633
634 634 Otherwise, the changelog DAG of the current repo is emitted.
635 635 """
636 636 spaces = opts.get('spaces')
637 637 dots = opts.get('dots')
638 638 if file_:
639 639 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
640 640 revs = {int(r) for r in revs}
641 641
642 642 def events():
643 643 for r in rlog:
644 644 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
645 645 if r in revs:
646 646 yield b'l', (r, b"r%i" % r)
647 647
648 648 elif repo:
649 649 cl = repo.changelog
650 650 tags = opts.get('tags')
651 651 branches = opts.get('branches')
652 652 if tags:
653 653 labels = {}
654 654 for l, n in repo.tags().items():
655 655 labels.setdefault(cl.rev(n), []).append(l)
656 656
657 657 def events():
658 658 b = b"default"
659 659 for r in cl:
660 660 if branches:
661 661 newb = cl.read(cl.node(r))[5][b'branch']
662 662 if newb != b:
663 663 yield b'a', newb
664 664 b = newb
665 665 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
666 666 if tags:
667 667 ls = labels.get(r)
668 668 if ls:
669 669 for l in ls:
670 670 yield b'l', (r, l)
671 671
672 672 else:
673 673 raise error.Abort(_(b'need repo for changelog dag'))
674 674
675 675 for line in dagparser.dagtextlines(
676 676 events(),
677 677 addspaces=spaces,
678 678 wraplabels=True,
679 679 wrapannotations=True,
680 680 wrapnonlinear=dots,
681 681 usedots=dots,
682 682 maxlinewidth=70,
683 683 ):
684 684 ui.write(line)
685 685 ui.write(b"\n")
686 686
687 687
688 688 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
689 689 def debugdata(ui, repo, file_, rev=None, **opts):
690 690 """dump the contents of a data file revision"""
691 691 opts = pycompat.byteskwargs(opts)
692 692 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
693 693 if rev is not None:
694 694 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
695 695 file_, rev = None, file_
696 696 elif rev is None:
697 697 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
698 698 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
699 699 try:
700 700 ui.write(r.rawdata(r.lookup(rev)))
701 701 except KeyError:
702 702 raise error.Abort(_(b'invalid revision identifier %s') % rev)
703 703
704 704
705 705 @command(
706 706 b'debugdate',
707 707 [(b'e', b'extended', None, _(b'try extended date formats'))],
708 708 _(b'[-e] DATE [RANGE]'),
709 709 norepo=True,
710 710 optionalrepo=True,
711 711 )
712 712 def debugdate(ui, date, range=None, **opts):
713 713 """parse and display a date"""
714 714 if opts["extended"]:
715 715 d = dateutil.parsedate(date, dateutil.extendeddateformats)
716 716 else:
717 717 d = dateutil.parsedate(date)
718 718 ui.writenoi18n(b"internal: %d %d\n" % d)
719 719 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
720 720 if range:
721 721 m = dateutil.matchdate(range)
722 722 ui.writenoi18n(b"match: %s\n" % m(d[0]))
723 723
724 724
725 725 @command(
726 726 b'debugdeltachain',
727 727 cmdutil.debugrevlogopts + cmdutil.formatteropts,
728 728 _(b'-c|-m|FILE'),
729 729 optionalrepo=True,
730 730 )
731 731 def debugdeltachain(ui, repo, file_=None, **opts):
732 732 """dump information about delta chains in a revlog
733 733
734 734 Output can be templatized. Available template keywords are:
735 735
736 736 :``rev``: revision number
737 737 :``chainid``: delta chain identifier (numbered by unique base)
738 738 :``chainlen``: delta chain length to this revision
739 739 :``prevrev``: previous revision in delta chain
740 740 :``deltatype``: role of delta / how it was computed
741 741 :``compsize``: compressed size of revision
742 742 :``uncompsize``: uncompressed size of revision
743 743 :``chainsize``: total size of compressed revisions in chain
744 744 :``chainratio``: total chain size divided by uncompressed revision size
745 745 (new delta chains typically start at ratio 2.00)
746 746 :``lindist``: linear distance from base revision in delta chain to end
747 747 of this revision
748 748 :``extradist``: total size of revisions not part of this delta chain from
749 749 base of delta chain to end of this revision; a measurement
750 750 of how much extra data we need to read/seek across to read
751 751 the delta chain for this revision
752 752 :``extraratio``: extradist divided by chainsize; another representation of
753 753 how much unrelated data is needed to load this delta chain
754 754
755 755 If the repository is configured to use the sparse read, additional keywords
756 756 are available:
757 757
758 758 :``readsize``: total size of data read from the disk for a revision
759 759 (sum of the sizes of all the blocks)
760 760 :``largestblock``: size of the largest block of data read from the disk
761 761 :``readdensity``: density of useful bytes in the data read from the disk
762 762 :``srchunks``: in how many data hunks the whole revision would be read
763 763
764 764 The sparse read can be enabled with experimental.sparse-read = True
765 765 """
766 766 opts = pycompat.byteskwargs(opts)
767 767 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
768 768 index = r.index
769 769 start = r.start
770 770 length = r.length
771 771 generaldelta = r.version & revlog.FLAG_GENERALDELTA
772 772 withsparseread = getattr(r, '_withsparseread', False)
773 773
774 774 def revinfo(rev):
775 775 e = index[rev]
776 776 compsize = e[1]
777 777 uncompsize = e[2]
778 778 chainsize = 0
779 779
780 780 if generaldelta:
781 781 if e[3] == e[5]:
782 782 deltatype = b'p1'
783 783 elif e[3] == e[6]:
784 784 deltatype = b'p2'
785 785 elif e[3] == rev - 1:
786 786 deltatype = b'prev'
787 787 elif e[3] == rev:
788 788 deltatype = b'base'
789 789 else:
790 790 deltatype = b'other'
791 791 else:
792 792 if e[3] == rev:
793 793 deltatype = b'base'
794 794 else:
795 795 deltatype = b'prev'
796 796
797 797 chain = r._deltachain(rev)[0]
798 798 for iterrev in chain:
799 799 e = index[iterrev]
800 800 chainsize += e[1]
801 801
802 802 return compsize, uncompsize, deltatype, chain, chainsize
803 803
804 804 fm = ui.formatter(b'debugdeltachain', opts)
805 805
806 806 fm.plain(
807 807 b' rev chain# chainlen prev delta '
808 808 b'size rawsize chainsize ratio lindist extradist '
809 809 b'extraratio'
810 810 )
811 811 if withsparseread:
812 812 fm.plain(b' readsize largestblk rddensity srchunks')
813 813 fm.plain(b'\n')
814 814
815 815 chainbases = {}
816 816 for rev in r:
817 817 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
818 818 chainbase = chain[0]
819 819 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
820 820 basestart = start(chainbase)
821 821 revstart = start(rev)
822 822 lineardist = revstart + comp - basestart
823 823 extradist = lineardist - chainsize
824 824 try:
825 825 prevrev = chain[-2]
826 826 except IndexError:
827 827 prevrev = -1
828 828
829 829 if uncomp != 0:
830 830 chainratio = float(chainsize) / float(uncomp)
831 831 else:
832 832 chainratio = chainsize
833 833
834 834 if chainsize != 0:
835 835 extraratio = float(extradist) / float(chainsize)
836 836 else:
837 837 extraratio = extradist
838 838
839 839 fm.startitem()
840 840 fm.write(
841 841 b'rev chainid chainlen prevrev deltatype compsize '
842 842 b'uncompsize chainsize chainratio lindist extradist '
843 843 b'extraratio',
844 844 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
845 845 rev,
846 846 chainid,
847 847 len(chain),
848 848 prevrev,
849 849 deltatype,
850 850 comp,
851 851 uncomp,
852 852 chainsize,
853 853 chainratio,
854 854 lineardist,
855 855 extradist,
856 856 extraratio,
857 857 rev=rev,
858 858 chainid=chainid,
859 859 chainlen=len(chain),
860 860 prevrev=prevrev,
861 861 deltatype=deltatype,
862 862 compsize=comp,
863 863 uncompsize=uncomp,
864 864 chainsize=chainsize,
865 865 chainratio=chainratio,
866 866 lindist=lineardist,
867 867 extradist=extradist,
868 868 extraratio=extraratio,
869 869 )
870 870 if withsparseread:
871 871 readsize = 0
872 872 largestblock = 0
873 873 srchunks = 0
874 874
875 875 for revschunk in deltautil.slicechunk(r, chain):
876 876 srchunks += 1
877 877 blkend = start(revschunk[-1]) + length(revschunk[-1])
878 878 blksize = blkend - start(revschunk[0])
879 879
880 880 readsize += blksize
881 881 if largestblock < blksize:
882 882 largestblock = blksize
883 883
884 884 if readsize:
885 885 readdensity = float(chainsize) / float(readsize)
886 886 else:
887 887 readdensity = 1
888 888
889 889 fm.write(
890 890 b'readsize largestblock readdensity srchunks',
891 891 b' %10d %10d %9.5f %8d',
892 892 readsize,
893 893 largestblock,
894 894 readdensity,
895 895 srchunks,
896 896 readsize=readsize,
897 897 largestblock=largestblock,
898 898 readdensity=readdensity,
899 899 srchunks=srchunks,
900 900 )
901 901
902 902 fm.plain(b'\n')
903 903
904 904 fm.end()
905 905
906 906
907 907 @command(
908 908 b'debugdirstate|debugstate',
909 909 [
910 910 (
911 911 b'',
912 912 b'nodates',
913 913 None,
914 914 _(b'do not display the saved mtime (DEPRECATED)'),
915 915 ),
916 916 (b'', b'dates', True, _(b'display the saved mtime')),
917 917 (b'', b'datesort', None, _(b'sort by saved mtime')),
918 918 ],
919 919 _(b'[OPTION]...'),
920 920 )
921 921 def debugstate(ui, repo, **opts):
922 922 """show the contents of the current dirstate"""
923 923
924 924 nodates = not opts['dates']
925 925 if opts.get('nodates') is not None:
926 926 nodates = True
927 927 datesort = opts.get('datesort')
928 928
929 929 if datesort:
930 930 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
931 931 else:
932 932 keyfunc = None # sort by filename
933 933 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
934 934 if ent[3] == -1:
935 935 timestr = b'unset '
936 936 elif nodates:
937 937 timestr = b'set '
938 938 else:
939 939 timestr = time.strftime(
940 940 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
941 941 )
942 942 timestr = encoding.strtolocal(timestr)
943 943 if ent[1] & 0o20000:
944 944 mode = b'lnk'
945 945 else:
946 946 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
947 947 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
948 948 for f in repo.dirstate.copies():
949 949 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
950 950
951 951
952 952 @command(
953 953 b'debugdiscovery',
954 954 [
955 955 (b'', b'old', None, _(b'use old-style discovery')),
956 956 (
957 957 b'',
958 958 b'nonheads',
959 959 None,
960 960 _(b'use old-style discovery with non-heads included'),
961 961 ),
962 962 (b'', b'rev', [], b'restrict discovery to this set of revs'),
963 963 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
964 964 ]
965 965 + cmdutil.remoteopts,
966 966 _(b'[--rev REV] [OTHER]'),
967 967 )
968 968 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
969 969 """runs the changeset discovery protocol in isolation"""
970 970 opts = pycompat.byteskwargs(opts)
971 971 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
972 972 remote = hg.peer(repo, opts, remoteurl)
973 973 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
974 974
975 975 # make sure tests are repeatable
976 976 random.seed(int(opts[b'seed']))
977 977
978 978 if opts.get(b'old'):
979 979
980 980 def doit(pushedrevs, remoteheads, remote=remote):
981 981 if not util.safehasattr(remote, b'branches'):
982 982 # enable in-client legacy support
983 983 remote = localrepo.locallegacypeer(remote.local())
984 984 common, _in, hds = treediscovery.findcommonincoming(
985 985 repo, remote, force=True
986 986 )
987 987 common = set(common)
988 988 if not opts.get(b'nonheads'):
989 989 ui.writenoi18n(
990 990 b"unpruned common: %s\n"
991 991 % b" ".join(sorted(short(n) for n in common))
992 992 )
993 993
994 994 clnode = repo.changelog.node
995 995 common = repo.revs(b'heads(::%ln)', common)
996 996 common = {clnode(r) for r in common}
997 997 return common, hds
998 998
999 999 else:
1000 1000
1001 1001 def doit(pushedrevs, remoteheads, remote=remote):
1002 1002 nodes = None
1003 1003 if pushedrevs:
1004 1004 revs = scmutil.revrange(repo, pushedrevs)
1005 1005 nodes = [repo[r].node() for r in revs]
1006 1006 common, any, hds = setdiscovery.findcommonheads(
1007 1007 ui, repo, remote, ancestorsof=nodes
1008 1008 )
1009 1009 return common, hds
1010 1010
1011 1011 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1012 1012 localrevs = opts[b'rev']
1013 1013 with util.timedcm('debug-discovery') as t:
1014 1014 common, hds = doit(localrevs, remoterevs)
1015 1015
1016 1016 # compute all statistics
1017 1017 common = set(common)
1018 1018 rheads = set(hds)
1019 1019 lheads = set(repo.heads())
1020 1020
1021 1021 data = {}
1022 1022 data[b'elapsed'] = t.elapsed
1023 1023 data[b'nb-common'] = len(common)
1024 1024 data[b'nb-common-local'] = len(common & lheads)
1025 1025 data[b'nb-common-remote'] = len(common & rheads)
1026 1026 data[b'nb-common-both'] = len(common & rheads & lheads)
1027 1027 data[b'nb-local'] = len(lheads)
1028 1028 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
1029 1029 data[b'nb-remote'] = len(rheads)
1030 1030 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
1031 1031 data[b'nb-revs'] = len(repo.revs(b'all()'))
1032 1032 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
1033 1033 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
1034 1034
1035 1035 # display discovery summary
1036 1036 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1037 1037 ui.writenoi18n(b"heads summary:\n")
1038 1038 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
1039 1039 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
1040 1040 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
1041 1041 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
1042 1042 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
1043 1043 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
1044 1044 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
1045 1045 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
1046 1046 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
1047 1047 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
1048 1048 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1049 1049 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1050 1050 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1051 1051
1052 1052 if ui.verbose:
1053 1053 ui.writenoi18n(
1054 1054 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1055 1055 )
1056 1056
1057 1057
1058 1058 _chunksize = 4 << 10
1059 1059
1060 1060
1061 1061 @command(
1062 1062 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1063 1063 )
1064 1064 def debugdownload(ui, repo, url, output=None, **opts):
1065 1065 """download a resource using Mercurial logic and config
1066 1066 """
1067 1067 fh = urlmod.open(ui, url, output)
1068 1068
1069 1069 dest = ui
1070 1070 if output:
1071 1071 dest = open(output, b"wb", _chunksize)
1072 1072 try:
1073 1073 data = fh.read(_chunksize)
1074 1074 while data:
1075 1075 dest.write(data)
1076 1076 data = fh.read(_chunksize)
1077 1077 finally:
1078 1078 if output:
1079 1079 dest.close()
1080 1080
1081 1081
1082 1082 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1083 1083 def debugextensions(ui, repo, **opts):
1084 1084 '''show information about active extensions'''
1085 1085 opts = pycompat.byteskwargs(opts)
1086 1086 exts = extensions.extensions(ui)
1087 1087 hgver = util.version()
1088 1088 fm = ui.formatter(b'debugextensions', opts)
1089 1089 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1090 1090 isinternal = extensions.ismoduleinternal(extmod)
1091 1091 extsource = None
1092 1092
1093 1093 if util.safehasattr(extmod, '__file__'):
1094 1094 extsource = pycompat.fsencode(extmod.__file__)
1095 1095 elif getattr(sys, 'oxidized', False):
1096 1096 extsource = pycompat.sysexecutable
1097 1097 if isinternal:
1098 1098 exttestedwith = [] # never expose magic string to users
1099 1099 else:
1100 1100 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1101 1101 extbuglink = getattr(extmod, 'buglink', None)
1102 1102
1103 1103 fm.startitem()
1104 1104
1105 1105 if ui.quiet or ui.verbose:
1106 1106 fm.write(b'name', b'%s\n', extname)
1107 1107 else:
1108 1108 fm.write(b'name', b'%s', extname)
1109 1109 if isinternal or hgver in exttestedwith:
1110 1110 fm.plain(b'\n')
1111 1111 elif not exttestedwith:
1112 1112 fm.plain(_(b' (untested!)\n'))
1113 1113 else:
1114 1114 lasttestedversion = exttestedwith[-1]
1115 1115 fm.plain(b' (%s!)\n' % lasttestedversion)
1116 1116
1117 1117 fm.condwrite(
1118 1118 ui.verbose and extsource,
1119 1119 b'source',
1120 1120 _(b' location: %s\n'),
1121 1121 extsource or b"",
1122 1122 )
1123 1123
1124 1124 if ui.verbose:
1125 1125 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1126 1126 fm.data(bundled=isinternal)
1127 1127
1128 1128 fm.condwrite(
1129 1129 ui.verbose and exttestedwith,
1130 1130 b'testedwith',
1131 1131 _(b' tested with: %s\n'),
1132 1132 fm.formatlist(exttestedwith, name=b'ver'),
1133 1133 )
1134 1134
1135 1135 fm.condwrite(
1136 1136 ui.verbose and extbuglink,
1137 1137 b'buglink',
1138 1138 _(b' bug reporting: %s\n'),
1139 1139 extbuglink or b"",
1140 1140 )
1141 1141
1142 1142 fm.end()
1143 1143
1144 1144
1145 1145 @command(
1146 1146 b'debugfileset',
1147 1147 [
1148 1148 (
1149 1149 b'r',
1150 1150 b'rev',
1151 1151 b'',
1152 1152 _(b'apply the filespec on this revision'),
1153 1153 _(b'REV'),
1154 1154 ),
1155 1155 (
1156 1156 b'',
1157 1157 b'all-files',
1158 1158 False,
1159 1159 _(b'test files from all revisions and working directory'),
1160 1160 ),
1161 1161 (
1162 1162 b's',
1163 1163 b'show-matcher',
1164 1164 None,
1165 1165 _(b'print internal representation of matcher'),
1166 1166 ),
1167 1167 (
1168 1168 b'p',
1169 1169 b'show-stage',
1170 1170 [],
1171 1171 _(b'print parsed tree at the given stage'),
1172 1172 _(b'NAME'),
1173 1173 ),
1174 1174 ],
1175 1175 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1176 1176 )
1177 1177 def debugfileset(ui, repo, expr, **opts):
1178 1178 '''parse and apply a fileset specification'''
1179 1179 from . import fileset
1180 1180
1181 1181 fileset.symbols # force import of fileset so we have predicates to optimize
1182 1182 opts = pycompat.byteskwargs(opts)
1183 1183 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1184 1184
1185 1185 stages = [
1186 1186 (b'parsed', pycompat.identity),
1187 1187 (b'analyzed', filesetlang.analyze),
1188 1188 (b'optimized', filesetlang.optimize),
1189 1189 ]
1190 1190 stagenames = {n for n, f in stages}
1191 1191
1192 1192 showalways = set()
1193 1193 if ui.verbose and not opts[b'show_stage']:
1194 1194 # show parsed tree by --verbose (deprecated)
1195 1195 showalways.add(b'parsed')
1196 1196 if opts[b'show_stage'] == [b'all']:
1197 1197 showalways.update(stagenames)
1198 1198 else:
1199 1199 for n in opts[b'show_stage']:
1200 1200 if n not in stagenames:
1201 1201 raise error.Abort(_(b'invalid stage name: %s') % n)
1202 1202 showalways.update(opts[b'show_stage'])
1203 1203
1204 1204 tree = filesetlang.parse(expr)
1205 1205 for n, f in stages:
1206 1206 tree = f(tree)
1207 1207 if n in showalways:
1208 1208 if opts[b'show_stage'] or n != b'parsed':
1209 1209 ui.write(b"* %s:\n" % n)
1210 1210 ui.write(filesetlang.prettyformat(tree), b"\n")
1211 1211
1212 1212 files = set()
1213 1213 if opts[b'all_files']:
1214 1214 for r in repo:
1215 1215 c = repo[r]
1216 1216 files.update(c.files())
1217 1217 files.update(c.substate)
1218 1218 if opts[b'all_files'] or ctx.rev() is None:
1219 1219 wctx = repo[None]
1220 1220 files.update(
1221 1221 repo.dirstate.walk(
1222 1222 scmutil.matchall(repo),
1223 1223 subrepos=list(wctx.substate),
1224 1224 unknown=True,
1225 1225 ignored=True,
1226 1226 )
1227 1227 )
1228 1228 files.update(wctx.substate)
1229 1229 else:
1230 1230 files.update(ctx.files())
1231 1231 files.update(ctx.substate)
1232 1232
1233 1233 m = ctx.matchfileset(repo.getcwd(), expr)
1234 1234 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1235 1235 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1236 1236 for f in sorted(files):
1237 1237 if not m(f):
1238 1238 continue
1239 1239 ui.write(b"%s\n" % f)
1240 1240
1241 1241
1242 1242 @command(b'debugformat', [] + cmdutil.formatteropts)
1243 1243 def debugformat(ui, repo, **opts):
1244 1244 """display format information about the current repository
1245 1245
1246 1246 Use --verbose to get extra information about current config value and
1247 1247 Mercurial default."""
1248 1248 opts = pycompat.byteskwargs(opts)
1249 1249 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1250 1250 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1251 1251
1252 1252 def makeformatname(name):
1253 1253 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1254 1254
1255 1255 fm = ui.formatter(b'debugformat', opts)
1256 1256 if fm.isplain():
1257 1257
1258 1258 def formatvalue(value):
1259 1259 if util.safehasattr(value, b'startswith'):
1260 1260 return value
1261 1261 if value:
1262 1262 return b'yes'
1263 1263 else:
1264 1264 return b'no'
1265 1265
1266 1266 else:
1267 1267 formatvalue = pycompat.identity
1268 1268
1269 1269 fm.plain(b'format-variant')
1270 1270 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1271 1271 fm.plain(b' repo')
1272 1272 if ui.verbose:
1273 1273 fm.plain(b' config default')
1274 1274 fm.plain(b'\n')
1275 1275 for fv in upgrade.allformatvariant:
1276 1276 fm.startitem()
1277 1277 repovalue = fv.fromrepo(repo)
1278 1278 configvalue = fv.fromconfig(repo)
1279 1279
1280 1280 if repovalue != configvalue:
1281 1281 namelabel = b'formatvariant.name.mismatchconfig'
1282 1282 repolabel = b'formatvariant.repo.mismatchconfig'
1283 1283 elif repovalue != fv.default:
1284 1284 namelabel = b'formatvariant.name.mismatchdefault'
1285 1285 repolabel = b'formatvariant.repo.mismatchdefault'
1286 1286 else:
1287 1287 namelabel = b'formatvariant.name.uptodate'
1288 1288 repolabel = b'formatvariant.repo.uptodate'
1289 1289
1290 1290 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1291 1291 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1292 1292 if fv.default != configvalue:
1293 1293 configlabel = b'formatvariant.config.special'
1294 1294 else:
1295 1295 configlabel = b'formatvariant.config.default'
1296 1296 fm.condwrite(
1297 1297 ui.verbose,
1298 1298 b'config',
1299 1299 b' %6s',
1300 1300 formatvalue(configvalue),
1301 1301 label=configlabel,
1302 1302 )
1303 1303 fm.condwrite(
1304 1304 ui.verbose,
1305 1305 b'default',
1306 1306 b' %7s',
1307 1307 formatvalue(fv.default),
1308 1308 label=b'formatvariant.default',
1309 1309 )
1310 1310 fm.plain(b'\n')
1311 1311 fm.end()
1312 1312
1313 1313
1314 1314 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1315 1315 def debugfsinfo(ui, path=b"."):
1316 1316 """show information detected about current filesystem"""
1317 1317 ui.writenoi18n(b'path: %s\n' % path)
1318 1318 ui.writenoi18n(
1319 1319 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1320 1320 )
1321 1321 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1322 1322 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1323 1323 ui.writenoi18n(
1324 1324 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1325 1325 )
1326 1326 ui.writenoi18n(
1327 1327 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1328 1328 )
1329 1329 casesensitive = b'(unknown)'
1330 1330 try:
1331 1331 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1332 1332 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1333 1333 except OSError:
1334 1334 pass
1335 1335 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1336 1336
1337 1337
1338 1338 @command(
1339 1339 b'debuggetbundle',
1340 1340 [
1341 1341 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1342 1342 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1343 1343 (
1344 1344 b't',
1345 1345 b'type',
1346 1346 b'bzip2',
1347 1347 _(b'bundle compression type to use'),
1348 1348 _(b'TYPE'),
1349 1349 ),
1350 1350 ],
1351 1351 _(b'REPO FILE [-H|-C ID]...'),
1352 1352 norepo=True,
1353 1353 )
1354 1354 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1355 1355 """retrieves a bundle from a repo
1356 1356
1357 1357 Every ID must be a full-length hex node id string. Saves the bundle to the
1358 1358 given file.
1359 1359 """
1360 1360 opts = pycompat.byteskwargs(opts)
1361 1361 repo = hg.peer(ui, opts, repopath)
1362 1362 if not repo.capable(b'getbundle'):
1363 1363 raise error.Abort(b"getbundle() not supported by target repository")
1364 1364 args = {}
1365 1365 if common:
1366 1366 args['common'] = [bin(s) for s in common]
1367 1367 if head:
1368 1368 args['heads'] = [bin(s) for s in head]
1369 1369 # TODO: get desired bundlecaps from command line.
1370 1370 args['bundlecaps'] = None
1371 1371 bundle = repo.getbundle(b'debug', **args)
1372 1372
1373 1373 bundletype = opts.get(b'type', b'bzip2').lower()
1374 1374 btypes = {
1375 1375 b'none': b'HG10UN',
1376 1376 b'bzip2': b'HG10BZ',
1377 1377 b'gzip': b'HG10GZ',
1378 1378 b'bundle2': b'HG20',
1379 1379 }
1380 1380 bundletype = btypes.get(bundletype)
1381 1381 if bundletype not in bundle2.bundletypes:
1382 1382 raise error.Abort(_(b'unknown bundle type specified with --type'))
1383 1383 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1384 1384
1385 1385
1386 1386 @command(b'debugignore', [], b'[FILE]')
1387 1387 def debugignore(ui, repo, *files, **opts):
1388 1388 """display the combined ignore pattern and information about ignored files
1389 1389
1390 1390 With no argument display the combined ignore pattern.
1391 1391
1392 1392 Given space separated file names, shows if the given file is ignored and
1393 1393 if so, show the ignore rule (file and line number) that matched it.
1394 1394 """
1395 1395 ignore = repo.dirstate._ignore
1396 1396 if not files:
1397 1397 # Show all the patterns
1398 1398 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1399 1399 else:
1400 1400 m = scmutil.match(repo[None], pats=files)
1401 1401 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1402 1402 for f in m.files():
1403 1403 nf = util.normpath(f)
1404 1404 ignored = None
1405 1405 ignoredata = None
1406 1406 if nf != b'.':
1407 1407 if ignore(nf):
1408 1408 ignored = nf
1409 1409 ignoredata = repo.dirstate._ignorefileandline(nf)
1410 1410 else:
1411 1411 for p in pathutil.finddirs(nf):
1412 1412 if ignore(p):
1413 1413 ignored = p
1414 1414 ignoredata = repo.dirstate._ignorefileandline(p)
1415 1415 break
1416 1416 if ignored:
1417 1417 if ignored == nf:
1418 1418 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1419 1419 else:
1420 1420 ui.write(
1421 1421 _(
1422 1422 b"%s is ignored because of "
1423 1423 b"containing directory %s\n"
1424 1424 )
1425 1425 % (uipathfn(f), ignored)
1426 1426 )
1427 1427 ignorefile, lineno, line = ignoredata
1428 1428 ui.write(
1429 1429 _(b"(ignore rule in %s, line %d: '%s')\n")
1430 1430 % (ignorefile, lineno, line)
1431 1431 )
1432 1432 else:
1433 1433 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1434 1434
1435 1435
1436 1436 @command(
1437 1437 b'debugindex',
1438 1438 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1439 1439 _(b'-c|-m|FILE'),
1440 1440 )
1441 1441 def debugindex(ui, repo, file_=None, **opts):
1442 1442 """dump index data for a storage primitive"""
1443 1443 opts = pycompat.byteskwargs(opts)
1444 1444 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1445 1445
1446 1446 if ui.debugflag:
1447 1447 shortfn = hex
1448 1448 else:
1449 1449 shortfn = short
1450 1450
1451 1451 idlen = 12
1452 1452 for i in store:
1453 1453 idlen = len(shortfn(store.node(i)))
1454 1454 break
1455 1455
1456 1456 fm = ui.formatter(b'debugindex', opts)
1457 1457 fm.plain(
1458 1458 b' rev linkrev %s %s p2\n'
1459 1459 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1460 1460 )
1461 1461
1462 1462 for rev in store:
1463 1463 node = store.node(rev)
1464 1464 parents = store.parents(node)
1465 1465
1466 1466 fm.startitem()
1467 1467 fm.write(b'rev', b'%6d ', rev)
1468 1468 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1469 1469 fm.write(b'node', b'%s ', shortfn(node))
1470 1470 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1471 1471 fm.write(b'p2', b'%s', shortfn(parents[1]))
1472 1472 fm.plain(b'\n')
1473 1473
1474 1474 fm.end()
1475 1475
1476 1476
1477 1477 @command(
1478 1478 b'debugindexdot',
1479 1479 cmdutil.debugrevlogopts,
1480 1480 _(b'-c|-m|FILE'),
1481 1481 optionalrepo=True,
1482 1482 )
1483 1483 def debugindexdot(ui, repo, file_=None, **opts):
1484 1484 """dump an index DAG as a graphviz dot file"""
1485 1485 opts = pycompat.byteskwargs(opts)
1486 1486 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1487 1487 ui.writenoi18n(b"digraph G {\n")
1488 1488 for i in r:
1489 1489 node = r.node(i)
1490 1490 pp = r.parents(node)
1491 1491 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1492 1492 if pp[1] != nullid:
1493 1493 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1494 1494 ui.write(b"}\n")
1495 1495
1496 1496
1497 1497 @command(b'debugindexstats', [])
1498 1498 def debugindexstats(ui, repo):
1499 1499 """show stats related to the changelog index"""
1500 1500 repo.changelog.shortest(nullid, 1)
1501 1501 index = repo.changelog.index
1502 1502 if not util.safehasattr(index, b'stats'):
1503 1503 raise error.Abort(_(b'debugindexstats only works with native code'))
1504 1504 for k, v in sorted(index.stats().items()):
1505 1505 ui.write(b'%s: %d\n' % (k, v))
1506 1506
1507 1507
1508 1508 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1509 1509 def debuginstall(ui, **opts):
1510 1510 '''test Mercurial installation
1511 1511
1512 1512 Returns 0 on success.
1513 1513 '''
1514 1514 opts = pycompat.byteskwargs(opts)
1515 1515
1516 1516 problems = 0
1517 1517
1518 1518 fm = ui.formatter(b'debuginstall', opts)
1519 1519 fm.startitem()
1520 1520
1521 1521 # encoding might be unknown or wrong. don't translate these messages.
1522 1522 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1523 1523 err = None
1524 1524 try:
1525 1525 codecs.lookup(pycompat.sysstr(encoding.encoding))
1526 1526 except LookupError as inst:
1527 1527 err = stringutil.forcebytestr(inst)
1528 1528 problems += 1
1529 1529 fm.condwrite(
1530 1530 err,
1531 1531 b'encodingerror',
1532 1532 b" %s\n (check that your locale is properly set)\n",
1533 1533 err,
1534 1534 )
1535 1535
1536 1536 # Python
1537 1537 pythonlib = None
1538 1538 if util.safehasattr(os, '__file__'):
1539 1539 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1540 1540 elif getattr(sys, 'oxidized', False):
1541 1541 pythonlib = pycompat.sysexecutable
1542 1542
1543 1543 fm.write(
1544 1544 b'pythonexe',
1545 1545 _(b"checking Python executable (%s)\n"),
1546 1546 pycompat.sysexecutable or _(b"unknown"),
1547 1547 )
1548 1548 fm.write(
1549 1549 b'pythonimplementation',
1550 1550 _(b"checking Python implementation (%s)\n"),
1551 1551 pycompat.sysbytes(platform.python_implementation()),
1552 1552 )
1553 1553 fm.write(
1554 1554 b'pythonver',
1555 1555 _(b"checking Python version (%s)\n"),
1556 1556 (b"%d.%d.%d" % sys.version_info[:3]),
1557 1557 )
1558 1558 fm.write(
1559 1559 b'pythonlib',
1560 1560 _(b"checking Python lib (%s)...\n"),
1561 1561 pythonlib or _(b"unknown"),
1562 1562 )
1563 1563
1564 1564 try:
1565 1565 from . import rustext
1566 1566
1567 1567 rustext.__doc__ # trigger lazy import
1568 1568 except ImportError:
1569 1569 rustext = None
1570 1570
1571 1571 security = set(sslutil.supportedprotocols)
1572 1572 if sslutil.hassni:
1573 1573 security.add(b'sni')
1574 1574
1575 1575 fm.write(
1576 1576 b'pythonsecurity',
1577 1577 _(b"checking Python security support (%s)\n"),
1578 1578 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1579 1579 )
1580 1580
1581 1581 # These are warnings, not errors. So don't increment problem count. This
1582 1582 # may change in the future.
1583 1583 if b'tls1.2' not in security:
1584 1584 fm.plain(
1585 1585 _(
1586 1586 b' TLS 1.2 not supported by Python install; '
1587 1587 b'network connections lack modern security\n'
1588 1588 )
1589 1589 )
1590 1590 if b'sni' not in security:
1591 1591 fm.plain(
1592 1592 _(
1593 1593 b' SNI not supported by Python install; may have '
1594 1594 b'connectivity issues with some servers\n'
1595 1595 )
1596 1596 )
1597 1597
1598 1598 fm.plain(
1599 1599 _(
1600 1600 b"checking Rust extensions (%s)\n"
1601 1601 % (b'missing' if rustext is None else b'installed')
1602 1602 ),
1603 1603 )
1604 1604
1605 1605 # TODO print CA cert info
1606 1606
1607 1607 # hg version
1608 1608 hgver = util.version()
1609 1609 fm.write(
1610 1610 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1611 1611 )
1612 1612 fm.write(
1613 1613 b'hgverextra',
1614 1614 _(b"checking Mercurial custom build (%s)\n"),
1615 1615 b'+'.join(hgver.split(b'+')[1:]),
1616 1616 )
1617 1617
1618 1618 # compiled modules
1619 1619 hgmodules = None
1620 1620 if util.safehasattr(sys.modules[__name__], '__file__'):
1621 1621 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1622 1622 elif getattr(sys, 'oxidized', False):
1623 1623 hgmodules = pycompat.sysexecutable
1624 1624
1625 1625 fm.write(
1626 1626 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1627 1627 )
1628 1628 fm.write(
1629 1629 b'hgmodules',
1630 1630 _(b"checking installed modules (%s)...\n"),
1631 1631 hgmodules or _(b"unknown"),
1632 1632 )
1633 1633
1634 1634 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1635 1635 rustext = rustandc # for now, that's the only case
1636 1636 cext = policy.policy in (b'c', b'allow') or rustandc
1637 1637 nopure = cext or rustext
1638 1638 if nopure:
1639 1639 err = None
1640 1640 try:
1641 1641 if cext:
1642 1642 from .cext import ( # pytype: disable=import-error
1643 1643 base85,
1644 1644 bdiff,
1645 1645 mpatch,
1646 1646 osutil,
1647 1647 )
1648 1648
1649 1649 # quiet pyflakes
1650 1650 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1651 1651 if rustext:
1652 1652 from .rustext import ( # pytype: disable=import-error
1653 1653 ancestor,
1654 1654 dirstate,
1655 1655 )
1656 1656
1657 1657 dir(ancestor), dir(dirstate) # quiet pyflakes
1658 1658 except Exception as inst:
1659 1659 err = stringutil.forcebytestr(inst)
1660 1660 problems += 1
1661 1661 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1662 1662
1663 1663 compengines = util.compengines._engines.values()
1664 1664 fm.write(
1665 1665 b'compengines',
1666 1666 _(b'checking registered compression engines (%s)\n'),
1667 1667 fm.formatlist(
1668 1668 sorted(e.name() for e in compengines),
1669 1669 name=b'compengine',
1670 1670 fmt=b'%s',
1671 1671 sep=b', ',
1672 1672 ),
1673 1673 )
1674 1674 fm.write(
1675 1675 b'compenginesavail',
1676 1676 _(b'checking available compression engines (%s)\n'),
1677 1677 fm.formatlist(
1678 1678 sorted(e.name() for e in compengines if e.available()),
1679 1679 name=b'compengine',
1680 1680 fmt=b'%s',
1681 1681 sep=b', ',
1682 1682 ),
1683 1683 )
1684 1684 wirecompengines = compression.compengines.supportedwireengines(
1685 1685 compression.SERVERROLE
1686 1686 )
1687 1687 fm.write(
1688 1688 b'compenginesserver',
1689 1689 _(
1690 1690 b'checking available compression engines '
1691 1691 b'for wire protocol (%s)\n'
1692 1692 ),
1693 1693 fm.formatlist(
1694 1694 [e.name() for e in wirecompengines if e.wireprotosupport()],
1695 1695 name=b'compengine',
1696 1696 fmt=b'%s',
1697 1697 sep=b', ',
1698 1698 ),
1699 1699 )
1700 1700 re2 = b'missing'
1701 1701 if util._re2:
1702 1702 re2 = b'available'
1703 1703 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1704 1704 fm.data(re2=bool(util._re2))
1705 1705
1706 1706 # templates
1707 1707 p = templater.templatedir()
1708 1708 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1709 1709 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1710 1710 if p:
1711 1711 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1712 1712 if m:
1713 1713 # template found, check if it is working
1714 1714 err = None
1715 1715 try:
1716 1716 templater.templater.frommapfile(m)
1717 1717 except Exception as inst:
1718 1718 err = stringutil.forcebytestr(inst)
1719 1719 p = None
1720 1720 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1721 1721 else:
1722 1722 p = None
1723 1723 fm.condwrite(
1724 1724 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1725 1725 )
1726 1726 fm.condwrite(
1727 1727 not m,
1728 1728 b'defaulttemplatenotfound',
1729 1729 _(b" template '%s' not found\n"),
1730 1730 b"default",
1731 1731 )
1732 1732 if not p:
1733 1733 problems += 1
1734 1734 fm.condwrite(
1735 1735 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1736 1736 )
1737 1737
1738 1738 # editor
1739 1739 editor = ui.geteditor()
1740 1740 editor = util.expandpath(editor)
1741 1741 editorbin = procutil.shellsplit(editor)[0]
1742 1742 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1743 1743 cmdpath = procutil.findexe(editorbin)
1744 1744 fm.condwrite(
1745 1745 not cmdpath and editor == b'vi',
1746 1746 b'vinotfound',
1747 1747 _(
1748 1748 b" No commit editor set and can't find %s in PATH\n"
1749 1749 b" (specify a commit editor in your configuration"
1750 1750 b" file)\n"
1751 1751 ),
1752 1752 not cmdpath and editor == b'vi' and editorbin,
1753 1753 )
1754 1754 fm.condwrite(
1755 1755 not cmdpath and editor != b'vi',
1756 1756 b'editornotfound',
1757 1757 _(
1758 1758 b" Can't find editor '%s' in PATH\n"
1759 1759 b" (specify a commit editor in your configuration"
1760 1760 b" file)\n"
1761 1761 ),
1762 1762 not cmdpath and editorbin,
1763 1763 )
1764 1764 if not cmdpath and editor != b'vi':
1765 1765 problems += 1
1766 1766
1767 1767 # check username
1768 1768 username = None
1769 1769 err = None
1770 1770 try:
1771 1771 username = ui.username()
1772 1772 except error.Abort as e:
1773 1773 err = e.message
1774 1774 problems += 1
1775 1775
1776 1776 fm.condwrite(
1777 1777 username, b'username', _(b"checking username (%s)\n"), username
1778 1778 )
1779 1779 fm.condwrite(
1780 1780 err,
1781 1781 b'usernameerror',
1782 1782 _(
1783 1783 b"checking username...\n %s\n"
1784 1784 b" (specify a username in your configuration file)\n"
1785 1785 ),
1786 1786 err,
1787 1787 )
1788 1788
1789 1789 for name, mod in extensions.extensions():
1790 1790 handler = getattr(mod, 'debuginstall', None)
1791 1791 if handler is not None:
1792 1792 problems += handler(ui, fm)
1793 1793
1794 1794 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1795 1795 if not problems:
1796 1796 fm.data(problems=problems)
1797 1797 fm.condwrite(
1798 1798 problems,
1799 1799 b'problems',
1800 1800 _(b"%d problems detected, please check your install!\n"),
1801 1801 problems,
1802 1802 )
1803 1803 fm.end()
1804 1804
1805 1805 return problems
1806 1806
1807 1807
1808 1808 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1809 1809 def debugknown(ui, repopath, *ids, **opts):
1810 1810 """test whether node ids are known to a repo
1811 1811
1812 1812 Every ID must be a full-length hex node id string. Returns a list of 0s
1813 1813 and 1s indicating unknown/known.
1814 1814 """
1815 1815 opts = pycompat.byteskwargs(opts)
1816 1816 repo = hg.peer(ui, opts, repopath)
1817 1817 if not repo.capable(b'known'):
1818 1818 raise error.Abort(b"known() not supported by target repository")
1819 1819 flags = repo.known([bin(s) for s in ids])
1820 1820 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1821 1821
1822 1822
1823 1823 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1824 1824 def debuglabelcomplete(ui, repo, *args):
1825 1825 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1826 1826 debugnamecomplete(ui, repo, *args)
1827 1827
1828 1828
1829 1829 @command(
1830 1830 b'debuglocks',
1831 1831 [
1832 1832 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1833 1833 (
1834 1834 b'W',
1835 1835 b'force-wlock',
1836 1836 None,
1837 1837 _(b'free the working state lock (DANGEROUS)'),
1838 1838 ),
1839 1839 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1840 1840 (
1841 1841 b'S',
1842 1842 b'set-wlock',
1843 1843 None,
1844 1844 _(b'set the working state lock until stopped'),
1845 1845 ),
1846 1846 ],
1847 1847 _(b'[OPTION]...'),
1848 1848 )
1849 1849 def debuglocks(ui, repo, **opts):
1850 1850 """show or modify state of locks
1851 1851
1852 1852 By default, this command will show which locks are held. This
1853 1853 includes the user and process holding the lock, the amount of time
1854 1854 the lock has been held, and the machine name where the process is
1855 1855 running if it's not local.
1856 1856
1857 1857 Locks protect the integrity of Mercurial's data, so should be
1858 1858 treated with care. System crashes or other interruptions may cause
1859 1859 locks to not be properly released, though Mercurial will usually
1860 1860 detect and remove such stale locks automatically.
1861 1861
1862 1862 However, detecting stale locks may not always be possible (for
1863 1863 instance, on a shared filesystem). Removing locks may also be
1864 1864 blocked by filesystem permissions.
1865 1865
1866 1866 Setting a lock will prevent other commands from changing the data.
1867 1867 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1868 1868 The set locks are removed when the command exits.
1869 1869
1870 1870 Returns 0 if no locks are held.
1871 1871
1872 1872 """
1873 1873
1874 1874 if opts.get('force_lock'):
1875 1875 repo.svfs.unlink(b'lock')
1876 1876 if opts.get('force_wlock'):
1877 1877 repo.vfs.unlink(b'wlock')
1878 1878 if opts.get('force_lock') or opts.get('force_wlock'):
1879 1879 return 0
1880 1880
1881 1881 locks = []
1882 1882 try:
1883 1883 if opts.get('set_wlock'):
1884 1884 try:
1885 1885 locks.append(repo.wlock(False))
1886 1886 except error.LockHeld:
1887 1887 raise error.Abort(_(b'wlock is already held'))
1888 1888 if opts.get('set_lock'):
1889 1889 try:
1890 1890 locks.append(repo.lock(False))
1891 1891 except error.LockHeld:
1892 1892 raise error.Abort(_(b'lock is already held'))
1893 1893 if len(locks):
1894 1894 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1895 1895 return 0
1896 1896 finally:
1897 1897 release(*locks)
1898 1898
1899 1899 now = time.time()
1900 1900 held = 0
1901 1901
1902 1902 def report(vfs, name, method):
1903 1903 # this causes stale locks to get reaped for more accurate reporting
1904 1904 try:
1905 1905 l = method(False)
1906 1906 except error.LockHeld:
1907 1907 l = None
1908 1908
1909 1909 if l:
1910 1910 l.release()
1911 1911 else:
1912 1912 try:
1913 1913 st = vfs.lstat(name)
1914 1914 age = now - st[stat.ST_MTIME]
1915 1915 user = util.username(st.st_uid)
1916 1916 locker = vfs.readlock(name)
1917 1917 if b":" in locker:
1918 1918 host, pid = locker.split(b':')
1919 1919 if host == socket.gethostname():
1920 1920 locker = b'user %s, process %s' % (user or b'None', pid)
1921 1921 else:
1922 1922 locker = b'user %s, process %s, host %s' % (
1923 1923 user or b'None',
1924 1924 pid,
1925 1925 host,
1926 1926 )
1927 1927 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1928 1928 return 1
1929 1929 except OSError as e:
1930 1930 if e.errno != errno.ENOENT:
1931 1931 raise
1932 1932
1933 1933 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1934 1934 return 0
1935 1935
1936 1936 held += report(repo.svfs, b"lock", repo.lock)
1937 1937 held += report(repo.vfs, b"wlock", repo.wlock)
1938 1938
1939 1939 return held
1940 1940
1941 1941
1942 1942 @command(
1943 1943 b'debugmanifestfulltextcache',
1944 1944 [
1945 1945 (b'', b'clear', False, _(b'clear the cache')),
1946 1946 (
1947 1947 b'a',
1948 1948 b'add',
1949 1949 [],
1950 1950 _(b'add the given manifest nodes to the cache'),
1951 1951 _(b'NODE'),
1952 1952 ),
1953 1953 ],
1954 1954 b'',
1955 1955 )
1956 1956 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1957 1957 """show, clear or amend the contents of the manifest fulltext cache"""
1958 1958
1959 1959 def getcache():
1960 1960 r = repo.manifestlog.getstorage(b'')
1961 1961 try:
1962 1962 return r._fulltextcache
1963 1963 except AttributeError:
1964 1964 msg = _(
1965 1965 b"Current revlog implementation doesn't appear to have a "
1966 1966 b"manifest fulltext cache\n"
1967 1967 )
1968 1968 raise error.Abort(msg)
1969 1969
1970 1970 if opts.get('clear'):
1971 1971 with repo.wlock():
1972 1972 cache = getcache()
1973 1973 cache.clear(clear_persisted_data=True)
1974 1974 return
1975 1975
1976 1976 if add:
1977 1977 with repo.wlock():
1978 1978 m = repo.manifestlog
1979 1979 store = m.getstorage(b'')
1980 1980 for n in add:
1981 1981 try:
1982 1982 manifest = m[store.lookup(n)]
1983 1983 except error.LookupError as e:
1984 1984 raise error.Abort(e, hint=b"Check your manifest node id")
1985 1985                 manifest.read() # stores revision in cache too
1986 1986 return
1987 1987
1988 1988 cache = getcache()
1989 1989 if not len(cache):
1990 1990 ui.write(_(b'cache empty\n'))
1991 1991 else:
1992 1992 ui.write(
1993 1993 _(
1994 1994 b'cache contains %d manifest entries, in order of most to '
1995 1995 b'least recent:\n'
1996 1996 )
1997 1997 % (len(cache),)
1998 1998 )
1999 1999 totalsize = 0
2000 2000 for nodeid in cache:
2001 2001             # Use cache.peek so the LRU order is not updated
2002 2002 data = cache.peek(nodeid)
2003 2003 size = len(data)
2004 2004 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2005 2005 ui.write(
2006 2006 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2007 2007 )
2008 2008 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2009 2009 ui.write(
2010 2010 _(b'total cache data size %s, on-disk %s\n')
2011 2011 % (util.bytecount(totalsize), util.bytecount(ondisk))
2012 2012 )
2013 2013
2014 2014
2015 2015 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2016 2016 def debugmergestate(ui, repo, *args, **opts):
2017 2017 """print merge state
2018 2018
2019 2019 Use --verbose to print out information about whether v1 or v2 merge state
2020 2020 was chosen."""
2021 2021
2022 2022 if ui.verbose:
2023 2023 ms = mergestatemod.mergestate(repo)
2024 2024
2025 2025 # sort so that reasonable information is on top
2026 2026 v1records = ms._readrecordsv1()
2027 2027 v2records = ms._readrecordsv2()
2028 2028
2029 2029 if not v1records and not v2records:
2030 2030 pass
2031 2031 elif not v2records:
2032 2032 ui.writenoi18n(b'no version 2 merge state\n')
2033 2033 elif ms._v1v2match(v1records, v2records):
2034 2034 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2035 2035 else:
2036 2036 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2037 2037
2038 2038 opts = pycompat.byteskwargs(opts)
2039 2039 if not opts[b'template']:
2040 2040 opts[b'template'] = (
2041 2041 b'{if(commits, "", "no merge state found\n")}'
2042 2042 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2043 2043 b'{files % "file: {path} (state \\"{state}\\")\n'
2044 2044 b'{if(local_path, "'
2045 2045 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2046 2046 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2047 2047 b' other path: {other_path} (node {other_node})\n'
2048 2048 b'")}'
2049 2049 b'{if(rename_side, "'
2050 2050 b' rename side: {rename_side}\n'
2051 2051 b' renamed path: {renamed_path}\n'
2052 2052 b'")}'
2053 2053 b'{extras % " extra: {key} = {value}\n"}'
2054 2054 b'"}'
2055 2055 b'{extras % "extra: {file} ({key} = {value})\n"}'
2056 2056 )
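        # Illustrative rendering of this default template (paths and nodes are
        # placeholders): "local: <node>", "other: <node>", then per-file blocks
        # such as 'file: foo.c (state "u")' followed by local/ancestor/other
        # path details and any extras.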
2057 2057
2058 2058 ms = mergestatemod.mergestate.read(repo)
2059 2059
2060 2060 fm = ui.formatter(b'debugmergestate', opts)
2061 2061 fm.startitem()
2062 2062
2063 2063 fm_commits = fm.nested(b'commits')
2064 2064 if ms.active():
2065 2065 for name, node, label_index in (
2066 2066 (b'local', ms.local, 0),
2067 2067 (b'other', ms.other, 1),
2068 2068 ):
2069 2069 fm_commits.startitem()
2070 2070 fm_commits.data(name=name)
2071 2071 fm_commits.data(node=hex(node))
2072 2072 if ms._labels and len(ms._labels) > label_index:
2073 2073 fm_commits.data(label=ms._labels[label_index])
2074 2074 fm_commits.end()
2075 2075
2076 2076 fm_files = fm.nested(b'files')
2077 2077 if ms.active():
2078 2078 for f in ms:
2079 2079 fm_files.startitem()
2080 2080 fm_files.data(path=f)
2081 2081 state = ms._state[f]
2082 2082 fm_files.data(state=state[0])
2083 2083 if state[0] in (
2084 2084 mergestatemod.MERGE_RECORD_UNRESOLVED,
2085 2085 mergestatemod.MERGE_RECORD_RESOLVED,
2086 2086 ):
2087 2087 fm_files.data(local_key=state[1])
2088 2088 fm_files.data(local_path=state[2])
2089 2089 fm_files.data(ancestor_path=state[3])
2090 2090 fm_files.data(ancestor_node=state[4])
2091 2091 fm_files.data(other_path=state[5])
2092 2092 fm_files.data(other_node=state[6])
2093 2093 fm_files.data(local_flags=state[7])
2094 2094 elif state[0] in (
2095 2095 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2096 2096 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2097 2097 ):
2098 2098 fm_files.data(renamed_path=state[1])
2099 2099 fm_files.data(rename_side=state[2])
2100 2100 fm_extras = fm_files.nested(b'extras')
2101 2101 for k, v in sorted(ms.extras(f).items()):
2102 2102 fm_extras.startitem()
2103 2103 fm_extras.data(key=k)
2104 2104 fm_extras.data(value=v)
2105 2105 fm_extras.end()
2106 2106
2107 2107 fm_files.end()
2108 2108
2109 2109 fm_extras = fm.nested(b'extras')
2110 2110 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2111 2111 if f in ms:
2112 2112             # If file is in mergestate, we have already processed its extras
2113 2113 continue
2114 2114 for k, v in pycompat.iteritems(d):
2115 2115 fm_extras.startitem()
2116 2116 fm_extras.data(file=f)
2117 2117 fm_extras.data(key=k)
2118 2118 fm_extras.data(value=v)
2119 2119 fm_extras.end()
2120 2120
2121 2121 fm.end()
2122 2122
2123 2123
2124 2124 @command(b'debugnamecomplete', [], _(b'NAME...'))
2125 2125 def debugnamecomplete(ui, repo, *args):
2126 2126 '''complete "names" - tags, open branch names, bookmark names'''
2127 2127
2128 2128 names = set()
2129 2129 # since we previously only listed open branches, we will handle that
2130 2130 # specially (after this for loop)
2131 2131 for name, ns in pycompat.iteritems(repo.names):
2132 2132 if name != b'branches':
2133 2133 names.update(ns.listnames(repo))
2134 2134 names.update(
2135 2135 tag
2136 2136 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2137 2137 if not closed
2138 2138 )
2139 2139 completions = set()
2140 2140 if not args:
2141 2141 args = [b'']
2142 2142 for a in args:
2143 2143 completions.update(n for n in names if n.startswith(a))
2144 2144 ui.write(b'\n'.join(sorted(completions)))
2145 2145 ui.write(b'\n')
2146 2146
2147 2147
2148 2148 @command(
2149 2149 b'debugnodemap',
2150 2150 [
2151 2151 (
2152 2152 b'',
2153 2153 b'dump-new',
2154 2154 False,
2155 2155             _(b'write a (new) persistent binary nodemap to stdout'),
2156 2156 ),
2157 2157         (b'', b'dump-disk', False, _(b'dump on-disk data to stdout')),
2158 2158 (
2159 2159 b'',
2160 2160 b'check',
2161 2161 False,
2162 2162             _(b'check that the data on disk are correct.'),
2163 2163 ),
2164 2164 (
2165 2165 b'',
2166 2166 b'metadata',
2167 2167 False,
2168 2168             _(b'display the on-disk metadata for the nodemap'),
2169 2169 ),
2170 2170 ],
2171 2171 )
2172 2172 def debugnodemap(ui, repo, **opts):
2173 2173 """write and inspect on disk nodemap
2174 2174 """
2175 2175 if opts['dump_new']:
2176 2176 unfi = repo.unfiltered()
2177 2177 cl = unfi.changelog
2178 2178 if util.safehasattr(cl.index, "nodemap_data_all"):
2179 2179 data = cl.index.nodemap_data_all()
2180 2180 else:
2181 2181 data = nodemap.persistent_data(cl.index)
2182 2182 ui.write(data)
2183 2183 elif opts['dump_disk']:
2184 2184 unfi = repo.unfiltered()
2185 2185 cl = unfi.changelog
2186 2186 nm_data = nodemap.persisted_data(cl)
2187 2187 if nm_data is not None:
2188 2188 docket, data = nm_data
2189 2189 ui.write(data[:])
2190 2190 elif opts['check']:
2191 2191 unfi = repo.unfiltered()
2192 2192 cl = unfi.changelog
2193 2193 nm_data = nodemap.persisted_data(cl)
2194 2194 if nm_data is not None:
2195 2195 docket, data = nm_data
2196 2196 return nodemap.check_data(ui, cl.index, data)
2197 2197 elif opts['metadata']:
2198 2198 unfi = repo.unfiltered()
2199 2199 cl = unfi.changelog
2200 2200 nm_data = nodemap.persisted_data(cl)
2201 2201 if nm_data is not None:
2202 2202 docket, data = nm_data
2203 2203 ui.write((b"uid: %s\n") % docket.uid)
2204 2204 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2205 2205 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2206 2206 ui.write((b"data-length: %d\n") % docket.data_length)
2207 2207 ui.write((b"data-unused: %d\n") % docket.data_unused)
2208 2208 unused_perc = docket.data_unused * 100.0 / docket.data_length
2209 2209 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2210 2210
2211 2211
2212 2212 @command(
2213 2213 b'debugobsolete',
2214 2214 [
2215 2215 (b'', b'flags', 0, _(b'markers flag')),
2216 2216 (
2217 2217 b'',
2218 2218 b'record-parents',
2219 2219 False,
2220 2220 _(b'record parent information for the precursor'),
2221 2221 ),
2222 2222 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2223 2223 (
2224 2224 b'',
2225 2225 b'exclusive',
2226 2226 False,
2227 2227 _(b'restrict display to markers only relevant to REV'),
2228 2228 ),
2229 2229 (b'', b'index', False, _(b'display index of the marker')),
2230 2230 (b'', b'delete', [], _(b'delete markers specified by indices')),
2231 2231 ]
2232 2232 + cmdutil.commitopts2
2233 2233 + cmdutil.formatteropts,
2234 2234 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2235 2235 )
2236 2236 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2237 2237 """create arbitrary obsolete marker
2238 2238
2239 2239 With no arguments, displays the list of obsolescence markers."""
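    # Usage sketch (nodes are placeholders): `hg debugobsolete OLDNODE NEWNODE`
    # records a marker obsoleting OLDNODE in favor of NEWNODE; both must be
    # full-length hexadecimal node ids, as enforced by parsenodeid() below.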
2240 2240
2241 2241 opts = pycompat.byteskwargs(opts)
2242 2242
2243 2243 def parsenodeid(s):
2244 2244 try:
2245 2245 # We do not use revsingle/revrange functions here to accept
2246 2246 # arbitrary node identifiers, possibly not present in the
2247 2247 # local repository.
2248 2248 n = bin(s)
2249 2249 if len(n) != len(nullid):
2250 2250 raise TypeError()
2251 2251 return n
2252 2252 except TypeError:
2253 2253 raise error.Abort(
2254 2254 b'changeset references must be full hexadecimal '
2255 2255 b'node identifiers'
2256 2256 )
2257 2257
2258 2258 if opts.get(b'delete'):
2259 2259 indices = []
2260 2260 for v in opts.get(b'delete'):
2261 2261 try:
2262 2262 indices.append(int(v))
2263 2263 except ValueError:
2264 2264 raise error.Abort(
2265 2265 _(b'invalid index value: %r') % v,
2266 2266 hint=_(b'use integers for indices'),
2267 2267 )
2268 2268
2269 2269 if repo.currenttransaction():
2270 2270 raise error.Abort(
2271 2271                 _(b'cannot delete obsmarkers in the middle of a transaction.')
2272 2272 )
2273 2273
2274 2274 with repo.lock():
2275 2275 n = repair.deleteobsmarkers(repo.obsstore, indices)
2276 2276 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2277 2277
2278 2278 return
2279 2279
2280 2280 if precursor is not None:
2281 2281 if opts[b'rev']:
2282 2282 raise error.Abort(b'cannot select revision when creating marker')
2283 2283 metadata = {}
2284 2284 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2285 2285 succs = tuple(parsenodeid(succ) for succ in successors)
2286 2286 l = repo.lock()
2287 2287 try:
2288 2288 tr = repo.transaction(b'debugobsolete')
2289 2289 try:
2290 2290 date = opts.get(b'date')
2291 2291 if date:
2292 2292 date = dateutil.parsedate(date)
2293 2293 else:
2294 2294 date = None
2295 2295 prec = parsenodeid(precursor)
2296 2296 parents = None
2297 2297 if opts[b'record_parents']:
2298 2298 if prec not in repo.unfiltered():
2299 2299 raise error.Abort(
2300 2300                         b'cannot use --record-parents on '
2301 2301 b'unknown changesets'
2302 2302 )
2303 2303 parents = repo.unfiltered()[prec].parents()
2304 2304 parents = tuple(p.node() for p in parents)
2305 2305 repo.obsstore.create(
2306 2306 tr,
2307 2307 prec,
2308 2308 succs,
2309 2309 opts[b'flags'],
2310 2310 parents=parents,
2311 2311 date=date,
2312 2312 metadata=metadata,
2313 2313 ui=ui,
2314 2314 )
2315 2315 tr.close()
2316 2316 except ValueError as exc:
2317 2317 raise error.Abort(
2318 2318 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2319 2319 )
2320 2320 finally:
2321 2321 tr.release()
2322 2322 finally:
2323 2323 l.release()
2324 2324 else:
2325 2325 if opts[b'rev']:
2326 2326 revs = scmutil.revrange(repo, opts[b'rev'])
2327 2327 nodes = [repo[r].node() for r in revs]
2328 2328 markers = list(
2329 2329 obsutil.getmarkers(
2330 2330 repo, nodes=nodes, exclusive=opts[b'exclusive']
2331 2331 )
2332 2332 )
2333 2333 markers.sort(key=lambda x: x._data)
2334 2334 else:
2335 2335 markers = obsutil.getmarkers(repo)
2336 2336
2337 2337 markerstoiter = markers
2338 2338 isrelevant = lambda m: True
2339 2339 if opts.get(b'rev') and opts.get(b'index'):
2340 2340 markerstoiter = obsutil.getmarkers(repo)
2341 2341 markerset = set(markers)
2342 2342 isrelevant = lambda m: m in markerset
2343 2343
2344 2344 fm = ui.formatter(b'debugobsolete', opts)
2345 2345 for i, m in enumerate(markerstoiter):
2346 2346 if not isrelevant(m):
2347 2347 # marker can be irrelevant when we're iterating over a set
2348 2348 # of markers (markerstoiter) which is bigger than the set
2349 2349 # of markers we want to display (markers)
2350 2350 # this can happen if both --index and --rev options are
2351 2351 # provided and thus we need to iterate over all of the markers
2352 2352 # to get the correct indices, but only display the ones that
2353 2353 # are relevant to --rev value
2354 2354 continue
2355 2355 fm.startitem()
2356 2356 ind = i if opts.get(b'index') else None
2357 2357 cmdutil.showmarker(fm, m, index=ind)
2358 2358 fm.end()
2359 2359
2360 2360
2361 2361 @command(
2362 2362 b'debugp1copies',
2363 2363 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2364 2364 _(b'[-r REV]'),
2365 2365 )
2366 2366 def debugp1copies(ui, repo, **opts):
2367 2367 """dump copy information compared to p1"""
2368 2368
2369 2369 opts = pycompat.byteskwargs(opts)
2370 2370 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2371 2371 for dst, src in ctx.p1copies().items():
2372 2372 ui.write(b'%s -> %s\n' % (src, dst))
2373 2373
2374 2374
2375 2375 @command(
2376 2376 b'debugp2copies',
2377 2377 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2378 2378 _(b'[-r REV]'),
2379 2379 )
2380 2380 def debugp2copies(ui, repo, **opts):
2381 2381 """dump copy information compared to p2"""
2382 2382
2383 2383 opts = pycompat.byteskwargs(opts)
2384 2384 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2385 2385 for dst, src in ctx.p2copies().items():
2386 2386 ui.write(b'%s -> %s\n' % (src, dst))
2387 2387
2388 2388
2389 2389 @command(
2390 2390 b'debugpathcomplete',
2391 2391 [
2392 2392 (b'f', b'full', None, _(b'complete an entire path')),
2393 2393 (b'n', b'normal', None, _(b'show only normal files')),
2394 2394 (b'a', b'added', None, _(b'show only added files')),
2395 2395 (b'r', b'removed', None, _(b'show only removed files')),
2396 2396 ],
2397 2397 _(b'FILESPEC...'),
2398 2398 )
2399 2399 def debugpathcomplete(ui, repo, *specs, **opts):
2400 2400 '''complete part or all of a tracked path
2401 2401
2402 2402 This command supports shells that offer path name completion. It
2403 2403 currently completes only files already known to the dirstate.
2404 2404
2405 2405 Completion extends only to the next path segment unless
2406 2406 --full is specified, in which case entire paths are used.'''
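    # Hypothetical example: if the dirstate tracks src/main.py, then
    # `hg debugpathcomplete src/` prints "src/main.py", while adding -f/--full
    # prints entire matching paths instead of stopping at the next path segment.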
2407 2407
2408 2408 def complete(path, acceptable):
2409 2409 dirstate = repo.dirstate
2410 2410 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2411 2411 rootdir = repo.root + pycompat.ossep
2412 2412 if spec != repo.root and not spec.startswith(rootdir):
2413 2413 return [], []
2414 2414 if os.path.isdir(spec):
2415 2415 spec += b'/'
2416 2416 spec = spec[len(rootdir) :]
2417 2417 fixpaths = pycompat.ossep != b'/'
2418 2418 if fixpaths:
2419 2419 spec = spec.replace(pycompat.ossep, b'/')
2420 2420 speclen = len(spec)
2421 2421 fullpaths = opts['full']
2422 2422 files, dirs = set(), set()
2423 2423 adddir, addfile = dirs.add, files.add
2424 2424 for f, st in pycompat.iteritems(dirstate):
2425 2425 if f.startswith(spec) and st[0] in acceptable:
2426 2426 if fixpaths:
2427 2427 f = f.replace(b'/', pycompat.ossep)
2428 2428 if fullpaths:
2429 2429 addfile(f)
2430 2430 continue
2431 2431 s = f.find(pycompat.ossep, speclen)
2432 2432 if s >= 0:
2433 2433 adddir(f[:s])
2434 2434 else:
2435 2435 addfile(f)
2436 2436 return files, dirs
2437 2437
2438 2438 acceptable = b''
2439 2439 if opts['normal']:
2440 2440 acceptable += b'nm'
2441 2441 if opts['added']:
2442 2442 acceptable += b'a'
2443 2443 if opts['removed']:
2444 2444 acceptable += b'r'
2445 2445 cwd = repo.getcwd()
2446 2446 if not specs:
2447 2447 specs = [b'.']
2448 2448
2449 2449 files, dirs = set(), set()
2450 2450 for spec in specs:
2451 2451 f, d = complete(spec, acceptable or b'nmar')
2452 2452 files.update(f)
2453 2453 dirs.update(d)
2454 2454 files.update(dirs)
2455 2455 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2456 2456 ui.write(b'\n')
2457 2457
2458 2458
2459 2459 @command(
2460 2460 b'debugpathcopies',
2461 2461 cmdutil.walkopts,
2462 2462 b'hg debugpathcopies REV1 REV2 [FILE]',
2463 2463 inferrepo=True,
2464 2464 )
2465 2465 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2466 2466 """show copies between two revisions"""
2467 2467 ctx1 = scmutil.revsingle(repo, rev1)
2468 2468 ctx2 = scmutil.revsingle(repo, rev2)
2469 2469 m = scmutil.match(ctx1, pats, opts)
2470 2470 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2471 2471 ui.write(b'%s -> %s\n' % (src, dst))
2472 2472
2473 2473
2474 2474 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2475 2475 def debugpeer(ui, path):
2476 2476 """establish a connection to a peer repository"""
2477 2477 # Always enable peer request logging. Requires --debug to display
2478 2478 # though.
2479 2479 overrides = {
2480 2480 (b'devel', b'debug.peer-request'): True,
2481 2481 }
2482 2482
2483 2483 with ui.configoverride(overrides):
2484 2484 peer = hg.peer(ui, {}, path)
2485 2485
2486 2486 local = peer.local() is not None
2487 2487 canpush = peer.canpush()
2488 2488
2489 2489 ui.write(_(b'url: %s\n') % peer.url())
2490 2490 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2491 2491 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2492 2492
2493 2493
2494 2494 @command(
2495 2495 b'debugpickmergetool',
2496 2496 [
2497 2497 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2498 2498 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2499 2499 ]
2500 2500 + cmdutil.walkopts
2501 2501 + cmdutil.mergetoolopts,
2502 2502 _(b'[PATTERN]...'),
2503 2503 inferrepo=True,
2504 2504 )
2505 2505 def debugpickmergetool(ui, repo, *pats, **opts):
2506 2506 """examine which merge tool is chosen for specified file
2507 2507
2508 2508 As described in :hg:`help merge-tools`, Mercurial examines
2509 2509 configurations below in this order to decide which merge tool is
2510 2510 chosen for specified file.
2511 2511
2512 2512 1. ``--tool`` option
2513 2513 2. ``HGMERGE`` environment variable
2514 2514 3. configurations in ``merge-patterns`` section
2515 2515 4. configuration of ``ui.merge``
2516 2516 5. configurations in ``merge-tools`` section
2517 2517     6. ``hgmerge`` tool (for historical reasons only)
2518 2518 7. default tool for fallback (``:merge`` or ``:prompt``)
2519 2519
2520 2520 This command writes out examination result in the style below::
2521 2521
2522 2522 FILE = MERGETOOL
2523 2523
2524 2524 By default, all files known in the first parent context of the
2525 2525 working directory are examined. Use file patterns and/or -I/-X
2526 2526 options to limit target files. -r/--rev is also useful to examine
2527 2527 files in another context without actual updating to it.
2528 2528
2529 2529     With --debug, this command also shows warning messages while
2530 2530     matching against ``merge-patterns`` and so on. It is recommended to
2531 2531     use this option with explicit file patterns and/or -I/-X options,
2532 2532     because this option increases the amount of output per file according
2533 2533     to the configurations in hgrc.
2534 2534
2535 2535     With -v/--verbose, this command first shows the configurations
2536 2536     below (only if specified).
2537 2537
2538 2538 - ``--tool`` option
2539 2539 - ``HGMERGE`` environment variable
2540 2540 - configuration of ``ui.merge``
2541 2541
2542 2542     If a merge tool is chosen before matching against
2543 2543     ``merge-patterns``, this command can't show any helpful
2544 2544     information, even with --debug. In such a case, the information
2545 2545     above is useful for knowing why a merge tool was chosen.
2546 2546 """
2547 2547 opts = pycompat.byteskwargs(opts)
2548 2548 overrides = {}
2549 2549 if opts[b'tool']:
2550 2550 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2551 2551 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2552 2552
2553 2553 with ui.configoverride(overrides, b'debugmergepatterns'):
2554 2554 hgmerge = encoding.environ.get(b"HGMERGE")
2555 2555 if hgmerge is not None:
2556 2556 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2557 2557 uimerge = ui.config(b"ui", b"merge")
2558 2558 if uimerge:
2559 2559 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2560 2560
2561 2561 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2562 2562 m = scmutil.match(ctx, pats, opts)
2563 2563 changedelete = opts[b'changedelete']
2564 2564 for path in ctx.walk(m):
2565 2565 fctx = ctx[path]
2566 2566 try:
2567 2567 if not ui.debugflag:
2568 2568 ui.pushbuffer(error=True)
2569 2569 tool, toolpath = filemerge._picktool(
2570 2570 repo,
2571 2571 ui,
2572 2572 path,
2573 2573 fctx.isbinary(),
2574 2574 b'l' in fctx.flags(),
2575 2575 changedelete,
2576 2576 )
2577 2577 finally:
2578 2578 if not ui.debugflag:
2579 2579 ui.popbuffer()
2580 2580 ui.write(b'%s = %s\n' % (path, tool))
2581 2581
2582 2582
2583 2583 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2584 2584 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2585 2585 '''access the pushkey key/value protocol
2586 2586
2587 2587 With two args, list the keys in the given namespace.
2588 2588
2589 2589 With five args, set a key to new if it currently is set to old.
2590 2590 Reports success or failure.
2591 2591 '''
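    # Usage sketch (values are placeholders): `hg debugpushkey PATH namespaces`
    # lists the keys in that namespace, while
    # `hg debugpushkey PATH NAMESPACE KEY OLD NEW` attempts the conditional
    # update and prints the result.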
2592 2592
2593 2593 target = hg.peer(ui, {}, repopath)
2594 2594 if keyinfo:
2595 2595 key, old, new = keyinfo
2596 2596 with target.commandexecutor() as e:
2597 2597 r = e.callcommand(
2598 2598 b'pushkey',
2599 2599 {
2600 2600 b'namespace': namespace,
2601 2601 b'key': key,
2602 2602 b'old': old,
2603 2603 b'new': new,
2604 2604 },
2605 2605 ).result()
2606 2606
2607 2607 ui.status(pycompat.bytestr(r) + b'\n')
2608 2608 return not r
2609 2609 else:
2610 2610 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2611 2611 ui.write(
2612 2612 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2613 2613 )
2614 2614
2615 2615
2616 2616 @command(b'debugpvec', [], _(b'A B'))
2617 2617 def debugpvec(ui, repo, a, b=None):
2618 2618 ca = scmutil.revsingle(repo, a)
2619 2619 cb = scmutil.revsingle(repo, b)
2620 2620 pa = pvec.ctxpvec(ca)
2621 2621 pb = pvec.ctxpvec(cb)
2622 2622 if pa == pb:
2623 2623 rel = b"="
2624 2624 elif pa > pb:
2625 2625 rel = b">"
2626 2626 elif pa < pb:
2627 2627 rel = b"<"
2628 2628 elif pa | pb:
2629 2629 rel = b"|"
2630 2630 ui.write(_(b"a: %s\n") % pa)
2631 2631 ui.write(_(b"b: %s\n") % pb)
2632 2632 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2633 2633 ui.write(
2634 2634 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2635 2635 % (
2636 2636 abs(pa._depth - pb._depth),
2637 2637 pvec._hamming(pa._vec, pb._vec),
2638 2638 pa.distance(pb),
2639 2639 rel,
2640 2640 )
2641 2641 )
2642 2642
2643 2643
2644 2644 @command(
2645 2645 b'debugrebuilddirstate|debugrebuildstate',
2646 2646 [
2647 2647 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2648 2648 (
2649 2649 b'',
2650 2650 b'minimal',
2651 2651 None,
2652 2652 _(
2653 2653 b'only rebuild files that are inconsistent with '
2654 2654 b'the working copy parent'
2655 2655 ),
2656 2656 ),
2657 2657 ],
2658 2658 _(b'[-r REV]'),
2659 2659 )
2660 2660 def debugrebuilddirstate(ui, repo, rev, **opts):
2661 2661 """rebuild the dirstate as it would look like for the given revision
2662 2662
2663 2663     If no revision is specified, the first current parent will be used.
2664 2664
2665 2665 The dirstate will be set to the files of the given revision.
2666 2666 The actual working directory content or existing dirstate
2667 2667 information such as adds or removes is not considered.
2668 2668
2669 2669 ``minimal`` will only rebuild the dirstate status for files that claim to be
2670 2670 tracked but are not in the parent manifest, or that exist in the parent
2671 2671 manifest but are not in the dirstate. It will not change adds, removes, or
2672 2672 modified files that are in the working copy parent.
2673 2673
2674 2674 One use of this command is to make the next :hg:`status` invocation
2675 2675 check the actual file content.
2676 2676 """
2677 2677 ctx = scmutil.revsingle(repo, rev)
2678 2678 with repo.wlock():
2679 2679 dirstate = repo.dirstate
2680 2680 changedfiles = None
2681 2681 # See command doc for what minimal does.
2682 2682 if opts.get('minimal'):
2683 2683 manifestfiles = set(ctx.manifest().keys())
2684 2684 dirstatefiles = set(dirstate)
2685 2685 manifestonly = manifestfiles - dirstatefiles
2686 2686 dsonly = dirstatefiles - manifestfiles
2687 2687 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2688 2688 changedfiles = manifestonly | dsnotadded
2689 2689
2690 2690 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2691 2691
2692 2692
2693 2693 @command(b'debugrebuildfncache', [], b'')
2694 2694 def debugrebuildfncache(ui, repo):
2695 2695 """rebuild the fncache file"""
2696 2696 repair.rebuildfncache(ui, repo)
2697 2697
2698 2698
2699 2699 @command(
2700 2700 b'debugrename',
2701 2701 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2702 2702 _(b'[-r REV] [FILE]...'),
2703 2703 )
2704 2704 def debugrename(ui, repo, *pats, **opts):
2705 2705 """dump rename information"""
2706 2706
2707 2707 opts = pycompat.byteskwargs(opts)
2708 2708 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2709 2709 m = scmutil.match(ctx, pats, opts)
2710 2710 for abs in ctx.walk(m):
2711 2711 fctx = ctx[abs]
2712 2712 o = fctx.filelog().renamed(fctx.filenode())
2713 2713 rel = repo.pathto(abs)
2714 2714 if o:
2715 2715 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2716 2716 else:
2717 2717 ui.write(_(b"%s not renamed\n") % rel)
2718 2718
2719 2719
2720 2720 @command(b'debugrequires|debugrequirements', [], b'')
2721 2721 def debugrequirements(ui, repo):
2722 2722 """ print the current repo requirements """
2723 2723 for r in sorted(repo.requirements):
2724 2724 ui.write(b"%s\n" % r)
2725 2725
2726 2726
2727 2727 @command(
2728 2728 b'debugrevlog',
2729 2729 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2730 2730 _(b'-c|-m|FILE'),
2731 2731 optionalrepo=True,
2732 2732 )
2733 2733 def debugrevlog(ui, repo, file_=None, **opts):
2734 2734 """show data and statistics about a revlog"""
2735 2735 opts = pycompat.byteskwargs(opts)
2736 2736 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2737 2737
2738 2738 if opts.get(b"dump"):
2739 2739 numrevs = len(r)
2740 2740 ui.write(
2741 2741 (
2742 2742 b"# rev p1rev p2rev start end deltastart base p1 p2"
2743 2743 b" rawsize totalsize compression heads chainlen\n"
2744 2744 )
2745 2745 )
2746 2746 ts = 0
2747 2747 heads = set()
2748 2748
2749 2749 for rev in pycompat.xrange(numrevs):
2750 2750 dbase = r.deltaparent(rev)
2751 2751 if dbase == -1:
2752 2752 dbase = rev
2753 2753 cbase = r.chainbase(rev)
2754 2754 clen = r.chainlen(rev)
2755 2755 p1, p2 = r.parentrevs(rev)
2756 2756 rs = r.rawsize(rev)
2757 2757 ts = ts + rs
2758 2758 heads -= set(r.parentrevs(rev))
2759 2759 heads.add(rev)
2760 2760 try:
2761 2761 compression = ts / r.end(rev)
2762 2762 except ZeroDivisionError:
2763 2763 compression = 0
2764 2764 ui.write(
2765 2765 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2766 2766 b"%11d %5d %8d\n"
2767 2767 % (
2768 2768 rev,
2769 2769 p1,
2770 2770 p2,
2771 2771 r.start(rev),
2772 2772 r.end(rev),
2773 2773 r.start(dbase),
2774 2774 r.start(cbase),
2775 2775 r.start(p1),
2776 2776 r.start(p2),
2777 2777 rs,
2778 2778 ts,
2779 2779 compression,
2780 2780 len(heads),
2781 2781 clen,
2782 2782 )
2783 2783 )
2784 2784 return 0
2785 2785
2786 2786 v = r.version
2787 2787 format = v & 0xFFFF
2788 2788 flags = []
2789 2789 gdelta = False
2790 2790 if v & revlog.FLAG_INLINE_DATA:
2791 2791 flags.append(b'inline')
2792 2792 if v & revlog.FLAG_GENERALDELTA:
2793 2793 gdelta = True
2794 2794 flags.append(b'generaldelta')
2795 2795 if not flags:
2796 2796 flags = [b'(none)']
2797 2797
2798 2798 ### tracks merge vs single parent
2799 2799 nummerges = 0
2800 2800
2801 2801     ### tracks how the deltas are built
2802 2802 # nodelta
2803 2803 numempty = 0
2804 2804 numemptytext = 0
2805 2805 numemptydelta = 0
2806 2806 # full file content
2807 2807 numfull = 0
2808 2808 # intermediate snapshot against a prior snapshot
2809 2809 numsemi = 0
2810 2810 # snapshot count per depth
2811 2811 numsnapdepth = collections.defaultdict(lambda: 0)
2812 2812 # delta against previous revision
2813 2813 numprev = 0
2814 2814 # delta against first or second parent (not prev)
2815 2815 nump1 = 0
2816 2816 nump2 = 0
2817 2817 # delta against neither prev nor parents
2818 2818 numother = 0
2819 2819 # delta against prev that are also first or second parent
2820 2820 # (details of `numprev`)
2821 2821 nump1prev = 0
2822 2822 nump2prev = 0
2823 2823
2824 2824 # data about delta chain of each revs
2825 2825 chainlengths = []
2826 2826 chainbases = []
2827 2827 chainspans = []
2828 2828
2829 2829 # data about each revision
2830 2830 datasize = [None, 0, 0]
2831 2831 fullsize = [None, 0, 0]
2832 2832 semisize = [None, 0, 0]
2833 2833 # snapshot count per depth
2834 2834 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2835 2835 deltasize = [None, 0, 0]
2836 2836 chunktypecounts = {}
2837 2837 chunktypesizes = {}
2838 2838
2839 2839 def addsize(size, l):
2840 2840 if l[0] is None or size < l[0]:
2841 2841 l[0] = size
2842 2842 if size > l[1]:
2843 2843 l[1] = size
2844 2844 l[2] += size
2845 2845
2846 2846 numrevs = len(r)
2847 2847 for rev in pycompat.xrange(numrevs):
2848 2848 p1, p2 = r.parentrevs(rev)
2849 2849 delta = r.deltaparent(rev)
2850 2850 if format > 0:
2851 2851 addsize(r.rawsize(rev), datasize)
2852 2852 if p2 != nullrev:
2853 2853 nummerges += 1
2854 2854 size = r.length(rev)
2855 2855 if delta == nullrev:
2856 2856 chainlengths.append(0)
2857 2857 chainbases.append(r.start(rev))
2858 2858 chainspans.append(size)
2859 2859 if size == 0:
2860 2860 numempty += 1
2861 2861 numemptytext += 1
2862 2862 else:
2863 2863 numfull += 1
2864 2864 numsnapdepth[0] += 1
2865 2865 addsize(size, fullsize)
2866 2866 addsize(size, snapsizedepth[0])
2867 2867 else:
2868 2868 chainlengths.append(chainlengths[delta] + 1)
2869 2869 baseaddr = chainbases[delta]
2870 2870 revaddr = r.start(rev)
2871 2871 chainbases.append(baseaddr)
2872 2872 chainspans.append((revaddr - baseaddr) + size)
2873 2873 if size == 0:
2874 2874 numempty += 1
2875 2875 numemptydelta += 1
2876 2876 elif r.issnapshot(rev):
2877 2877 addsize(size, semisize)
2878 2878 numsemi += 1
2879 2879 depth = r.snapshotdepth(rev)
2880 2880 numsnapdepth[depth] += 1
2881 2881 addsize(size, snapsizedepth[depth])
2882 2882 else:
2883 2883 addsize(size, deltasize)
2884 2884 if delta == rev - 1:
2885 2885 numprev += 1
2886 2886 if delta == p1:
2887 2887 nump1prev += 1
2888 2888 elif delta == p2:
2889 2889 nump2prev += 1
2890 2890 elif delta == p1:
2891 2891 nump1 += 1
2892 2892 elif delta == p2:
2893 2893 nump2 += 1
2894 2894 elif delta != nullrev:
2895 2895 numother += 1
2896 2896
2897 2897 # Obtain data on the raw chunks in the revlog.
2898 2898 if util.safehasattr(r, b'_getsegmentforrevs'):
2899 2899 segment = r._getsegmentforrevs(rev, rev)[1]
2900 2900 else:
2901 2901 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2902 2902 if segment:
2903 2903 chunktype = bytes(segment[0:1])
2904 2904 else:
2905 2905 chunktype = b'empty'
2906 2906
2907 2907 if chunktype not in chunktypecounts:
2908 2908 chunktypecounts[chunktype] = 0
2909 2909 chunktypesizes[chunktype] = 0
2910 2910
2911 2911 chunktypecounts[chunktype] += 1
2912 2912 chunktypesizes[chunktype] += size
2913 2913
2914 2914 # Adjust size min value for empty cases
2915 2915 for size in (datasize, fullsize, semisize, deltasize):
2916 2916 if size[0] is None:
2917 2917 size[0] = 0
2918 2918
2919 2919 numdeltas = numrevs - numfull - numempty - numsemi
2920 2920 numoprev = numprev - nump1prev - nump2prev
2921 2921 totalrawsize = datasize[2]
2922 2922 datasize[2] /= numrevs
2923 2923 fulltotal = fullsize[2]
2924 2924 if numfull == 0:
2925 2925 fullsize[2] = 0
2926 2926 else:
2927 2927 fullsize[2] /= numfull
2928 2928 semitotal = semisize[2]
2929 2929 snaptotal = {}
2930 2930 if numsemi > 0:
2931 2931 semisize[2] /= numsemi
2932 2932 for depth in snapsizedepth:
2933 2933 snaptotal[depth] = snapsizedepth[depth][2]
2934 2934 snapsizedepth[depth][2] /= numsnapdepth[depth]
2935 2935
2936 2936 deltatotal = deltasize[2]
2937 2937 if numdeltas > 0:
2938 2938 deltasize[2] /= numdeltas
2939 2939 totalsize = fulltotal + semitotal + deltatotal
2940 2940 avgchainlen = sum(chainlengths) / numrevs
2941 2941 maxchainlen = max(chainlengths)
2942 2942 maxchainspan = max(chainspans)
2943 2943 compratio = 1
2944 2944 if totalsize:
2945 2945 compratio = totalrawsize / totalsize
2946 2946
2947 2947 basedfmtstr = b'%%%dd\n'
2948 2948 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2949 2949
2950 2950 def dfmtstr(max):
2951 2951 return basedfmtstr % len(str(max))
2952 2952
2953 2953 def pcfmtstr(max, padding=0):
2954 2954 return basepcfmtstr % (len(str(max)), b' ' * padding)
2955 2955
2956 2956 def pcfmt(value, total):
2957 2957 if total:
2958 2958 return (value, 100 * float(value) / total)
2959 2959 else:
2960 2960 return value, 100.0
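    # Worked example of the helpers above: with a six-digit totalsize,
    # pcfmtstr(totalsize) yields b'%6d (%5.2f%%)\n', so fmt % pcfmt(50, 200)
    # renders as "    50 (25.00%)".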
2961 2961
2962 2962 ui.writenoi18n(b'format : %d\n' % format)
2963 2963 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2964 2964
2965 2965 ui.write(b'\n')
2966 2966 fmt = pcfmtstr(totalsize)
2967 2967 fmt2 = dfmtstr(totalsize)
2968 2968 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2969 2969 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2970 2970 ui.writenoi18n(
2971 2971 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2972 2972 )
2973 2973 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2974 2974 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2975 2975 ui.writenoi18n(
2976 2976 b' text : '
2977 2977 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2978 2978 )
2979 2979 ui.writenoi18n(
2980 2980 b' delta : '
2981 2981 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2982 2982 )
2983 2983 ui.writenoi18n(
2984 2984 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2985 2985 )
2986 2986 for depth in sorted(numsnapdepth):
2987 2987 ui.write(
2988 2988 (b' lvl-%-3d : ' % depth)
2989 2989 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2990 2990 )
2991 2991 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2992 2992 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2993 2993 ui.writenoi18n(
2994 2994 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2995 2995 )
2996 2996 for depth in sorted(numsnapdepth):
2997 2997 ui.write(
2998 2998 (b' lvl-%-3d : ' % depth)
2999 2999 + fmt % pcfmt(snaptotal[depth], totalsize)
3000 3000 )
3001 3001 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3002 3002
3003 3003 def fmtchunktype(chunktype):
3004 3004 if chunktype == b'empty':
3005 3005 return b' %s : ' % chunktype
3006 3006 elif chunktype in pycompat.bytestr(string.ascii_letters):
3007 3007 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3008 3008 else:
3009 3009 return b' 0x%s : ' % hex(chunktype)
3010 3010
3011 3011 ui.write(b'\n')
3012 3012 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3013 3013 for chunktype in sorted(chunktypecounts):
3014 3014 ui.write(fmtchunktype(chunktype))
3015 3015 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3016 3016 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3017 3017 for chunktype in sorted(chunktypecounts):
3018 3018 ui.write(fmtchunktype(chunktype))
3019 3019 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3020 3020
3021 3021 ui.write(b'\n')
3022 3022 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3023 3023 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3024 3024 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3025 3025 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3026 3026 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3027 3027
3028 3028 if format > 0:
3029 3029 ui.write(b'\n')
3030 3030 ui.writenoi18n(
3031 3031 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3032 3032 % tuple(datasize)
3033 3033 )
3034 3034 ui.writenoi18n(
3035 3035 b'full revision size (min/max/avg) : %d / %d / %d\n'
3036 3036 % tuple(fullsize)
3037 3037 )
3038 3038 ui.writenoi18n(
3039 3039 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3040 3040 % tuple(semisize)
3041 3041 )
3042 3042 for depth in sorted(snapsizedepth):
3043 3043 if depth == 0:
3044 3044 continue
3045 3045 ui.writenoi18n(
3046 3046 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3047 3047 % ((depth,) + tuple(snapsizedepth[depth]))
3048 3048 )
3049 3049 ui.writenoi18n(
3050 3050 b'delta size (min/max/avg) : %d / %d / %d\n'
3051 3051 % tuple(deltasize)
3052 3052 )
3053 3053
3054 3054 if numdeltas > 0:
3055 3055 ui.write(b'\n')
3056 3056 fmt = pcfmtstr(numdeltas)
3057 3057 fmt2 = pcfmtstr(numdeltas, 4)
3058 3058 ui.writenoi18n(
3059 3059 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3060 3060 )
3061 3061 if numprev > 0:
3062 3062 ui.writenoi18n(
3063 3063 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3064 3064 )
3065 3065 ui.writenoi18n(
3066 3066 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3067 3067 )
3068 3068 ui.writenoi18n(
3069 3069 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3070 3070 )
3071 3071 if gdelta:
3072 3072 ui.writenoi18n(
3073 3073 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3074 3074 )
3075 3075 ui.writenoi18n(
3076 3076 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3077 3077 )
3078 3078 ui.writenoi18n(
3079 3079 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3080 3080 )
3081 3081
3082 3082
3083 3083 @command(
3084 3084 b'debugrevlogindex',
3085 3085 cmdutil.debugrevlogopts
3086 3086 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3087 3087 _(b'[-f FORMAT] -c|-m|FILE'),
3088 3088 optionalrepo=True,
3089 3089 )
3090 3090 def debugrevlogindex(ui, repo, file_=None, **opts):
3091 3091 """dump the contents of a revlog index"""
3092 3092 opts = pycompat.byteskwargs(opts)
3093 3093 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3094 3094 format = opts.get(b'format', 0)
3095 3095 if format not in (0, 1):
3096 3096 raise error.Abort(_(b"unknown format %d") % format)
3097 3097
3098 3098 if ui.debugflag:
3099 3099 shortfn = hex
3100 3100 else:
3101 3101 shortfn = short
3102 3102
3103 3103 # There might not be anything in r, so have a sane default
3104 3104 idlen = 12
3105 3105 for i in r:
3106 3106 idlen = len(shortfn(r.node(i)))
3107 3107 break
3108 3108
3109 3109 if format == 0:
3110 3110 if ui.verbose:
3111 3111 ui.writenoi18n(
3112 3112 b" rev offset length linkrev %s %s p2\n"
3113 3113 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3114 3114 )
3115 3115 else:
3116 3116 ui.writenoi18n(
3117 3117 b" rev linkrev %s %s p2\n"
3118 3118 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3119 3119 )
3120 3120 elif format == 1:
3121 3121 if ui.verbose:
3122 3122 ui.writenoi18n(
3123 3123 (
3124 3124 b" rev flag offset length size link p1"
3125 3125 b" p2 %s\n"
3126 3126 )
3127 3127 % b"nodeid".rjust(idlen)
3128 3128 )
3129 3129 else:
3130 3130 ui.writenoi18n(
3131 3131 b" rev flag size link p1 p2 %s\n"
3132 3132 % b"nodeid".rjust(idlen)
3133 3133 )
3134 3134
3135 3135 for i in r:
3136 3136 node = r.node(i)
3137 3137 if format == 0:
3138 3138 try:
3139 3139 pp = r.parents(node)
3140 3140 except Exception:
3141 3141 pp = [nullid, nullid]
3142 3142 if ui.verbose:
3143 3143 ui.write(
3144 3144 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3145 3145 % (
3146 3146 i,
3147 3147 r.start(i),
3148 3148 r.length(i),
3149 3149 r.linkrev(i),
3150 3150 shortfn(node),
3151 3151 shortfn(pp[0]),
3152 3152 shortfn(pp[1]),
3153 3153 )
3154 3154 )
3155 3155 else:
3156 3156 ui.write(
3157 3157 b"% 6d % 7d %s %s %s\n"
3158 3158 % (
3159 3159 i,
3160 3160 r.linkrev(i),
3161 3161 shortfn(node),
3162 3162 shortfn(pp[0]),
3163 3163 shortfn(pp[1]),
3164 3164 )
3165 3165 )
3166 3166 elif format == 1:
3167 3167 pr = r.parentrevs(i)
3168 3168 if ui.verbose:
3169 3169 ui.write(
3170 3170 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3171 3171 % (
3172 3172 i,
3173 3173 r.flags(i),
3174 3174 r.start(i),
3175 3175 r.length(i),
3176 3176 r.rawsize(i),
3177 3177 r.linkrev(i),
3178 3178 pr[0],
3179 3179 pr[1],
3180 3180 shortfn(node),
3181 3181 )
3182 3182 )
3183 3183 else:
3184 3184 ui.write(
3185 3185 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3186 3186 % (
3187 3187 i,
3188 3188 r.flags(i),
3189 3189 r.rawsize(i),
3190 3190 r.linkrev(i),
3191 3191 pr[0],
3192 3192 pr[1],
3193 3193 shortfn(node),
3194 3194 )
3195 3195 )
3196 3196
3197 3197
3198 3198 @command(
3199 3199 b'debugrevspec',
3200 3200 [
3201 3201 (
3202 3202 b'',
3203 3203 b'optimize',
3204 3204 None,
3205 3205 _(b'print parsed tree after optimizing (DEPRECATED)'),
3206 3206 ),
3207 3207 (
3208 3208 b'',
3209 3209 b'show-revs',
3210 3210 True,
3211 3211 _(b'print list of result revisions (default)'),
3212 3212 ),
3213 3213 (
3214 3214 b's',
3215 3215 b'show-set',
3216 3216 None,
3217 3217 _(b'print internal representation of result set'),
3218 3218 ),
3219 3219 (
3220 3220 b'p',
3221 3221 b'show-stage',
3222 3222 [],
3223 3223 _(b'print parsed tree at the given stage'),
3224 3224 _(b'NAME'),
3225 3225 ),
3226 3226 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3227 3227 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3228 3228 ],
3229 3229 b'REVSPEC',
3230 3230 )
3231 3231 def debugrevspec(ui, repo, expr, **opts):
3232 3232 """parse and apply a revision specification
3233 3233
3234 3234 Use -p/--show-stage option to print the parsed tree at the given stages.
3235 3235 Use -p all to print tree at every stage.
3236 3236
3237 3237 Use --no-show-revs option with -s or -p to print only the set
3238 3238 representation or the parsed tree respectively.
3239 3239
3240 3240 Use --verify-optimized to compare the optimized result with the unoptimized
3241 3241 one. Returns 1 if the optimized result differs.
3242 3242 """
3243 3243 opts = pycompat.byteskwargs(opts)
3244 3244 aliases = ui.configitems(b'revsetalias')
3245 3245 stages = [
3246 3246 (b'parsed', lambda tree: tree),
3247 3247 (
3248 3248 b'expanded',
3249 3249 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3250 3250 ),
3251 3251 (b'concatenated', revsetlang.foldconcat),
3252 3252 (b'analyzed', revsetlang.analyze),
3253 3253 (b'optimized', revsetlang.optimize),
3254 3254 ]
3255 3255 if opts[b'no_optimized']:
3256 3256 stages = stages[:-1]
3257 3257 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3258 3258 raise error.Abort(
3259 3259 _(b'cannot use --verify-optimized with --no-optimized')
3260 3260 )
3261 3261 stagenames = {n for n, f in stages}
3262 3262
3263 3263 showalways = set()
3264 3264 showchanged = set()
3265 3265 if ui.verbose and not opts[b'show_stage']:
3266 3266 # show parsed tree by --verbose (deprecated)
3267 3267 showalways.add(b'parsed')
3268 3268 showchanged.update([b'expanded', b'concatenated'])
3269 3269 if opts[b'optimize']:
3270 3270 showalways.add(b'optimized')
3271 3271 if opts[b'show_stage'] and opts[b'optimize']:
3272 3272 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3273 3273 if opts[b'show_stage'] == [b'all']:
3274 3274 showalways.update(stagenames)
3275 3275 else:
3276 3276 for n in opts[b'show_stage']:
3277 3277 if n not in stagenames:
3278 3278 raise error.Abort(_(b'invalid stage name: %s') % n)
3279 3279 showalways.update(opts[b'show_stage'])
3280 3280
3281 3281 treebystage = {}
3282 3282 printedtree = None
3283 3283 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3284 3284 for n, f in stages:
3285 3285 treebystage[n] = tree = f(tree)
3286 3286 if n in showalways or (n in showchanged and tree != printedtree):
3287 3287 if opts[b'show_stage'] or n != b'parsed':
3288 3288 ui.write(b"* %s:\n" % n)
3289 3289 ui.write(revsetlang.prettyformat(tree), b"\n")
3290 3290 printedtree = tree
3291 3291
3292 3292 if opts[b'verify_optimized']:
3293 3293 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3294 3294 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3295 3295 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3296 3296 ui.writenoi18n(
3297 3297 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3298 3298 )
3299 3299 ui.writenoi18n(
3300 3300 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3301 3301 )
3302 3302 arevs = list(arevs)
3303 3303 brevs = list(brevs)
3304 3304 if arevs == brevs:
3305 3305 return 0
3306 3306 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3307 3307 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3308 3308 sm = difflib.SequenceMatcher(None, arevs, brevs)
3309 3309 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3310 3310 if tag in ('delete', 'replace'):
3311 3311 for c in arevs[alo:ahi]:
3312 3312 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3313 3313 if tag in ('insert', 'replace'):
3314 3314 for c in brevs[blo:bhi]:
3315 3315 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3316 3316 if tag == 'equal':
3317 3317 for c in arevs[alo:ahi]:
3318 3318 ui.write(b' %d\n' % c)
3319 3319 return 1
3320 3320
3321 3321 func = revset.makematcher(tree)
3322 3322 revs = func(repo)
3323 3323 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3324 3324 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3325 3325 if not opts[b'show_revs']:
3326 3326 return
3327 3327 for c in revs:
3328 3328 ui.write(b"%d\n" % c)
3329 3329
3330 3330
3331 3331 @command(
3332 3332 b'debugserve',
3333 3333 [
3334 3334 (
3335 3335 b'',
3336 3336 b'sshstdio',
3337 3337 False,
3338 3338 _(b'run an SSH server bound to process handles'),
3339 3339 ),
3340 3340 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3341 3341 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3342 3342 ],
3343 3343 b'',
3344 3344 )
3345 3345 def debugserve(ui, repo, **opts):
3346 3346 """run a server with advanced settings
3347 3347
3348 3348 This command is similar to :hg:`serve`. It exists partially as a
3349 3349 workaround to the fact that ``hg serve --stdio`` must have specific
3350 3350 arguments for security reasons.
3351 3351 """
3352 3352 opts = pycompat.byteskwargs(opts)
3353 3353
3354 3354 if not opts[b'sshstdio']:
3355 3355 raise error.Abort(_(b'only --sshstdio is currently supported'))
3356 3356
3357 3357 logfh = None
3358 3358
3359 3359 if opts[b'logiofd'] and opts[b'logiofile']:
3360 3360 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3361 3361
3362 3362 if opts[b'logiofd']:
3363 3363 # Ideally we would be line buffered. But line buffering in binary
3364 3364 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3365 3365 # buffering could have performance impacts. But since this isn't
3366 3366 # performance critical code, it should be fine.
3367 3367 try:
3368 3368 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3369 3369 except OSError as e:
3370 3370 if e.errno != errno.ESPIPE:
3371 3371 raise
3372 3372 # can't seek a pipe, so `ab` mode fails on py3
3373 3373 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3374 3374 elif opts[b'logiofile']:
3375 3375 logfh = open(opts[b'logiofile'], b'ab', 0)
3376 3376
3377 3377 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3378 3378 s.serve_forever()
3379 3379
3380 3380
3381 3381 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3382 3382 def debugsetparents(ui, repo, rev1, rev2=None):
3383 3383 """manually set the parents of the current working directory
3384 3384
3385 3385 This is useful for writing repository conversion tools, but should
3386 3386 be used with care. For example, neither the working directory nor the
3387 3387 dirstate is updated, so file status may be incorrect after running this
3388 3388 command.
3389 3389
3390 3390 Returns 0 on success.
3391 3391 """
3392 3392
3393 3393 node1 = scmutil.revsingle(repo, rev1).node()
3394 3394 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3395 3395
3396 3396 with repo.wlock():
3397 3397 repo.setparents(node1, node2)
3398 3398
3399 3399
3400 3400 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3401 3401 def debugsidedata(ui, repo, file_, rev=None, **opts):
3402 3402 """dump the side data for a cl/manifest/file revision
3403 3403
3404 3404 Use --verbose to dump the sidedata content."""
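    # Illustrative invocations (revision numbers and the file name are
    # hypothetical, not taken from this change):
    #   hg debugsidedata -c 0             # sidedata of changelog revision 0
    #   hg debugsidedata -m 0             # sidedata of manifest revision 0
    #   hg debugsidedata path/to/file 0   # sidedata of a filelog revision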
3405 3405 opts = pycompat.byteskwargs(opts)
3406 3406 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3407 3407 if rev is not None:
3408 3408 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3409 3409 file_, rev = None, file_
3410 3410 elif rev is None:
3411 3411 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3412 3412 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3413 3413 r = getattr(r, '_revlog', r)
3414 3414 try:
3415 3415 sidedata = r.sidedata(r.lookup(rev))
3416 3416 except KeyError:
3417 3417 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3418 3418 if sidedata:
3419 3419 sidedata = list(sidedata.items())
3420 3420 sidedata.sort()
3421 3421 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3422 3422 for key, value in sidedata:
3423 3423 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3424 3424 if ui.verbose:
3425 3425 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3426 3426
3427 3427
3428 3428 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3429 3429 def debugssl(ui, repo, source=None, **opts):
3430 3430 '''test a secure connection to a server
3431 3431
3432 3432 This builds the certificate chain for the server on Windows, installing the
3433 3433 missing intermediates and trusted root via Windows Update if necessary. It
3434 3434 does nothing on other platforms.
3435 3435
3436 3436 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3437 3437 that server is used. See :hg:`help urls` for more information.
3438 3438
3439 3439 If the update succeeds, retry the original operation. Otherwise, the cause
3440 3440 of the SSL error is likely another issue.
3441 3441 '''
3442 3442 if not pycompat.iswindows:
3443 3443 raise error.Abort(
3444 3444 _(b'certificate chain building is only possible on Windows')
3445 3445 )
3446 3446
3447 3447 if not source:
3448 3448 if not repo:
3449 3449 raise error.Abort(
3450 3450 _(
3451 3451 b"there is no Mercurial repository here, and no "
3452 3452 b"server specified"
3453 3453 )
3454 3454 )
3455 3455 source = b"default"
3456 3456
3457 3457 source, branches = hg.parseurl(ui.expandpath(source))
3458 3458 url = util.url(source)
3459 3459
3460 3460 defaultport = {b'https': 443, b'ssh': 22}
3461 3461 if url.scheme in defaultport:
3462 3462 try:
3463 3463 addr = (url.host, int(url.port or defaultport[url.scheme]))
3464 3464 except ValueError:
3465 3465 raise error.Abort(_(b"malformed port number in URL"))
3466 3466 else:
3467 3467 raise error.Abort(_(b"only https and ssh connections are supported"))
3468 3468
3469 3469 from . import win32
3470 3470
3471 3471 s = ssl.wrap_socket(
3472 3472 socket.socket(),
3473 3473 ssl_version=ssl.PROTOCOL_TLS,
3474 3474 cert_reqs=ssl.CERT_NONE,
3475 3475 ca_certs=None,
3476 3476 )
3477 3477
3478 3478 try:
3479 3479 s.connect(addr)
3480 3480 cert = s.getpeercert(True)
3481 3481
3482 3482 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3483 3483
3484 3484 complete = win32.checkcertificatechain(cert, build=False)
3485 3485
3486 3486 if not complete:
3487 3487 ui.status(_(b'certificate chain is incomplete, updating... '))
3488 3488
3489 3489 if not win32.checkcertificatechain(cert):
3490 3490 ui.status(_(b'failed.\n'))
3491 3491 else:
3492 3492 ui.status(_(b'done.\n'))
3493 3493 else:
3494 3494 ui.status(_(b'full certificate chain is available\n'))
3495 3495 finally:
3496 3496 s.close()
3497 3497
3498 3498
3499 3499 @command(
3500 3500 b"debugbackupbundle",
3501 3501 [
3502 3502 (
3503 3503 b"",
3504 3504 b"recover",
3505 3505 b"",
3506 3506 b"brings the specified changeset back into the repository",
3507 3507 )
3508 3508 ]
3509 3509 + cmdutil.logopts,
3510 3510 _(b"hg debugbackupbundle [--recover HASH]"),
3511 3511 )
3512 3512 def debugbackupbundle(ui, repo, *pats, **opts):
3513 3513 """lists the changesets available in backup bundles
3514 3514
3515 3515 Without any arguments, this command prints a list of the changesets in each
3516 3516 backup bundle.
3517 3517
3518 3518 --recover takes a changeset hash and unbundles the first bundle that
3519 3519 contains that hash, which puts that changeset back in your repository.
3520 3520
3521 3521 --verbose will print the entire commit message and the bundle path for that
3522 3522 backup.
3523 3523 """
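    # Illustrative usage (the changeset hash is a placeholder):
    #   hg debugbackupbundle                          # list backed-up changesets
    #   hg debugbackupbundle --recover c3f1a9b2d4e5   # unbundle the one containing it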
3524 3524 backups = list(
3525 3525 filter(
3526 3526 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3527 3527 )
3528 3528 )
3529 3529 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3530 3530
3531 3531 opts = pycompat.byteskwargs(opts)
3532 3532 opts[b"bundle"] = b""
3533 3533 opts[b"force"] = None
3534 3534 limit = logcmdutil.getlimit(opts)
3535 3535
3536 3536 def display(other, chlist, displayer):
3537 3537 if opts.get(b"newest_first"):
3538 3538 chlist.reverse()
3539 3539 count = 0
3540 3540 for n in chlist:
3541 3541 if limit is not None and count >= limit:
3542 3542 break
3543 3543 parents = [True for p in other.changelog.parents(n) if p != nullid]
3544 3544 if opts.get(b"no_merges") and len(parents) == 2:
3545 3545 continue
3546 3546 count += 1
3547 3547 displayer.show(other[n])
3548 3548
3549 3549 recovernode = opts.get(b"recover")
3550 3550 if recovernode:
3551 3551 if scmutil.isrevsymbol(repo, recovernode):
3552 3552 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3553 3553 return
3554 3554 elif backups:
3555 3555 msg = _(
3556 3556 b"Recover changesets using: hg debugbackupbundle --recover "
3557 3557 b"<changeset hash>\n\nAvailable backup changesets:"
3558 3558 )
3559 3559 ui.status(msg, label=b"status.removed")
3560 3560 else:
3561 3561 ui.status(_(b"no backup changesets found\n"))
3562 3562 return
3563 3563
3564 3564 for backup in backups:
3565 3565 # Much of this is copied from the hg incoming logic
3566 3566 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3567 3567 source, branches = hg.parseurl(source, opts.get(b"branch"))
3568 3568 try:
3569 3569 other = hg.peer(repo, opts, source)
3570 3570 except error.LookupError as ex:
3571 3571 msg = _(b"\nwarning: unable to open bundle %s") % source
3572 3572 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3573 3573 ui.warn(msg, hint=hint)
3574 3574 continue
3575 3575 revs, checkout = hg.addbranchrevs(
3576 3576 repo, other, branches, opts.get(b"rev")
3577 3577 )
3578 3578
3579 3579 if revs:
3580 3580 revs = [other.lookup(rev) for rev in revs]
3581 3581
3582 3582 quiet = ui.quiet
3583 3583 try:
3584 3584 ui.quiet = True
3585 3585 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3586 3586 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3587 3587 )
3588 3588 except error.LookupError:
3589 3589 continue
3590 3590 finally:
3591 3591 ui.quiet = quiet
3592 3592
3593 3593 try:
3594 3594 if not chlist:
3595 3595 continue
3596 3596 if recovernode:
3597 3597 with repo.lock(), repo.transaction(b"unbundle") as tr:
3598 3598 if scmutil.isrevsymbol(other, recovernode):
3599 3599 ui.status(_(b"Unbundling %s\n") % (recovernode))
3600 3600 f = hg.openpath(ui, source)
3601 3601 gen = exchange.readbundle(ui, f, source)
3602 3602 if isinstance(gen, bundle2.unbundle20):
3603 3603 bundle2.applybundle(
3604 3604 repo,
3605 3605 gen,
3606 3606 tr,
3607 3607 source=b"unbundle",
3608 3608 url=b"bundle:" + source,
3609 3609 )
3610 3610 else:
3611 3611 gen.apply(repo, b"unbundle", b"bundle:" + source)
3612 3612 break
3613 3613 else:
3614 3614 backupdate = encoding.strtolocal(
3615 3615 time.strftime(
3616 3616 "%a %H:%M, %Y-%m-%d",
3617 3617 time.localtime(os.path.getmtime(source)),
3618 3618 )
3619 3619 )
3620 3620 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3621 3621 if ui.verbose:
3622 3622 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3623 3623 else:
3624 3624 opts[
3625 3625 b"template"
3626 3626 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3627 3627 displayer = logcmdutil.changesetdisplayer(
3628 3628 ui, other, opts, False
3629 3629 )
3630 3630 display(other, chlist, displayer)
3631 3631 displayer.close()
3632 3632 finally:
3633 3633 cleanupfn()
3634 3634
3635 3635
3636 3636 @command(
3637 3637 b'debugsub',
3638 3638 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3639 3639 _(b'[-r REV] [REV]'),
3640 3640 )
3641 3641 def debugsub(ui, repo, rev=None):
3642 3642 ctx = scmutil.revsingle(repo, rev, None)
3643 3643 for k, v in sorted(ctx.substate.items()):
3644 3644 ui.writenoi18n(b'path %s\n' % k)
3645 3645 ui.writenoi18n(b' source %s\n' % v[0])
3646 3646 ui.writenoi18n(b' revision %s\n' % v[1])
3647 3647
3648 3648
3649 3649 @command(
3650 3650 b'debugsuccessorssets',
3651 3651 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3652 3652 _(b'[REV]'),
3653 3653 )
3654 3654 def debugsuccessorssets(ui, repo, *revs, **opts):
3655 3655 """show set of successors for revision
3656 3656
3657 3657 A successors set of changeset A is a consistent group of revisions that
3658 3658 succeed A. It contains non-obsolete changesets only unless the
3659 3659 ``--closest`` option is set.
3660 3660
3661 3661 In most cases a changeset A has a single successors set containing a single
3662 3662 successor (changeset A replaced by A').
3663 3663
3664 3664 A changeset that is made obsolete with no successors is called "pruned".
3665 3665 Such changesets have no successors sets at all.
3666 3666
3667 3667 A changeset that has been "split" will have a successors set containing
3668 3668 more than one successor.
3669 3669
3670 3670 A changeset that has been rewritten in multiple different ways is called
3671 3671 "divergent". Such changesets have multiple successor sets (each of which
3672 3672 may also be split, i.e. have multiple successors).
3673 3673
3674 3674 Results are displayed as follows::
3675 3675
3676 3676 <rev1>
3677 3677 <successors-1A>
3678 3678 <rev2>
3679 3679 <successors-2A>
3680 3680 <successors-2B1> <successors-2B2> <successors-2B3>
3681 3681
3682 3682 Here rev2 has two possible (i.e. divergent) successors sets. The first
3683 3683 holds one element, whereas the second holds three (i.e. the changeset has
3684 3684 been split).
3685 3685 """
3686 3686 # passed to successorssets caching computation from one call to another
3687 3687 cache = {}
3688 3688 ctx2str = bytes
3689 3689 node2str = short
3690 3690 for rev in scmutil.revrange(repo, revs):
3691 3691 ctx = repo[rev]
3692 3692 ui.write(b'%s\n' % ctx2str(ctx))
3693 3693 for succsset in obsutil.successorssets(
3694 3694 repo, ctx.node(), closest=opts['closest'], cache=cache
3695 3695 ):
3696 3696 if succsset:
3697 3697 ui.write(b' ')
3698 3698 ui.write(node2str(succsset[0]))
3699 3699 for node in succsset[1:]:
3700 3700 ui.write(b' ')
3701 3701 ui.write(node2str(node))
3702 3702 ui.write(b'\n')
3703 3703
3704 3704
3705 3705 @command(b'debugtagscache', [])
3706 3706 def debugtagscache(ui, repo):
3707 3707 """display the contents of .hg/cache/hgtagsfnodes1"""
3708 3708 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3709 3709 for r in repo:
3710 3710 node = repo[r].node()
3711 3711 tagsnode = cache.getfnode(node, computemissing=False)
3712 3712 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3713 3713 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3714 3714
3715 3715
3716 3716 @command(
3717 3717 b'debugtemplate',
3718 3718 [
3719 3719 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3720 3720 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3721 3721 ],
3722 3722 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3723 3723 optionalrepo=True,
3724 3724 )
3725 3725 def debugtemplate(ui, repo, tmpl, **opts):
3726 3726 """parse and apply a template
3727 3727
3728 3728 If -r/--rev is given, the template is processed as a log template and
3729 3729 applied to the given changesets. Otherwise, it is processed as a generic
3730 3730 template.
3731 3731
3732 3732 Use --verbose to print the parsed tree.
3733 3733 """
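    # Illustrative examples (the keyword name and values are hypothetical):
    #   hg debugtemplate -r . '{node|short} {desc|firstline}\n'   # log template
    #   hg debugtemplate -D greeting=hi '{greeting}\n'            # generic template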
3734 3734 revs = None
3735 3735 if opts['rev']:
3736 3736 if repo is None:
3737 3737 raise error.RepoError(
3738 3738 _(b'there is no Mercurial repository here (.hg not found)')
3739 3739 )
3740 3740 revs = scmutil.revrange(repo, opts['rev'])
3741 3741
3742 3742 props = {}
3743 3743 for d in opts['define']:
3744 3744 try:
3745 3745 k, v = (e.strip() for e in d.split(b'=', 1))
3746 3746 if not k or k == b'ui':
3747 3747 raise ValueError
3748 3748 props[k] = v
3749 3749 except ValueError:
3750 3750 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3751 3751
3752 3752 if ui.verbose:
3753 3753 aliases = ui.configitems(b'templatealias')
3754 3754 tree = templater.parse(tmpl)
3755 3755 ui.note(templater.prettyformat(tree), b'\n')
3756 3756 newtree = templater.expandaliases(tree, aliases)
3757 3757 if newtree != tree:
3758 3758 ui.notenoi18n(
3759 3759 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3760 3760 )
3761 3761
3762 3762 if revs is None:
3763 3763 tres = formatter.templateresources(ui, repo)
3764 3764 t = formatter.maketemplater(ui, tmpl, resources=tres)
3765 3765 if ui.verbose:
3766 3766 kwds, funcs = t.symbolsuseddefault()
3767 3767 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3768 3768 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3769 3769 ui.write(t.renderdefault(props))
3770 3770 else:
3771 3771 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3772 3772 if ui.verbose:
3773 3773 kwds, funcs = displayer.t.symbolsuseddefault()
3774 3774 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3775 3775 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3776 3776 for r in revs:
3777 3777 displayer.show(repo[r], **pycompat.strkwargs(props))
3778 3778 displayer.close()
3779 3779
3780 3780
3781 3781 @command(
3782 3782 b'debuguigetpass',
3783 3783 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3784 3784 _(b'[-p TEXT]'),
3785 3785 norepo=True,
3786 3786 )
3787 3787 def debuguigetpass(ui, prompt=b''):
3788 3788 """show prompt to type password"""
3789 3789 r = ui.getpass(prompt)
3790 if r is not None:
3791 r = encoding.strtolocal(r)
3792 else:
3793 r = b"<default response>"
3790 3794 ui.writenoi18n(b'response: %s\n' % r)
3791 3795
3792 3796
3793 3797 @command(
3794 3798 b'debuguiprompt',
3795 3799 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3796 3800 _(b'[-p TEXT]'),
3797 3801 norepo=True,
3798 3802 )
3799 3803 def debuguiprompt(ui, prompt=b''):
3800 3804 """show plain prompt"""
3801 3805 r = ui.prompt(prompt)
3802 3806 ui.writenoi18n(b'response: %s\n' % r)
3803 3807
3804 3808
3805 3809 @command(b'debugupdatecaches', [])
3806 3810 def debugupdatecaches(ui, repo, *pats, **opts):
3807 3811 """warm all known caches in the repository"""
3808 3812 with repo.wlock(), repo.lock():
3809 3813 repo.updatecaches(full=True)
3810 3814
3811 3815
3812 3816 @command(
3813 3817 b'debugupgraderepo',
3814 3818 [
3815 3819 (
3816 3820 b'o',
3817 3821 b'optimize',
3818 3822 [],
3819 3823 _(b'extra optimization to perform'),
3820 3824 _(b'NAME'),
3821 3825 ),
3822 3826 (b'', b'run', False, _(b'performs an upgrade')),
3823 3827 (b'', b'backup', True, _(b'keep the old repository content around')),
3824 3828 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3825 3829 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3826 3830 ],
3827 3831 )
3828 3832 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3829 3833 """upgrade a repository to use different features
3830 3834
3831 3835 If no arguments are specified, the repository is evaluated for upgrade
3832 3836 and a list of problems and potential optimizations is printed.
3833 3837
3834 3838 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3835 3839 can be influenced via additional arguments. More details will be provided
3836 3840 by the command output when run without ``--run``.
3837 3841
3838 3842 During the upgrade, the repository will be locked and no writes will be
3839 3843 allowed.
3840 3844
3841 3845 At the end of the upgrade, the repository may not be readable while new
3842 3846 repository data is swapped in. This window will be as long as it takes to
3843 3847 rename some directories inside the ``.hg`` directory. On most machines, this
3844 3848 should complete almost instantaneously and the chances of a consumer being
3845 3849 unable to access the repository should be low.
3846 3850
3847 3851 By default, all revlogs will be upgraded. You can restrict this using flags
3848 3852 such as `--manifest`:
3849 3853
3850 3854 * `--manifest`: only optimize the manifest
3851 3855 * `--no-manifest`: optimize all revlogs but the manifest
3852 3856 * `--changelog`: optimize the changelog only
3853 3857 * `--no-changelog --no-manifest`: optimize filelogs only
3854 3858 """
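    # Illustrative invocations (not part of this change); boolean flags use
    # Mercurial's usual --no-<flag> negation:
    #   hg debugupgraderepo                                      # report only
    #   hg debugupgraderepo --run                                # perform upgrade
    #   hg debugupgraderepo --run --no-changelog --no-manifest   # filelogs only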
3855 3859 return upgrade.upgraderepo(
3856 3860 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3857 3861 )
3858 3862
3859 3863
3860 3864 @command(
3861 3865 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3862 3866 )
3863 3867 def debugwalk(ui, repo, *pats, **opts):
3864 3868 """show how files match on given patterns"""
3865 3869 opts = pycompat.byteskwargs(opts)
3866 3870 m = scmutil.match(repo[None], pats, opts)
3867 3871 if ui.verbose:
3868 3872 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3869 3873 items = list(repo[None].walk(m))
3870 3874 if not items:
3871 3875 return
3872 3876 f = lambda fn: fn
3873 3877 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3874 3878 f = lambda fn: util.normpath(fn)
3875 3879 fmt = b'f %%-%ds %%-%ds %%s' % (
3876 3880 max([len(abs) for abs in items]),
3877 3881 max([len(repo.pathto(abs)) for abs in items]),
3878 3882 )
3879 3883 for abs in items:
3880 3884 line = fmt % (
3881 3885 abs,
3882 3886 f(repo.pathto(abs)),
3883 3887 m.exact(abs) and b'exact' or b'',
3884 3888 )
3885 3889 ui.write(b"%s\n" % line.rstrip())
3886 3890
3887 3891
3888 3892 @command(b'debugwhyunstable', [], _(b'REV'))
3889 3893 def debugwhyunstable(ui, repo, rev):
3890 3894 """explain instabilities of a changeset"""
3891 3895 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3892 3896 dnodes = b''
3893 3897 if entry.get(b'divergentnodes'):
3894 3898 dnodes = (
3895 3899 b' '.join(
3896 3900 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3897 3901 for ctx in entry[b'divergentnodes']
3898 3902 )
3899 3903 + b' '
3900 3904 )
3901 3905 ui.write(
3902 3906 b'%s: %s%s %s\n'
3903 3907 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3904 3908 )
3905 3909
3906 3910
3907 3911 @command(
3908 3912 b'debugwireargs',
3909 3913 [
3910 3914 (b'', b'three', b'', b'three'),
3911 3915 (b'', b'four', b'', b'four'),
3912 3916 (b'', b'five', b'', b'five'),
3913 3917 ]
3914 3918 + cmdutil.remoteopts,
3915 3919 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3916 3920 norepo=True,
3917 3921 )
3918 3922 def debugwireargs(ui, repopath, *vals, **opts):
3919 3923 opts = pycompat.byteskwargs(opts)
3920 3924 repo = hg.peer(ui, opts, repopath)
3921 3925 for opt in cmdutil.remoteopts:
3922 3926 del opts[opt[1]]
3923 3927 args = {}
3924 3928 for k, v in pycompat.iteritems(opts):
3925 3929 if v:
3926 3930 args[k] = v
3927 3931 args = pycompat.strkwargs(args)
3928 3932 # run twice to check that we don't mess up the stream for the next command
3929 3933 res1 = repo.debugwireargs(*vals, **args)
3930 3934 res2 = repo.debugwireargs(*vals, **args)
3931 3935 ui.write(b"%s\n" % res1)
3932 3936 if res1 != res2:
3933 3937 ui.warn(b"%s\n" % res2)
3934 3938
3935 3939
3936 3940 def _parsewirelangblocks(fh):
3937 3941 activeaction = None
3938 3942 blocklines = []
3939 3943 lastindent = 0
3940 3944
3941 3945 for line in fh:
3942 3946 line = line.rstrip()
3943 3947 if not line:
3944 3948 continue
3945 3949
3946 3950 if line.startswith(b'#'):
3947 3951 continue
3948 3952
3949 3953 if not line.startswith(b' '):
3950 3954 # New block. Flush previous one.
3951 3955 if activeaction:
3952 3956 yield activeaction, blocklines
3953 3957
3954 3958 activeaction = line
3955 3959 blocklines = []
3956 3960 lastindent = 0
3957 3961 continue
3958 3962
3959 3963 # Else we start with an indent.
3960 3964
3961 3965 if not activeaction:
3962 3966 raise error.Abort(_(b'indented line outside of block'))
3963 3967
3964 3968 indent = len(line) - len(line.lstrip())
3965 3969
3966 3970 # If this line is indented more than the last line, concatenate it.
3967 3971 if indent > lastindent and blocklines:
3968 3972 blocklines[-1] += line.lstrip()
3969 3973 else:
3970 3974 blocklines.append(line)
3971 3975 lastindent = indent
3972 3976
3973 3977 # Flush last block.
3974 3978 if activeaction:
3975 3979 yield activeaction, blocklines
3976 3980
3977 3981
3978 3982 @command(
3979 3983 b'debugwireproto',
3980 3984 [
3981 3985 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3982 3986 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3983 3987 (
3984 3988 b'',
3985 3989 b'noreadstderr',
3986 3990 False,
3987 3991 _(b'do not read from stderr of the remote'),
3988 3992 ),
3989 3993 (
3990 3994 b'',
3991 3995 b'nologhandshake',
3992 3996 False,
3993 3997 _(b'do not log I/O related to the peer handshake'),
3994 3998 ),
3995 3999 ]
3996 4000 + cmdutil.remoteopts,
3997 4001 _(b'[PATH]'),
3998 4002 optionalrepo=True,
3999 4003 )
4000 4004 def debugwireproto(ui, repo, path=None, **opts):
4001 4005 """send wire protocol commands to a server
4002 4006
4003 4007 This command can be used to issue wire protocol commands to remote
4004 4008 peers and to debug the raw data being exchanged.
4005 4009
4006 4010 ``--localssh`` will start an SSH server against the current repository
4007 4011 and connect to that. By default, the connection will perform a handshake
4008 4012 and establish an appropriate peer instance.
4009 4013
4010 4014 ``--peer`` can be used to bypass the handshake protocol and construct a
4011 4015 peer instance using the specified class type. Valid values are ``raw``,
4012 4016 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4013 4017 raw data payloads and don't support higher-level command actions.
4014 4018
4015 4019 ``--noreadstderr`` can be used to disable automatic reading from stderr
4016 4020 of the peer (for SSH connections only). Disabling automatic reading of
4017 4021 stderr is useful for making output more deterministic.
4018 4022
4019 4023 Commands are issued via a mini language which is specified via stdin.
4020 4024 The language consists of individual actions to perform. An action is
4021 4025 defined by a block. A block is defined as a line with no leading
4022 4026 space followed by 0 or more lines with leading space. Blocks are
4023 4027 effectively a high-level command with additional metadata.
4024 4028
4025 4029 Lines beginning with ``#`` are ignored.
4026 4030
4027 4031 The following sections denote available actions.
4028 4032
4029 4033 raw
4030 4034 ---
4031 4035
4032 4036 Send raw data to the server.
4033 4037
4034 4038 The block payload contains the raw data to send as one atomic send
4035 4039 operation. The data may not actually be delivered in a single system
4036 4040 call: it depends on the abilities of the transport being used.
4037 4041
4038 4042 Each line in the block is de-indented and concatenated. Then, that
4039 4043 value is evaluated as a Python b'' literal. This allows the use of
4040 4044 backslash escaping, etc.
4041 4045
4042 4046 raw+
4043 4047 ----
4044 4048
4045 4049 Behaves like ``raw`` except flushes output afterwards.
4046 4050
4047 4051 command <X>
4048 4052 -----------
4049 4053
4050 4054 Send a request to run a named command, whose name follows the ``command``
4051 4055 string.
4052 4056
4053 4057 Arguments to the command are defined as lines in this block. The format of
4054 4058 each line is ``<key> <value>``. e.g.::
4055 4059
4056 4060 command listkeys
4057 4061 namespace bookmarks
4058 4062
4059 4063 If the value begins with ``eval:``, it will be interpreted as a Python
4060 4064 literal expression. Otherwise values are interpreted as Python b'' literals.
4061 4065 This allows sending complex types and encoding special byte sequences via
4062 4066 backslash escaping.
4063 4067
4064 4068 The following arguments have special meaning:
4065 4069
4066 4070 ``PUSHFILE``
4067 4071 When defined, the *push* mechanism of the peer will be used instead
4068 4072 of the static request-response mechanism and the content of the
4069 4073 file specified in the value of this argument will be sent as the
4070 4074 command payload.
4071 4075
4072 4076 This can be used to submit a local bundle file to the remote.
4073 4077
4074 4078 batchbegin
4075 4079 ----------
4076 4080
4077 4081 Instruct the peer to begin a batched send.
4078 4082
4079 4083 All ``command`` blocks are queued for execution until the next
4080 4084 ``batchsubmit`` block.
4081 4085
4082 4086 batchsubmit
4083 4087 -----------
4084 4088
4085 4089 Submit previously queued ``command`` blocks as a batch request.
4086 4090
4087 4091 This action MUST be paired with a ``batchbegin`` action.
4088 4092
4089 4093 httprequest <method> <path>
4090 4094 ---------------------------
4091 4095
4092 4096 (HTTP peer only)
4093 4097
4094 4098 Send an HTTP request to the peer.
4095 4099
4096 4100 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4097 4101
4098 4102 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4099 4103 headers to add to the request. e.g. ``Accept: foo``.
4100 4104
4101 4105 The following arguments are special:
4102 4106
4103 4107 ``BODYFILE``
4104 4108 The content of the file defined as the value to this argument will be
4105 4109 transferred verbatim as the HTTP request body.
4106 4110
4107 4111 ``frame <type> <flags> <payload>``
4108 4112 Send a unified protocol frame as part of the request body.
4109 4113
4110 4114 All frames will be collected and sent as the body to the HTTP
4111 4115 request.
4112 4116
4113 4117 close
4114 4118 -----
4115 4119
4116 4120 Close the connection to the server.
4117 4121
4118 4122 flush
4119 4123 -----
4120 4124
4121 4125 Flush data written to the server.
4122 4126
4123 4127 readavailable
4124 4128 -------------
4125 4129
4126 4130 Close the write end of the connection and read all available data from
4127 4131 the server.
4128 4132
4129 4133 If the connection to the server encompasses multiple pipes, we poll both
4130 4134 pipes and read available data.
4131 4135
4132 4136 readline
4133 4137 --------
4134 4138
4135 4139 Read a line of output from the server. If there are multiple output
4136 4140 pipes, reads only the main pipe.
4137 4141
4138 4142 ereadline
4139 4143 ---------
4140 4144
4141 4145 Like ``readline``, but read from the stderr pipe, if available.
4142 4146
4143 4147 read <X>
4144 4148 --------
4145 4149
4146 4150 ``read()`` N bytes from the server's main output pipe.
4147 4151
4148 4152 eread <X>
4149 4153 ---------
4150 4154
4151 4155 ``read()`` N bytes from the server's stderr pipe, if available.
4152 4156
4153 4157 Specifying Unified Frame-Based Protocol Frames
4154 4158 ----------------------------------------------
4155 4159
4156 4160 It is possible to emit a *Unified Frame-Based Protocol* by using special
4157 4161 syntax.
4158 4162
4159 4163 A frame is composed as a type, flags, and payload. These can be parsed
4160 4164 from a string of the form:
4161 4165
4162 4166 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4163 4167
4164 4168 ``request-id`` and ``stream-id`` are integers defining the request and
4165 4169 stream identifiers.
4166 4170
4167 4171 ``type`` can be an integer value for the frame type or the string name
4168 4172 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4169 4173 ``command-name``.
4170 4174
4171 4175 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4172 4176 components. Each component (and there can be just one) can be an integer
4173 4177 or a flag name for stream flags or frame flags, respectively. Values are
4174 4178 resolved to integers and then bitwise OR'd together.
4175 4179
4176 4180 ``payload`` represents the raw frame payload. If it begins with
4177 4181 ``cbor:``, the following string is evaluated as Python code and the
4178 4182 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4179 4183 as a Python byte string literal.
4180 4184 """
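    # A minimal, illustrative script for the mini language above, fed on stdin.
    # The listkeys/heads commands and bookmarks/phases namespaces mirror the
    # docstring and standard wire protocol commands; actual output depends on
    # the peer being talked to.
    #   command listkeys
    #       namespace bookmarks
    #   batchbegin
    #   command heads
    #   command listkeys
    #       namespace phases
    #   batchsubmit
    #   close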
4181 4185 opts = pycompat.byteskwargs(opts)
4182 4186
4183 4187 if opts[b'localssh'] and not repo:
4184 4188 raise error.Abort(_(b'--localssh requires a repository'))
4185 4189
4186 4190 if opts[b'peer'] and opts[b'peer'] not in (
4187 4191 b'raw',
4188 4192 b'http2',
4189 4193 b'ssh1',
4190 4194 b'ssh2',
4191 4195 ):
4192 4196 raise error.Abort(
4193 4197 _(b'invalid value for --peer'),
4194 4198 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4195 4199 )
4196 4200
4197 4201 if path and opts[b'localssh']:
4198 4202 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4199 4203
4200 4204 if ui.interactive():
4201 4205 ui.write(_(b'(waiting for commands on stdin)\n'))
4202 4206
4203 4207 blocks = list(_parsewirelangblocks(ui.fin))
4204 4208
4205 4209 proc = None
4206 4210 stdin = None
4207 4211 stdout = None
4208 4212 stderr = None
4209 4213 opener = None
4210 4214
4211 4215 if opts[b'localssh']:
4212 4216 # We start the SSH server in its own process so there is process
4213 4217 # separation. This prevents a whole class of potential bugs around
4214 4218 # shared state from interfering with server operation.
4215 4219 args = procutil.hgcmd() + [
4216 4220 b'-R',
4217 4221 repo.root,
4218 4222 b'debugserve',
4219 4223 b'--sshstdio',
4220 4224 ]
4221 4225 proc = subprocess.Popen(
4222 4226 pycompat.rapply(procutil.tonativestr, args),
4223 4227 stdin=subprocess.PIPE,
4224 4228 stdout=subprocess.PIPE,
4225 4229 stderr=subprocess.PIPE,
4226 4230 bufsize=0,
4227 4231 )
4228 4232
4229 4233 stdin = proc.stdin
4230 4234 stdout = proc.stdout
4231 4235 stderr = proc.stderr
4232 4236
4233 4237 # We turn the pipes into observers so we can log I/O.
4234 4238 if ui.verbose or opts[b'peer'] == b'raw':
4235 4239 stdin = util.makeloggingfileobject(
4236 4240 ui, proc.stdin, b'i', logdata=True
4237 4241 )
4238 4242 stdout = util.makeloggingfileobject(
4239 4243 ui, proc.stdout, b'o', logdata=True
4240 4244 )
4241 4245 stderr = util.makeloggingfileobject(
4242 4246 ui, proc.stderr, b'e', logdata=True
4243 4247 )
4244 4248
4245 4249 # --localssh also implies the peer connection settings.
4246 4250
4247 4251 url = b'ssh://localserver'
4248 4252 autoreadstderr = not opts[b'noreadstderr']
4249 4253
4250 4254 if opts[b'peer'] == b'ssh1':
4251 4255 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4252 4256 peer = sshpeer.sshv1peer(
4253 4257 ui,
4254 4258 url,
4255 4259 proc,
4256 4260 stdin,
4257 4261 stdout,
4258 4262 stderr,
4259 4263 None,
4260 4264 autoreadstderr=autoreadstderr,
4261 4265 )
4262 4266 elif opts[b'peer'] == b'ssh2':
4263 4267 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4264 4268 peer = sshpeer.sshv2peer(
4265 4269 ui,
4266 4270 url,
4267 4271 proc,
4268 4272 stdin,
4269 4273 stdout,
4270 4274 stderr,
4271 4275 None,
4272 4276 autoreadstderr=autoreadstderr,
4273 4277 )
4274 4278 elif opts[b'peer'] == b'raw':
4275 4279 ui.write(_(b'using raw connection to peer\n'))
4276 4280 peer = None
4277 4281 else:
4278 4282 ui.write(_(b'creating ssh peer from handshake results\n'))
4279 4283 peer = sshpeer.makepeer(
4280 4284 ui,
4281 4285 url,
4282 4286 proc,
4283 4287 stdin,
4284 4288 stdout,
4285 4289 stderr,
4286 4290 autoreadstderr=autoreadstderr,
4287 4291 )
4288 4292
4289 4293 elif path:
4290 4294 # We bypass hg.peer() so we can proxy the sockets.
4291 4295 # TODO consider not doing this because we skip
4292 4296 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4293 4297 u = util.url(path)
4294 4298 if u.scheme != b'http':
4295 4299 raise error.Abort(_(b'only http:// paths are currently supported'))
4296 4300
4297 4301 url, authinfo = u.authinfo()
4298 4302 openerargs = {
4299 4303 'useragent': b'Mercurial debugwireproto',
4300 4304 }
4301 4305
4302 4306 # Turn pipes/sockets into observers so we can log I/O.
4303 4307 if ui.verbose:
4304 4308 openerargs.update(
4305 4309 {
4306 4310 'loggingfh': ui,
4307 4311 'loggingname': b's',
4308 4312 'loggingopts': {'logdata': True, 'logdataapis': False,},
4309 4313 }
4310 4314 )
4311 4315
4312 4316 if ui.debugflag:
4313 4317 openerargs['loggingopts']['logdataapis'] = True
4314 4318
4315 4319 # Don't send default headers when in raw mode. This allows us to
4316 4320 # bypass most of the behavior of our URL handling code so we can
4317 4321 # have near complete control over what's sent on the wire.
4318 4322 if opts[b'peer'] == b'raw':
4319 4323 openerargs['sendaccept'] = False
4320 4324
4321 4325 opener = urlmod.opener(ui, authinfo, **openerargs)
4322 4326
4323 4327 if opts[b'peer'] == b'http2':
4324 4328 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4325 4329 # We go through makepeer() because we need an API descriptor for
4326 4330 # the peer instance to be useful.
4327 4331 with ui.configoverride(
4328 4332 {(b'experimental', b'httppeer.advertise-v2'): True}
4329 4333 ):
4330 4334 if opts[b'nologhandshake']:
4331 4335 ui.pushbuffer()
4332 4336
4333 4337 peer = httppeer.makepeer(ui, path, opener=opener)
4334 4338
4335 4339 if opts[b'nologhandshake']:
4336 4340 ui.popbuffer()
4337 4341
4338 4342 if not isinstance(peer, httppeer.httpv2peer):
4339 4343 raise error.Abort(
4340 4344 _(
4341 4345 b'could not instantiate HTTP peer for '
4342 4346 b'wire protocol version 2'
4343 4347 ),
4344 4348 hint=_(
4345 4349 b'the server may not have the feature '
4346 4350 b'enabled or is not allowing this '
4347 4351 b'client version'
4348 4352 ),
4349 4353 )
4350 4354
4351 4355 elif opts[b'peer'] == b'raw':
4352 4356 ui.write(_(b'using raw connection to peer\n'))
4353 4357 peer = None
4354 4358 elif opts[b'peer']:
4355 4359 raise error.Abort(
4356 4360 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4357 4361 )
4358 4362 else:
4359 4363 peer = httppeer.makepeer(ui, path, opener=opener)
4360 4364
4361 4365 # We /could/ populate stdin/stdout with sock.makefile()...
4362 4366 else:
4363 4367 raise error.Abort(_(b'unsupported connection configuration'))
4364 4368
4365 4369 batchedcommands = None
4366 4370
4367 4371 # Now perform actions based on the parsed wire language instructions.
4368 4372 for action, lines in blocks:
4369 4373 if action in (b'raw', b'raw+'):
4370 4374 if not stdin:
4371 4375 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4372 4376
4373 4377 # Concatenate the data together.
4374 4378 data = b''.join(l.lstrip() for l in lines)
4375 4379 data = stringutil.unescapestr(data)
4376 4380 stdin.write(data)
4377 4381
4378 4382 if action == b'raw+':
4379 4383 stdin.flush()
4380 4384 elif action == b'flush':
4381 4385 if not stdin:
4382 4386 raise error.Abort(_(b'cannot call flush on this peer'))
4383 4387 stdin.flush()
4384 4388 elif action.startswith(b'command'):
4385 4389 if not peer:
4386 4390 raise error.Abort(
4387 4391 _(
4388 4392 b'cannot send commands unless peer instance '
4389 4393 b'is available'
4390 4394 )
4391 4395 )
4392 4396
4393 4397 command = action.split(b' ', 1)[1]
4394 4398
4395 4399 args = {}
4396 4400 for line in lines:
4397 4401 # We need to allow empty values.
4398 4402 fields = line.lstrip().split(b' ', 1)
4399 4403 if len(fields) == 1:
4400 4404 key = fields[0]
4401 4405 value = b''
4402 4406 else:
4403 4407 key, value = fields
4404 4408
4405 4409 if value.startswith(b'eval:'):
4406 4410 value = stringutil.evalpythonliteral(value[5:])
4407 4411 else:
4408 4412 value = stringutil.unescapestr(value)
4409 4413
4410 4414 args[key] = value
4411 4415
4412 4416 if batchedcommands is not None:
4413 4417 batchedcommands.append((command, args))
4414 4418 continue
4415 4419
4416 4420 ui.status(_(b'sending %s command\n') % command)
4417 4421
4418 4422 if b'PUSHFILE' in args:
4419 4423 with open(args[b'PUSHFILE'], 'rb') as fh:
4420 4424 del args[b'PUSHFILE']
4421 4425 res, output = peer._callpush(
4422 4426 command, fh, **pycompat.strkwargs(args)
4423 4427 )
4424 4428 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4425 4429 ui.status(
4426 4430 _(b'remote output: %s\n') % stringutil.escapestr(output)
4427 4431 )
4428 4432 else:
4429 4433 with peer.commandexecutor() as e:
4430 4434 res = e.callcommand(command, args).result()
4431 4435
4432 4436 if isinstance(res, wireprotov2peer.commandresponse):
4433 4437 val = res.objects()
4434 4438 ui.status(
4435 4439 _(b'response: %s\n')
4436 4440 % stringutil.pprint(val, bprefix=True, indent=2)
4437 4441 )
4438 4442 else:
4439 4443 ui.status(
4440 4444 _(b'response: %s\n')
4441 4445 % stringutil.pprint(res, bprefix=True, indent=2)
4442 4446 )
4443 4447
4444 4448 elif action == b'batchbegin':
4445 4449 if batchedcommands is not None:
4446 4450 raise error.Abort(_(b'nested batchbegin not allowed'))
4447 4451
4448 4452 batchedcommands = []
4449 4453 elif action == b'batchsubmit':
4450 4454 # There is a batching API we could go through. But it would be
4451 4455 # difficult to normalize requests into function calls. It is easier
4452 4456 # to bypass this layer and normalize to commands + args.
4453 4457 ui.status(
4454 4458 _(b'sending batch with %d sub-commands\n')
4455 4459 % len(batchedcommands)
4456 4460 )
4457 4461 assert peer is not None
4458 4462 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4459 4463 ui.status(
4460 4464 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4461 4465 )
4462 4466
4463 4467 batchedcommands = None
4464 4468
4465 4469 elif action.startswith(b'httprequest '):
4466 4470 if not opener:
4467 4471 raise error.Abort(
4468 4472 _(b'cannot use httprequest without an HTTP peer')
4469 4473 )
4470 4474
4471 4475 request = action.split(b' ', 2)
4472 4476 if len(request) != 3:
4473 4477 raise error.Abort(
4474 4478 _(
4475 4479 b'invalid httprequest: expected format is '
4476 4480 b'"httprequest <method> <path>'
4477 4481 )
4478 4482 )
4479 4483
4480 4484 method, httppath = request[1:]
4481 4485 headers = {}
4482 4486 body = None
4483 4487 frames = []
4484 4488 for line in lines:
4485 4489 line = line.lstrip()
4486 4490 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4487 4491 if m:
4488 4492 # Headers need to use native strings.
4489 4493 key = pycompat.strurl(m.group(1))
4490 4494 value = pycompat.strurl(m.group(2))
4491 4495 headers[key] = value
4492 4496 continue
4493 4497
4494 4498 if line.startswith(b'BODYFILE '):
4495 4499 with open(line.split(b' ', 1)[1], b'rb') as fh:
4496 4500 body = fh.read()
4497 4501 elif line.startswith(b'frame '):
4498 4502 frame = wireprotoframing.makeframefromhumanstring(
4499 4503 line[len(b'frame ') :]
4500 4504 )
4501 4505
4502 4506 frames.append(frame)
4503 4507 else:
4504 4508 raise error.Abort(
4505 4509 _(b'unknown argument to httprequest: %s') % line
4506 4510 )
4507 4511
4508 4512 url = path + httppath
4509 4513
4510 4514 if frames:
4511 4515 body = b''.join(bytes(f) for f in frames)
4512 4516
4513 4517 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4514 4518
4515 4519 # urllib.Request insists on using has_data() as a proxy for
4516 4520 # determining the request method. Override that to use our
4517 4521 # explicitly requested method.
4518 4522 req.get_method = lambda: pycompat.sysstr(method)
4519 4523
4520 4524 try:
4521 4525 res = opener.open(req)
4522 4526 body = res.read()
4523 4527 except util.urlerr.urlerror as e:
4524 4528 # read() method must be called, but only exists in Python 2
4525 4529 getattr(e, 'read', lambda: None)()
4526 4530 continue
4527 4531
4528 4532 ct = res.headers.get('Content-Type')
4529 4533 if ct == 'application/mercurial-cbor':
4530 4534 ui.write(
4531 4535 _(b'cbor> %s\n')
4532 4536 % stringutil.pprint(
4533 4537 cborutil.decodeall(body), bprefix=True, indent=2
4534 4538 )
4535 4539 )
4536 4540
4537 4541 elif action == b'close':
4538 4542 assert peer is not None
4539 4543 peer.close()
4540 4544 elif action == b'readavailable':
4541 4545 if not stdout or not stderr:
4542 4546 raise error.Abort(
4543 4547 _(b'readavailable not available on this peer')
4544 4548 )
4545 4549
4546 4550 stdin.close()
4547 4551 stdout.read()
4548 4552 stderr.read()
4549 4553
4550 4554 elif action == b'readline':
4551 4555 if not stdout:
4552 4556 raise error.Abort(_(b'readline not available on this peer'))
4553 4557 stdout.readline()
4554 4558 elif action == b'ereadline':
4555 4559 if not stderr:
4556 4560 raise error.Abort(_(b'ereadline not available on this peer'))
4557 4561 stderr.readline()
4558 4562 elif action.startswith(b'read '):
4559 4563 count = int(action.split(b' ', 1)[1])
4560 4564 if not stdout:
4561 4565 raise error.Abort(_(b'read not available on this peer'))
4562 4566 stdout.read(count)
4563 4567 elif action.startswith(b'eread '):
4564 4568 count = int(action.split(b' ', 1)[1])
4565 4569 if not stderr:
4566 4570 raise error.Abort(_(b'eread not available on this peer'))
4567 4571 stderr.read(count)
4568 4572 else:
4569 4573 raise error.Abort(_(b'unknown action: %s') % action)
4570 4574
4571 4575 if batchedcommands is not None:
4572 4576 raise error.Abort(_(b'unclosed "batchbegin" request'))
4573 4577
4574 4578 if peer:
4575 4579 peer.close()
4576 4580
4577 4581 if proc:
4578 4582 proc.kill()
@@ -1,517 +1,519 b''
1 1 # mail.py - mail sending bits for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import email
11 11 import email.charset
12 12 import email.generator
13 13 import email.header
14 14 import email.message
15 15 import email.parser
16 16 import io
17 17 import os
18 18 import smtplib
19 19 import socket
20 20 import time
21 21
22 22 from .i18n import _
23 23 from .pycompat import (
24 24 getattr,
25 25 open,
26 26 )
27 27 from . import (
28 28 encoding,
29 29 error,
30 30 pycompat,
31 31 sslutil,
32 32 util,
33 33 )
34 34 from .utils import (
35 35 procutil,
36 36 stringutil,
37 37 )
38 38
39 39 if pycompat.TYPE_CHECKING:
40 40 from typing import Any, List, Tuple, Union
41 41
42 42 # keep pyflakes happy
43 43 assert all((Any, List, Tuple, Union))
44 44
45 45
46 46 class STARTTLS(smtplib.SMTP):
47 47 '''Derived class to verify the peer certificate for STARTTLS.
48 48
49 49 This class allows passing any keyword arguments to SSL socket creation.
50 50 '''
51 51
52 52 def __init__(self, ui, host=None, **kwargs):
53 53 smtplib.SMTP.__init__(self, **kwargs)
54 54 self._ui = ui
55 55 self._host = host
56 56
57 57 def starttls(self, keyfile=None, certfile=None):
58 58 if not self.has_extn("starttls"):
59 59 msg = b"STARTTLS extension not supported by server"
60 60 raise smtplib.SMTPException(msg)
61 61 (resp, reply) = self.docmd("STARTTLS")
62 62 if resp == 220:
63 63 self.sock = sslutil.wrapsocket(
64 64 self.sock,
65 65 keyfile,
66 66 certfile,
67 67 ui=self._ui,
68 68 serverhostname=self._host,
69 69 )
70 70 self.file = self.sock.makefile("rb")
71 71 self.helo_resp = None
72 72 self.ehlo_resp = None
73 73 self.esmtp_features = {}
74 74 self.does_esmtp = 0
75 75 return (resp, reply)
76 76
77 77
78 78 class SMTPS(smtplib.SMTP):
79 79 '''Derived class to verify the peer certificate for SMTPS.
80 80
81 81 This class allows passing any keyword arguments to SSL socket creation.
82 82 '''
83 83
84 84 def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
85 85 self.keyfile = keyfile
86 86 self.certfile = certfile
87 87 smtplib.SMTP.__init__(self, **kwargs)
88 88 self._host = host
89 89 self.default_port = smtplib.SMTP_SSL_PORT
90 90 self._ui = ui
91 91
92 92 def _get_socket(self, host, port, timeout):
93 93 if self.debuglevel > 0:
94 94 self._ui.debug(b'connect: %r\n' % ((host, port),))
95 95 new_socket = socket.create_connection((host, port), timeout)
96 96 new_socket = sslutil.wrapsocket(
97 97 new_socket,
98 98 self.keyfile,
99 99 self.certfile,
100 100 ui=self._ui,
101 101 serverhostname=self._host,
102 102 )
103 103 self.file = new_socket.makefile('rb')
104 104 return new_socket
105 105
106 106
107 107 def _pyhastls():
108 108 # type: () -> bool
109 109 """Returns true iff Python has TLS support, false otherwise."""
110 110 try:
111 111 import ssl
112 112
113 113 getattr(ssl, 'HAS_TLS', False)
114 114 return True
115 115 except ImportError:
116 116 return False
117 117
118 118
119 119 def _smtp(ui):
120 120 '''build an smtp connection and return a function to send mail'''
121 121 local_hostname = ui.config(b'smtp', b'local_hostname')
122 122 tls = ui.config(b'smtp', b'tls')
123 123 # backward compatible: when tls = true, we use starttls.
124 124 starttls = tls == b'starttls' or stringutil.parsebool(tls)
125 125 smtps = tls == b'smtps'
126 126 if (starttls or smtps) and not _pyhastls():
127 127 raise error.Abort(_(b"can't use TLS: Python SSL support not installed"))
128 128 mailhost = ui.config(b'smtp', b'host')
129 129 if not mailhost:
130 130 raise error.Abort(_(b'smtp.host not configured - cannot send mail'))
131 131 if smtps:
132 132 ui.note(_(b'(using smtps)\n'))
133 133 s = SMTPS(ui, local_hostname=local_hostname, host=mailhost)
134 134 elif starttls:
135 135 s = STARTTLS(ui, local_hostname=local_hostname, host=mailhost)
136 136 else:
137 137 s = smtplib.SMTP(local_hostname=local_hostname)
138 138 if smtps:
139 139 defaultport = 465
140 140 else:
141 141 defaultport = 25
142 142 mailport = util.getport(ui.config(b'smtp', b'port', defaultport))
143 143 ui.note(_(b'sending mail: smtp host %s, port %d\n') % (mailhost, mailport))
144 144 s.connect(host=mailhost, port=mailport)
145 145 if starttls:
146 146 ui.note(_(b'(using starttls)\n'))
147 147 s.ehlo()
148 148 s.starttls()
149 149 s.ehlo()
150 150 if starttls or smtps:
151 151 ui.note(_(b'(verifying remote certificate)\n'))
152 152 sslutil.validatesocket(s.sock)
153 153 username = ui.config(b'smtp', b'username')
154 154 password = ui.config(b'smtp', b'password')
155 155 if username:
156 156 if password:
157 157 password = encoding.strfromlocal(password)
158 158 else:
159 159 password = ui.getpass()
160 if password is not None:
161 password = encoding.strfromlocal(password)
160 162 if username and password:
161 163 ui.note(_(b'(authenticating to mail server as %s)\n') % username)
162 164 username = encoding.strfromlocal(username)
163 165 try:
164 166 s.login(username, password)
165 167 except smtplib.SMTPException as inst:
166 168 raise error.Abort(inst)
167 169
168 170 def send(sender, recipients, msg):
169 171 try:
170 172 return s.sendmail(sender, recipients, msg)
171 173 except smtplib.SMTPRecipientsRefused as inst:
172 174 recipients = [r[1] for r in inst.recipients.values()]
173 175 raise error.Abort(b'\n' + b'\n'.join(recipients))
174 176 except smtplib.SMTPException as inst:
175 177 raise error.Abort(inst)
176 178
177 179 return send
178 180
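# For reference, a hedged sketch of a configuration that reaches the SMTPS
# branch above (the host and username are placeholders, not defaults):
#
#   [email]
#   method = smtp
#   [smtp]
#   host = mail.example.com
#   port = 465
#   tls = smtps
#   username = someuser
#
# With smtp.username set but no smtp.password, ui.getpass() above prompts
# interactively, and the new `is not None` check converts the bytes result to
# a native str before login.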
179 181
180 182 def _sendmail(ui, sender, recipients, msg):
181 183 '''send mail using sendmail.'''
182 184 program = ui.config(b'email', b'method')
183 185
184 186 def stremail(x):
185 187 return procutil.shellquote(stringutil.email(encoding.strtolocal(x)))
186 188
187 189 cmdline = b'%s -f %s %s' % (
188 190 program,
189 191 stremail(sender),
190 192 b' '.join(map(stremail, recipients)),
191 193 )
192 194 ui.note(_(b'sending mail: %s\n') % cmdline)
193 195 fp = procutil.popen(cmdline, b'wb')
194 196 fp.write(util.tonativeeol(msg))
195 197 ret = fp.close()
196 198 if ret:
197 199 raise error.Abort(
198 200 b'%s %s'
199 201 % (
200 202 os.path.basename(procutil.shellsplit(program)[0]),
201 203 procutil.explainexit(ret),
202 204 )
203 205 )
204 206
205 207
206 208 def _mbox(mbox, sender, recipients, msg):
207 209 '''write mails to mbox'''
208 210 fp = open(mbox, b'ab+')
209 211 # Should be time.asctime(), but Windows prints 2-characters day
210 212 # of month instead of one. Make them print the same thing.
211 213 date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
212 214 fp.write(
213 215 b'From %s %s\n'
214 216 % (encoding.strtolocal(sender), encoding.strtolocal(date))
215 217 )
216 218 fp.write(msg)
217 219 fp.write(b'\n\n')
218 220 fp.close()
219 221
220 222
221 223 def connect(ui, mbox=None):
222 224 '''make a mail connection. return a function to send mail.
223 225 call as sendmail(sender, list-of-recipients, msg).'''
224 226 if mbox:
225 227 open(mbox, b'wb').close()
226 228 return lambda s, r, m: _mbox(mbox, s, r, m)
227 229 if ui.config(b'email', b'method') == b'smtp':
228 230 return _smtp(ui)
229 231 return lambda s, r, m: _sendmail(ui, s, r, m)
230 232
231 233
232 234 def sendmail(ui, sender, recipients, msg, mbox=None):
233 235 send = connect(ui, mbox=mbox)
234 236 return send(sender, recipients, msg)
235 237
236 238
237 239 def validateconfig(ui):
238 240 '''determine if we have enough config data to try sending email.'''
239 241 method = ui.config(b'email', b'method')
240 242 if method == b'smtp':
241 243 if not ui.config(b'smtp', b'host'):
242 244 raise error.Abort(
243 245 _(
244 246 b'smtp specified as email transport, '
245 247 b'but no smtp host configured'
246 248 )
247 249 )
248 250 else:
249 251 if not procutil.findexe(method):
250 252 raise error.Abort(
251 253 _(b'%r specified as email transport, but not in PATH') % method
252 254 )
253 255
254 256
255 257 def codec2iana(cs):
256 258 # type: (str) -> str
257 259 '''Map a Python codec name to the corresponding IANA charset name.'''
258 260 cs = email.charset.Charset(cs).input_charset.lower()
259 261
260 262 # "latin1" normalizes to "iso8859-1", standard calls for "iso-8859-1"
261 263 if cs.startswith("iso") and not cs.startswith("iso-"):
262 264 return "iso-" + cs[3:]
263 265 return cs
264 266
265 267
266 268 def mimetextpatch(s, subtype='plain', display=False):
267 269 # type: (bytes, str, bool) -> email.message.Message
268 270 '''Return MIME message suitable for a patch.
269 271 Charset will be detected by first trying to decode as us-ascii, then utf-8,
270 272 and finally the global encodings. If all those fail, fall back to
271 273 ISO-8859-1, an encoding that allows all byte sequences.
272 274 Transfer encodings will be used if necessary.'''
273 275
274 276 cs = [
275 277 'us-ascii',
276 278 'utf-8',
277 279 pycompat.sysstr(encoding.encoding),
278 280 pycompat.sysstr(encoding.fallbackencoding),
279 281 ]
280 282 if display:
281 283 cs = ['us-ascii']
282 284 for charset in cs:
283 285 try:
284 286 s.decode(charset)
285 287 return mimetextqp(s, subtype, codec2iana(charset))
286 288 except UnicodeDecodeError:
287 289 pass
288 290
289 291 return mimetextqp(s, subtype, "iso-8859-1")
290 292
291 293
292 294 def mimetextqp(body, subtype, charset):
293 295 # type: (bytes, str, str) -> email.message.Message
294 296 '''Return MIME message.
295 297 Quoted-printable transfer encoding will be used if necessary.
296 298 '''
297 299 cs = email.charset.Charset(charset)
298 300 msg = email.message.Message()
299 301 msg.set_type('text/' + subtype)
300 302
301 303 for line in body.splitlines():
302 304 if len(line) > 950:
303 305 cs.body_encoding = email.charset.QP
304 306 break
305 307
306 308 # On Python 2, this simply assigns a value. Python 3 inspects
307 309 # body and does different things depending on whether it has
308 310 # encode() or decode() attributes. We can get the old behavior
309 311 # if we pass a str and charset is None and we call set_charset().
310 312 # But we may get into trouble later due to Python attempting to
311 313 # encode/decode using the registered charset (or attempting to
312 314 # use ascii in the absence of a charset).
313 315 msg.set_payload(body, cs)
314 316
315 317 return msg
316 318
317 319
318 320 def _charsets(ui):
319 321 # type: (Any) -> List[str]
320 322 '''Obtains charsets to send mail parts not containing patches.'''
321 323 charsets = [
322 324 pycompat.sysstr(cs.lower())
323 325 for cs in ui.configlist(b'email', b'charsets')
324 326 ]
325 327 fallbacks = [
326 328 pycompat.sysstr(encoding.fallbackencoding.lower()),
327 329 pycompat.sysstr(encoding.encoding.lower()),
328 330 'utf-8',
329 331 ]
330 332 for cs in fallbacks: # find unique charsets while keeping order
331 333 if cs not in charsets:
332 334 charsets.append(cs)
333 335 return [cs for cs in charsets if not cs.endswith('ascii')]
334 336
335 337
336 338 def _encode(ui, s, charsets):
337 339 # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
338 340 '''Returns (converted) string, charset tuple.
339 341 Finds out best charset by cycling through sendcharsets in descending
340 342 order. Tries both encoding and fallbackencoding for input. Only as
341 343 last resort send as is in fake ascii.
342 344 Caveat: Do not use for mail parts containing patches!'''
343 345 sendcharsets = charsets or _charsets(ui)
344 346 if not isinstance(s, bytes):
345 347 # We have unicode data, which we need to try and encode to
346 348 # some reasonable-ish encoding. Try the encodings the user
347 349 # wants, and fall back to garbage-in-ascii.
348 350 for ocs in sendcharsets:
349 351 try:
350 352 return s.encode(ocs), ocs
351 353 except UnicodeEncodeError:
352 354 pass
353 355 except LookupError:
354 356 ui.warn(
355 357 _(b'ignoring invalid sendcharset: %s\n')
356 358 % pycompat.sysbytes(ocs)
357 359 )
358 360 else:
359 361 # Everything failed, ascii-armor what we've got and send it.
360 362 return s.encode('ascii', 'backslashreplace'), 'us-ascii'
361 363 # We have a bytes object of unknown encoding. We'll try to guess a valid
362 364 # encoding, falling back to pretending we had ascii even though we
363 365 # know that's wrong.
364 366 try:
365 367 s.decode('ascii')
366 368 except UnicodeDecodeError:
367 369 for ics in (encoding.encoding, encoding.fallbackencoding):
368 370 ics = pycompat.sysstr(ics)
369 371 try:
370 372 u = s.decode(ics)
371 373 except UnicodeDecodeError:
372 374 continue
373 375 for ocs in sendcharsets:
374 376 try:
375 377 return u.encode(ocs), ocs
376 378 except UnicodeEncodeError:
377 379 pass
378 380 except LookupError:
379 381 ui.warn(
380 382 _(b'ignoring invalid sendcharset: %s\n')
381 383 % pycompat.sysbytes(ocs)
382 384 )
383 385 # if ascii, or all conversion attempts fail, send (broken) ascii
384 386 return s, 'us-ascii'
385 387
386 388
387 389 def headencode(ui, s, charsets=None, display=False):
388 390 # type: (Any, Union[bytes, str], List[str], bool) -> str
389 391 '''Returns RFC-2047 compliant header from given string.'''
390 392 if not display:
391 393 # split into words?
392 394 s, cs = _encode(ui, s, charsets)
393 395 return email.header.Header(s, cs).encode()
394 396 return encoding.strfromlocal(s)
395 397
396 398
397 399 def _addressencode(ui, name, addr, charsets=None):
398 400 # type: (Any, str, str, List[str]) -> str
399 401 addr = encoding.strtolocal(addr)
400 402 name = headencode(ui, name, charsets)
401 403 try:
402 404 acc, dom = addr.split(b'@')
403 405 acc.decode('ascii')
404 406 dom = dom.decode(pycompat.sysstr(encoding.encoding)).encode('idna')
405 407 addr = b'%s@%s' % (acc, dom)
406 408 except UnicodeDecodeError:
407 409 raise error.Abort(_(b'invalid email address: %s') % addr)
408 410 except ValueError:
409 411 try:
410 412 # too strict?
411 413 addr.decode('ascii')
412 414 except UnicodeDecodeError:
413 415 raise error.Abort(_(b'invalid local address: %s') % addr)
414 416 return email.utils.formataddr((name, encoding.strfromlocal(addr)))
415 417
416 418
417 419 def addressencode(ui, address, charsets=None, display=False):
418 420 # type: (Any, bytes, List[str], bool) -> str
419 421 '''Turns address into RFC-2047 compliant header.'''
420 422 if display or not address:
421 423 return encoding.strfromlocal(address or b'')
422 424 name, addr = email.utils.parseaddr(encoding.strfromlocal(address))
423 425 return _addressencode(ui, name, addr, charsets)
424 426
425 427
426 428 def addrlistencode(ui, addrs, charsets=None, display=False):
427 429 # type: (Any, List[bytes], List[str], bool) -> List[str]
428 430 '''Turns a list of addresses into a list of RFC-2047 compliant headers.
429 431 A single element of the input list may contain multiple addresses, but the
430 432 output always has one address per item.'''
431 433 straddrs = []
432 434 for a in addrs:
433 435 assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
434 436 straddrs.append(encoding.strfromlocal(a))
435 437 if display:
436 438 return [a.strip() for a in straddrs if a.strip()]
437 439
438 440 result = []
439 441 for name, addr in email.utils.getaddresses(straddrs):
440 442 if name or addr:
441 443 r = _addressencode(ui, name, addr, charsets)
442 444 result.append(r)
443 445 return result
444 446
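The flattening described in the docstring is done by the stdlib helper used above; a quick, illustrative example of email.utils.getaddresses() splitting one combined item into per-address pairs:

import email.utils

items = ['Jane Doe <jdoe@example.com>, bob@example.org']
print(email.utils.getaddresses(items))
# -> [('Jane Doe', 'jdoe@example.com'), ('', 'bob@example.org')]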
445 447
446 448 def mimeencode(ui, s, charsets=None, display=False):
447 449 # type: (Any, bytes, List[str], bool) -> email.message.Message
448 450 '''creates a MIME text object, encodes it if needed, and sets the
449 451 charset and transfer-encoding accordingly.'''
450 452 cs = 'us-ascii'
451 453 if not display:
452 454 s, cs = _encode(ui, s, charsets)
453 455 return mimetextqp(s, 'plain', cs)
454 456
455 457
456 458 if pycompat.ispy3:
457 459
458 460 Generator = email.generator.BytesGenerator
459 461
460 462 def parse(fp):
461 463 # type: (Any) -> email.message.Message
462 464 ep = email.parser.Parser()
463 465 # disable the "universal newlines" mode, which isn't binary safe.
464 466 # I have no idea if ascii/surrogateescape is correct, but that's
465 467 # what the standard Python email parser does.
466 468 fp = io.TextIOWrapper(
467 469 fp, encoding='ascii', errors='surrogateescape', newline=chr(10)
468 470 )
469 471 try:
470 472 return ep.parse(fp)
471 473 finally:
472 474 fp.detach()
473 475
474 476 def parsebytes(data):
475 477 # type: (bytes) -> email.message.Message
476 478 ep = email.parser.BytesParser()
477 479 return ep.parsebytes(data)
478 480
479 481
480 482 else:
481 483
482 484 Generator = email.generator.Generator
483 485
484 486 def parse(fp):
485 487 # type: (Any) -> email.message.Message
486 488 ep = email.parser.Parser()
487 489 return ep.parse(fp)
488 490
489 491 def parsebytes(data):
490 492 # type: (str) -> email.message.Message
491 493 ep = email.parser.Parser()
492 494 return ep.parsestr(data)
493 495
494 496
495 497 def headdecode(s):
496 498 # type: (Union[email.header.Header, bytes]) -> bytes
497 499 '''Decodes RFC-2047 header'''
498 500 uparts = []
499 501 for part, charset in email.header.decode_header(s):
500 502 if charset is not None:
501 503 try:
502 504 uparts.append(part.decode(charset))
503 505 continue
504 506 except (UnicodeDecodeError, LookupError):
505 507 pass
506 508 # On Python 3, decode_header() may return either bytes or unicode
507 509 # depending on whether the header has =?<charset>? or not
508 510 if isinstance(part, type(u'')):
509 511 uparts.append(part)
510 512 continue
511 513 try:
512 514 uparts.append(part.decode('UTF-8'))
513 515 continue
514 516 except UnicodeDecodeError:
515 517 pass
516 518 uparts.append(part.decode('ISO-8859-1'))
517 519 return encoding.unitolocal(u' '.join(uparts))
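For reference, the (part, charset) chunks that headdecode() iterates over come straight from the stdlib; a small illustrative example on Python 3:

import email.header

raw = '=?utf-8?b?Z3LDvMOfZQ==?= plain tail'
print(email.header.decode_header(raw))
# -> [(b'gr\xc3\xbc\xc3\x9fe', 'utf-8'), (b' plain tail', None)]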
@@ -1,2370 +1,2370 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import contextlib
12 12 import datetime
13 13 import errno
14 14 import getpass
15 15 import inspect
16 16 import os
17 17 import re
18 18 import signal
19 19 import socket
20 20 import subprocess
21 21 import sys
22 22 import traceback
23 23
24 24 from .i18n import _
25 25 from .node import hex
26 26 from .pycompat import (
27 27 getattr,
28 28 open,
29 29 setattr,
30 30 )
31 31
32 32 from . import (
33 33 color,
34 34 config,
35 35 configitems,
36 36 encoding,
37 37 error,
38 38 formatter,
39 39 loggingutil,
40 40 progress,
41 41 pycompat,
42 42 rcutil,
43 43 scmutil,
44 44 util,
45 45 )
46 46 from .utils import (
47 47 dateutil,
48 48 procutil,
49 49 resourceutil,
50 50 stringutil,
51 51 )
52 52
53 53 urlreq = util.urlreq
54 54
55 55 # for use with str.translate(None, _keepalnum), to keep just alphanumerics
56 56 _keepalnum = b''.join(
57 57 c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
58 58 )
59 59
60 60 # The config knobs that will be altered (if unset) by ui.tweakdefaults.
61 61 tweakrc = b"""
62 62 [ui]
63 63 # The rollback command is dangerous. As a rule, don't use it.
64 64 rollback = False
65 65 # Make `hg status` report copy information
66 66 statuscopies = yes
67 67 # Prefer curses UIs when available. Revert to plain-text with `text`.
68 68 interface = curses
69 69 # Make compatible commands emit cwd-relative paths by default.
70 70 relative-paths = yes
71 71
72 72 [commands]
73 73 # Grep working directory by default.
74 74 grep.all-files = True
75 75 # Refuse to perform an `hg update` that would cause a file content merge
76 76 update.check = noconflict
77 77 # Show conflicts information in `hg status`
78 78 status.verbose = True
79 79 # Make `hg resolve` with no action (like `-m`) fail instead of re-merging.
80 80 resolve.explicit-re-merge = True
81 81
82 82 [diff]
83 83 git = 1
84 84 showfunc = 1
85 85 word-diff = 1
86 86 """
87 87
88 88 samplehgrcs = {
89 89 b'user': b"""# example user config (see 'hg help config' for more info)
90 90 [ui]
91 91 # name and email, e.g.
92 92 # username = Jane Doe <jdoe@example.com>
93 93 username =
94 94
95 95 # We recommend enabling tweakdefaults to get slight improvements to
96 96 # the UI over time. Make sure to set HGPLAIN in the environment when
97 97 # writing scripts!
98 98 # tweakdefaults = True
99 99
100 100 # uncomment to disable color in command output
101 101 # (see 'hg help color' for details)
102 102 # color = never
103 103
104 104 # uncomment to disable command output pagination
105 105 # (see 'hg help pager' for details)
106 106 # paginate = never
107 107
108 108 [extensions]
109 109 # uncomment the lines below to enable some popular extensions
110 110 # (see 'hg help extensions' for more info)
111 111 #
112 112 # histedit =
113 113 # rebase =
114 114 # uncommit =
115 115 """,
116 116 b'cloned': b"""# example repository config (see 'hg help config' for more info)
117 117 [paths]
118 118 default = %s
119 119
120 120 # path aliases to other clones of this repo in URLs or filesystem paths
121 121 # (see 'hg help config.paths' for more info)
122 122 #
123 123 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
124 124 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
125 125 # my-clone = /home/jdoe/jdoes-clone
126 126
127 127 [ui]
128 128 # name and email (local to this repository, optional), e.g.
129 129 # username = Jane Doe <jdoe@example.com>
130 130 """,
131 131 b'local': b"""# example repository config (see 'hg help config' for more info)
132 132 [paths]
133 133 # path aliases to other clones of this repo in URLs or filesystem paths
134 134 # (see 'hg help config.paths' for more info)
135 135 #
136 136 # default = http://example.com/hg/example-repo
137 137 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
138 138 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
139 139 # my-clone = /home/jdoe/jdoes-clone
140 140
141 141 [ui]
142 142 # name and email (local to this repository, optional), e.g.
143 143 # username = Jane Doe <jdoe@example.com>
144 144 """,
145 145 b'global': b"""# example system-wide hg config (see 'hg help config' for more info)
146 146
147 147 [ui]
148 148 # uncomment to disable color in command output
149 149 # (see 'hg help color' for details)
150 150 # color = never
151 151
152 152 # uncomment to disable command output pagination
153 153 # (see 'hg help pager' for details)
154 154 # paginate = never
155 155
156 156 [extensions]
157 157 # uncomment the lines below to enable some popular extensions
158 158 # (see 'hg help extensions' for more info)
159 159 #
160 160 # blackbox =
161 161 # churn =
162 162 """,
163 163 }
164 164
165 165
166 166 def _maybestrurl(maybebytes):
167 167 return pycompat.rapply(pycompat.strurl, maybebytes)
168 168
169 169
170 170 def _maybebytesurl(maybestr):
171 171 return pycompat.rapply(pycompat.bytesurl, maybestr)
172 172
173 173
174 174 class httppasswordmgrdbproxy(object):
175 175 """Delays loading urllib2 until it's needed."""
176 176
177 177 def __init__(self):
178 178 self._mgr = None
179 179
180 180 def _get_mgr(self):
181 181 if self._mgr is None:
182 182 self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
183 183 return self._mgr
184 184
185 185 def add_password(self, realm, uris, user, passwd):
186 186 return self._get_mgr().add_password(
187 187 _maybestrurl(realm),
188 188 _maybestrurl(uris),
189 189 _maybestrurl(user),
190 190 _maybestrurl(passwd),
191 191 )
192 192
193 193 def find_user_password(self, realm, uri):
194 194 mgr = self._get_mgr()
195 195 return _maybebytesurl(
196 196 mgr.find_user_password(_maybestrurl(realm), _maybestrurl(uri))
197 197 )
198 198
199 199
200 200 def _catchterm(*args):
201 201 raise error.SignalInterrupt
202 202
203 203
204 204 # unique object used to detect no default value has been provided when
205 205 # retrieving configuration value.
206 206 _unset = object()
207 207
208 208 # _reqexithandlers: callbacks run at the end of a request
209 209 _reqexithandlers = []
210 210
211 211
212 212 class ui(object):
213 213 def __init__(self, src=None):
214 214 """Create a fresh new ui object if no src given
215 215
216 216 Use uimod.ui.load() to create a ui which knows global and user configs.
217 217 In most cases, you should use ui.copy() to create a copy of an existing
218 218 ui object.
219 219 """
220 220 # _buffers: used for temporary capture of output
221 221 self._buffers = []
222 222 # 3-tuple describing how each buffer in the stack behaves.
223 223 # Values are (capture stderr, capture subprocesses, apply labels).
224 224 self._bufferstates = []
225 225 # When a buffer is active, defines whether we are expanding labels.
226 226 # This exists to prevent an extra list lookup.
227 227 self._bufferapplylabels = None
228 228 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
229 229 self._reportuntrusted = True
230 230 self._knownconfig = configitems.coreitems
231 231 self._ocfg = config.config() # overlay
232 232 self._tcfg = config.config() # trusted
233 233 self._ucfg = config.config() # untrusted
234 234 self._trustusers = set()
235 235 self._trustgroups = set()
236 236 self.callhooks = True
237 237 # Insecure server connections requested.
238 238 self.insecureconnections = False
239 239 # Blocked time
240 240 self.logblockedtimes = False
241 241 # color mode: see mercurial/color.py for possible value
242 242 self._colormode = None
243 243 self._terminfoparams = {}
244 244 self._styles = {}
245 245 self._uninterruptible = False
246 246 self.showtimestamp = False
247 247
248 248 if src:
249 249 self._fout = src._fout
250 250 self._ferr = src._ferr
251 251 self._fin = src._fin
252 252 self._fmsg = src._fmsg
253 253 self._fmsgout = src._fmsgout
254 254 self._fmsgerr = src._fmsgerr
255 255 self._finoutredirected = src._finoutredirected
256 256 self._loggers = src._loggers.copy()
257 257 self.pageractive = src.pageractive
258 258 self._disablepager = src._disablepager
259 259 self._tweaked = src._tweaked
260 260
261 261 self._tcfg = src._tcfg.copy()
262 262 self._ucfg = src._ucfg.copy()
263 263 self._ocfg = src._ocfg.copy()
264 264 self._trustusers = src._trustusers.copy()
265 265 self._trustgroups = src._trustgroups.copy()
266 266 self.environ = src.environ
267 267 self.callhooks = src.callhooks
268 268 self.insecureconnections = src.insecureconnections
269 269 self._colormode = src._colormode
270 270 self._terminfoparams = src._terminfoparams.copy()
271 271 self._styles = src._styles.copy()
272 272
273 273 self.fixconfig()
274 274
275 275 self.httppasswordmgrdb = src.httppasswordmgrdb
276 276 self._blockedtimes = src._blockedtimes
277 277 else:
278 278 self._fout = procutil.stdout
279 279 self._ferr = procutil.stderr
280 280 self._fin = procutil.stdin
281 281 self._fmsg = None
282 282 self._fmsgout = self.fout # configurable
283 283 self._fmsgerr = self.ferr # configurable
284 284 self._finoutredirected = False
285 285 self._loggers = {}
286 286 self.pageractive = False
287 287 self._disablepager = False
288 288 self._tweaked = False
289 289
290 290 # shared read-only environment
291 291 self.environ = encoding.environ
292 292
293 293 self.httppasswordmgrdb = httppasswordmgrdbproxy()
294 294 self._blockedtimes = collections.defaultdict(int)
295 295
296 296 allowed = self.configlist(b'experimental', b'exportableenviron')
297 297 if b'*' in allowed:
298 298 self._exportableenviron = self.environ
299 299 else:
300 300 self._exportableenviron = {}
301 301 for k in allowed:
302 302 if k in self.environ:
303 303 self._exportableenviron[k] = self.environ[k]
304 304
305 305 @classmethod
306 306 def load(cls):
307 307 """Create a ui and load global and user configs"""
308 308 u = cls()
309 309 # we always trust global config files and environment variables
310 310 for t, f in rcutil.rccomponents():
311 311 if t == b'path':
312 312 u.readconfig(f, trust=True)
313 313 elif t == b'resource':
314 314 u.read_resource_config(f, trust=True)
315 315 elif t == b'items':
316 316 sections = set()
317 317 for section, name, value, source in f:
318 318 # do not set u._ocfg
319 319 # XXX clean this up once immutable config object is a thing
320 320 u._tcfg.set(section, name, value, source)
321 321 u._ucfg.set(section, name, value, source)
322 322 sections.add(section)
323 323 for section in sections:
324 324 u.fixconfig(section=section)
325 325 else:
326 326 raise error.ProgrammingError(b'unknown rctype: %s' % t)
327 327 u._maybetweakdefaults()
328 328 return u
329 329
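A minimal sketch of how a fully configured ui is usually obtained, per the docstrings above; it assumes Mercurial's own modules are importable.

from mercurial import ui as uimod

u = uimod.ui.load()                   # reads global/user hgrc and env items
print(u.config(b'ui', b'username'))   # value as bytes, or None if unset
u2 = u.copy()                         # later copies inherit the loaded config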
330 330 def _maybetweakdefaults(self):
331 331 if not self.configbool(b'ui', b'tweakdefaults'):
332 332 return
333 333 if self._tweaked or self.plain(b'tweakdefaults'):
334 334 return
335 335
336 336 # Note: it is SUPER IMPORTANT that you set self._tweaked to
337 337 # True *before* any calls to setconfig(), otherwise you'll get
338 338 # infinite recursion between setconfig and this method.
339 339 #
340 340 # TODO: We should extract an inner method in setconfig() to
341 341 # avoid this weirdness.
342 342 self._tweaked = True
343 343 tmpcfg = config.config()
344 344 tmpcfg.parse(b'<tweakdefaults>', tweakrc)
345 345 for section in tmpcfg:
346 346 for name, value in tmpcfg.items(section):
347 347 if not self.hasconfig(section, name):
348 348 self.setconfig(section, name, value, b"<tweakdefaults>")
349 349
350 350 def copy(self):
351 351 return self.__class__(self)
352 352
353 353 def resetstate(self):
354 354 """Clear internal state that shouldn't persist across commands"""
355 355 if self._progbar:
356 356 self._progbar.resetstate() # reset last-print time of progress bar
357 357 self.httppasswordmgrdb = httppasswordmgrdbproxy()
358 358
359 359 @contextlib.contextmanager
360 360 def timeblockedsection(self, key):
361 361 # this is open-coded below - search for timeblockedsection to find them
362 362 starttime = util.timer()
363 363 try:
364 364 yield
365 365 finally:
366 366 self._blockedtimes[key + b'_blocked'] += (
367 367 util.timer() - starttime
368 368 ) * 1000
369 369
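A standalone sketch of the same pattern using stdlib timing (time.time stands in for util.timer here): elapsed milliseconds for the wrapped block accumulate under a per-key counter.

import collections
import contextlib
import time

_blocked = collections.defaultdict(int)

@contextlib.contextmanager
def timeblocked(key):
    start = time.time()
    try:
        yield
    finally:
        # accumulate elapsed wall-clock time, in milliseconds, per key
        _blocked[key + '_blocked'] += (time.time() - start) * 1000

with timeblocked('stdio'):
    time.sleep(0.01)
print(_blocked)   # e.g. defaultdict(<class 'int'>, {'stdio_blocked': 10.3})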
370 370 @contextlib.contextmanager
371 371 def uninterruptible(self):
372 372 """Mark an operation as unsafe.
373 373
374 374 Most operations on a repository are safe to interrupt, but a
375 375 few are risky (for example repair.strip). This context manager
376 376 lets you advise Mercurial that something risky is happening so
377 377 that control-C etc can be blocked if desired.
378 378 """
379 379 enabled = self.configbool(b'experimental', b'nointerrupt')
380 380 if enabled and self.configbool(
381 381 b'experimental', b'nointerrupt-interactiveonly'
382 382 ):
383 383 enabled = self.interactive()
384 384 if self._uninterruptible or not enabled:
385 385 # if nointerrupt support is turned off, the process isn't
386 386 # interactive, or we're already in an uninterruptible
387 387 # block, do nothing.
388 388 yield
389 389 return
390 390
391 391 def warn():
392 392 self.warn(_(b"shutting down cleanly\n"))
393 393 self.warn(
394 394 _(b"press ^C again to terminate immediately (dangerous)\n")
395 395 )
396 396 return True
397 397
398 398 with procutil.uninterruptible(warn):
399 399 try:
400 400 self._uninterruptible = True
401 401 yield
402 402 finally:
403 403 self._uninterruptible = False
404 404
405 405 def formatter(self, topic, opts):
406 406 return formatter.formatter(self, self, topic, opts)
407 407
408 408 def _trusted(self, fp, f):
409 409 st = util.fstat(fp)
410 410 if util.isowner(st):
411 411 return True
412 412
413 413 tusers, tgroups = self._trustusers, self._trustgroups
414 414 if b'*' in tusers or b'*' in tgroups:
415 415 return True
416 416
417 417 user = util.username(st.st_uid)
418 418 group = util.groupname(st.st_gid)
419 419 if user in tusers or group in tgroups or user == util.username():
420 420 return True
421 421
422 422 if self._reportuntrusted:
423 423 self.warn(
424 424 _(
425 425 b'not trusting file %s from untrusted '
426 426 b'user %s, group %s\n'
427 427 )
428 428 % (f, user, group)
429 429 )
430 430 return False
431 431
432 432 def read_resource_config(
433 433 self, name, root=None, trust=False, sections=None, remap=None
434 434 ):
435 435 try:
436 436 fp = resourceutil.open_resource(name[0], name[1])
437 437 except IOError:
438 438 if not sections: # ignore unless we were looking for something
439 439 return
440 440 raise
441 441
442 442 self._readconfig(
443 443 b'resource:%s.%s' % name, fp, root, trust, sections, remap
444 444 )
445 445
446 446 def readconfig(
447 447 self, filename, root=None, trust=False, sections=None, remap=None
448 448 ):
449 449 try:
450 450 fp = open(filename, 'rb')
451 451 except IOError:
452 452 if not sections: # ignore unless we were looking for something
453 453 return
454 454 raise
455 455
456 456 self._readconfig(filename, fp, root, trust, sections, remap)
457 457
458 458 def _readconfig(
459 459 self, filename, fp, root=None, trust=False, sections=None, remap=None
460 460 ):
461 461 with fp:
462 462 cfg = config.config()
463 463 trusted = sections or trust or self._trusted(fp, filename)
464 464
465 465 try:
466 466 cfg.read(filename, fp, sections=sections, remap=remap)
467 467 except error.ParseError as inst:
468 468 if trusted:
469 469 raise
470 470 self.warn(_(b'ignored: %s\n') % stringutil.forcebytestr(inst))
471 471
472 472 self._applyconfig(cfg, trusted, root)
473 473
474 474 def applyconfig(self, configitems, source=b"", root=None):
475 475 """Add configitems from a non-file source. Unlike with ``setconfig()``,
476 476 they can be overridden by subsequent config file reads. The items are
477 477 in the same format as ``configoverride()``, namely a dict of the
478 478 following structures: {(section, name) : value}
479 479
480 480 Typically this is used by extensions that inject themselves into the
481 481 config file load procedure by monkeypatching ``localrepo.loadhgrc()``.
482 482 """
483 483 cfg = config.config()
484 484
485 485 for (section, name), value in configitems.items():
486 486 cfg.set(section, name, value, source)
487 487
488 488 self._applyconfig(cfg, True, root)
489 489
490 490 def _applyconfig(self, cfg, trusted, root):
491 491 if self.plain():
492 492 for k in (
493 493 b'debug',
494 494 b'fallbackencoding',
495 495 b'quiet',
496 496 b'slash',
497 497 b'logtemplate',
498 498 b'message-output',
499 499 b'statuscopies',
500 500 b'style',
501 501 b'traceback',
502 502 b'verbose',
503 503 ):
504 504 if k in cfg[b'ui']:
505 505 del cfg[b'ui'][k]
506 506 for k, v in cfg.items(b'defaults'):
507 507 del cfg[b'defaults'][k]
508 508 for k, v in cfg.items(b'commands'):
509 509 del cfg[b'commands'][k]
510 510 # Don't remove aliases from the configuration if in the exceptionlist
511 511 if self.plain(b'alias'):
512 512 for k, v in cfg.items(b'alias'):
513 513 del cfg[b'alias'][k]
514 514 if self.plain(b'revsetalias'):
515 515 for k, v in cfg.items(b'revsetalias'):
516 516 del cfg[b'revsetalias'][k]
517 517 if self.plain(b'templatealias'):
518 518 for k, v in cfg.items(b'templatealias'):
519 519 del cfg[b'templatealias'][k]
520 520
521 521 if trusted:
522 522 self._tcfg.update(cfg)
523 523 self._tcfg.update(self._ocfg)
524 524 self._ucfg.update(cfg)
525 525 self._ucfg.update(self._ocfg)
526 526
527 527 if root is None:
528 528 root = os.path.expanduser(b'~')
529 529 self.fixconfig(root=root)
530 530
531 531 def fixconfig(self, root=None, section=None):
532 532 if section in (None, b'paths'):
533 533 # expand vars and ~
534 534 # translate paths relative to root (or home) into absolute paths
535 535 root = root or encoding.getcwd()
536 536 for c in self._tcfg, self._ucfg, self._ocfg:
537 537 for n, p in c.items(b'paths'):
538 538 # Ignore sub-options.
539 539 if b':' in n:
540 540 continue
541 541 if not p:
542 542 continue
543 543 if b'%%' in p:
544 544 s = self.configsource(b'paths', n) or b'none'
545 545 self.warn(
546 546 _(b"(deprecated '%%' in path %s=%s from %s)\n")
547 547 % (n, p, s)
548 548 )
549 549 p = p.replace(b'%%', b'%')
550 550 p = util.expandpath(p)
551 551 if not util.hasscheme(p) and not os.path.isabs(p):
552 552 p = os.path.normpath(os.path.join(root, p))
553 553 c.set(b"paths", n, p)
554 554
555 555 if section in (None, b'ui'):
556 556 # update ui options
557 557 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
558 558 self.debugflag = self.configbool(b'ui', b'debug')
559 559 self.verbose = self.debugflag or self.configbool(b'ui', b'verbose')
560 560 self.quiet = not self.debugflag and self.configbool(b'ui', b'quiet')
561 561 if self.verbose and self.quiet:
562 562 self.quiet = self.verbose = False
563 563 self._reportuntrusted = self.debugflag or self.configbool(
564 564 b"ui", b"report_untrusted"
565 565 )
566 566 self.showtimestamp = self.configbool(b'ui', b'timestamp-output')
567 567 self.tracebackflag = self.configbool(b'ui', b'traceback')
568 568 self.logblockedtimes = self.configbool(b'ui', b'logblockedtimes')
569 569
570 570 if section in (None, b'trusted'):
571 571 # update trust information
572 572 self._trustusers.update(self.configlist(b'trusted', b'users'))
573 573 self._trustgroups.update(self.configlist(b'trusted', b'groups'))
574 574
575 575 if section in (None, b'devel', b'ui') and self.debugflag:
576 576 tracked = set()
577 577 if self.configbool(b'devel', b'debug.extensions'):
578 578 tracked.add(b'extension')
579 579 if tracked:
580 580 logger = loggingutil.fileobjectlogger(self._ferr, tracked)
581 581 self.setlogger(b'debug', logger)
582 582
583 583 def backupconfig(self, section, item):
584 584 return (
585 585 self._ocfg.backup(section, item),
586 586 self._tcfg.backup(section, item),
587 587 self._ucfg.backup(section, item),
588 588 )
589 589
590 590 def restoreconfig(self, data):
591 591 self._ocfg.restore(data[0])
592 592 self._tcfg.restore(data[1])
593 593 self._ucfg.restore(data[2])
594 594
595 595 def setconfig(self, section, name, value, source=b''):
596 596 for cfg in (self._ocfg, self._tcfg, self._ucfg):
597 597 cfg.set(section, name, value, source)
598 598 self.fixconfig(section=section)
599 599 self._maybetweakdefaults()
600 600
601 601 def _data(self, untrusted):
602 602 return untrusted and self._ucfg or self._tcfg
603 603
604 604 def configsource(self, section, name, untrusted=False):
605 605 return self._data(untrusted).source(section, name)
606 606
607 607 def config(self, section, name, default=_unset, untrusted=False):
608 608 """return the plain string version of a config"""
609 609 value = self._config(
610 610 section, name, default=default, untrusted=untrusted
611 611 )
612 612 if value is _unset:
613 613 return None
614 614 return value
615 615
616 616 def _config(self, section, name, default=_unset, untrusted=False):
617 617 value = itemdefault = default
618 618 item = self._knownconfig.get(section, {}).get(name)
619 619 alternates = [(section, name)]
620 620
621 621 if item is not None:
622 622 alternates.extend(item.alias)
623 623 if callable(item.default):
624 624 itemdefault = item.default()
625 625 else:
626 626 itemdefault = item.default
627 627 else:
628 628 msg = b"accessing unregistered config item: '%s.%s'"
629 629 msg %= (section, name)
630 630 self.develwarn(msg, 2, b'warn-config-unknown')
631 631
632 632 if default is _unset:
633 633 if item is None:
634 634 value = default
635 635 elif item.default is configitems.dynamicdefault:
636 636 value = None
637 637 msg = b"config item requires an explicit default value: '%s.%s'"
638 638 msg %= (section, name)
639 639 self.develwarn(msg, 2, b'warn-config-default')
640 640 else:
641 641 value = itemdefault
642 642 elif (
643 643 item is not None
644 644 and item.default is not configitems.dynamicdefault
645 645 and default != itemdefault
646 646 ):
647 647 msg = (
648 648 b"specifying a mismatched default value for a registered "
649 649 b"config item: '%s.%s' '%s'"
650 650 )
651 651 msg %= (section, name, pycompat.bytestr(default))
652 652 self.develwarn(msg, 2, b'warn-config-default')
653 653
654 654 for s, n in alternates:
655 655 candidate = self._data(untrusted).get(s, n, None)
656 656 if candidate is not None:
657 657 value = candidate
658 658 break
659 659
660 660 if self.debugflag and not untrusted and self._reportuntrusted:
661 661 for s, n in alternates:
662 662 uvalue = self._ucfg.get(s, n)
663 663 if uvalue is not None and uvalue != value:
664 664 self.debug(
665 665 b"ignoring untrusted configuration option "
666 666 b"%s.%s = %s\n" % (s, n, uvalue)
667 667 )
668 668 return value
669 669
670 670 def configsuboptions(self, section, name, default=_unset, untrusted=False):
671 671 """Get a config option and all sub-options.
672 672
673 673 Some config options have sub-options that are declared with the
674 674 format "key:opt = value". This method is used to return the main
675 675 option and all its declared sub-options.
676 676
677 677 Returns a 2-tuple of ``(option, sub-options)``, where ``sub-options``
678 678 is a dict of defined sub-options where keys and values are strings.
679 679 """
680 680 main = self.config(section, name, default, untrusted=untrusted)
681 681 data = self._data(untrusted)
682 682 sub = {}
683 683 prefix = b'%s:' % name
684 684 for k, v in data.items(section):
685 685 if k.startswith(prefix):
686 686 sub[k[len(prefix) :]] = v
687 687
688 688 if self.debugflag and not untrusted and self._reportuntrusted:
689 689 for k, v in sub.items():
690 690 uvalue = self._ucfg.get(section, b'%s:%s' % (name, k))
691 691 if uvalue is not None and uvalue != v:
692 692 self.debug(
693 693 b'ignoring untrusted configuration option '
694 694 b'%s:%s.%s = %s\n' % (section, name, k, uvalue)
695 695 )
696 696
697 697 return main, sub
698 698
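An illustrative use of the sub-option syntax, assuming `u` is a ui instance (for example from ui.load() as sketched earlier); the default/pushurl pair mirrors the sample configs above.

u.setconfig(b'paths', b'default', b'https://example.com/repo')
u.setconfig(b'paths', b'default:pushurl', b'ssh://example.com/repo')
main, sub = u.configsuboptions(b'paths', b'default')
# main == b'https://example.com/repo'
# sub  == {b'pushurl': b'ssh://example.com/repo'}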
699 699 def configpath(self, section, name, default=_unset, untrusted=False):
700 700 """get a path config item, expanded relative to repo root or config
701 701 file"""
702 702 v = self.config(section, name, default, untrusted)
703 703 if v is None:
704 704 return None
705 705 if not os.path.isabs(v) or b"://" not in v:
706 706 src = self.configsource(section, name, untrusted)
707 707 if b':' in src:
708 708 base = os.path.dirname(src.rsplit(b':')[0])
709 709 v = os.path.join(base, os.path.expanduser(v))
710 710 return v
711 711
712 712 def configbool(self, section, name, default=_unset, untrusted=False):
713 713 """parse a configuration element as a boolean
714 714
715 715 >>> u = ui(); s = b'foo'
716 716 >>> u.setconfig(s, b'true', b'yes')
717 717 >>> u.configbool(s, b'true')
718 718 True
719 719 >>> u.setconfig(s, b'false', b'no')
720 720 >>> u.configbool(s, b'false')
721 721 False
722 722 >>> u.configbool(s, b'unknown')
723 723 False
724 724 >>> u.configbool(s, b'unknown', True)
725 725 True
726 726 >>> u.setconfig(s, b'invalid', b'somevalue')
727 727 >>> u.configbool(s, b'invalid')
728 728 Traceback (most recent call last):
729 729 ...
730 730 ConfigError: foo.invalid is not a boolean ('somevalue')
731 731 """
732 732
733 733 v = self._config(section, name, default, untrusted=untrusted)
734 734 if v is None:
735 735 return v
736 736 if v is _unset:
737 737 if default is _unset:
738 738 return False
739 739 return default
740 740 if isinstance(v, bool):
741 741 return v
742 742 b = stringutil.parsebool(v)
743 743 if b is None:
744 744 raise error.ConfigError(
745 745 _(b"%s.%s is not a boolean ('%s')") % (section, name, v)
746 746 )
747 747 return b
748 748
749 749 def configwith(
750 750 self, convert, section, name, default=_unset, desc=None, untrusted=False
751 751 ):
752 752 """parse a configuration element with a conversion function
753 753
754 754 >>> u = ui(); s = b'foo'
755 755 >>> u.setconfig(s, b'float1', b'42')
756 756 >>> u.configwith(float, s, b'float1')
757 757 42.0
758 758 >>> u.setconfig(s, b'float2', b'-4.25')
759 759 >>> u.configwith(float, s, b'float2')
760 760 -4.25
761 761 >>> u.configwith(float, s, b'unknown', 7)
762 762 7.0
763 763 >>> u.setconfig(s, b'invalid', b'somevalue')
764 764 >>> u.configwith(float, s, b'invalid')
765 765 Traceback (most recent call last):
766 766 ...
767 767 ConfigError: foo.invalid is not a valid float ('somevalue')
768 768 >>> u.configwith(float, s, b'invalid', desc=b'womble')
769 769 Traceback (most recent call last):
770 770 ...
771 771 ConfigError: foo.invalid is not a valid womble ('somevalue')
772 772 """
773 773
774 774 v = self.config(section, name, default, untrusted)
775 775 if v is None:
776 776 return v # do not attempt to convert None
777 777 try:
778 778 return convert(v)
779 779 except (ValueError, error.ParseError):
780 780 if desc is None:
781 781 desc = pycompat.sysbytes(convert.__name__)
782 782 raise error.ConfigError(
783 783 _(b"%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
784 784 )
785 785
786 786 def configint(self, section, name, default=_unset, untrusted=False):
787 787 """parse a configuration element as an integer
788 788
789 789 >>> u = ui(); s = b'foo'
790 790 >>> u.setconfig(s, b'int1', b'42')
791 791 >>> u.configint(s, b'int1')
792 792 42
793 793 >>> u.setconfig(s, b'int2', b'-42')
794 794 >>> u.configint(s, b'int2')
795 795 -42
796 796 >>> u.configint(s, b'unknown', 7)
797 797 7
798 798 >>> u.setconfig(s, b'invalid', b'somevalue')
799 799 >>> u.configint(s, b'invalid')
800 800 Traceback (most recent call last):
801 801 ...
802 802 ConfigError: foo.invalid is not a valid integer ('somevalue')
803 803 """
804 804
805 805 return self.configwith(
806 806 int, section, name, default, b'integer', untrusted
807 807 )
808 808
809 809 def configbytes(self, section, name, default=_unset, untrusted=False):
810 810 """parse a configuration element as a quantity in bytes
811 811
812 812 Units can be specified as b (bytes), k or kb (kilobytes), m or
813 813 mb (megabytes), g or gb (gigabytes).
814 814
815 815 >>> u = ui(); s = b'foo'
816 816 >>> u.setconfig(s, b'val1', b'42')
817 817 >>> u.configbytes(s, b'val1')
818 818 42
819 819 >>> u.setconfig(s, b'val2', b'42.5 kb')
820 820 >>> u.configbytes(s, b'val2')
821 821 43520
822 822 >>> u.configbytes(s, b'unknown', b'7 MB')
823 823 7340032
824 824 >>> u.setconfig(s, b'invalid', b'somevalue')
825 825 >>> u.configbytes(s, b'invalid')
826 826 Traceback (most recent call last):
827 827 ...
828 828 ConfigError: foo.invalid is not a byte quantity ('somevalue')
829 829 """
830 830
831 831 value = self._config(section, name, default, untrusted)
832 832 if value is _unset:
833 833 if default is _unset:
834 834 default = 0
835 835 value = default
836 836 if not isinstance(value, bytes):
837 837 return value
838 838 try:
839 839 return util.sizetoint(value)
840 840 except error.ParseError:
841 841 raise error.ConfigError(
842 842 _(b"%s.%s is not a byte quantity ('%s')")
843 843 % (section, name, value)
844 844 )
845 845
846 846 def configlist(self, section, name, default=_unset, untrusted=False):
847 847 """parse a configuration element as a list of comma/space separated
848 848 strings
849 849
850 850 >>> u = ui(); s = b'foo'
851 851 >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
852 852 >>> u.configlist(s, b'list1')
853 853 ['this', 'is', 'a small', 'test']
854 854 >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
855 855 >>> u.configlist(s, b'list2')
856 856 ['this', 'is', 'a small', 'test']
857 857 """
858 858 # default is not always a list
859 859 v = self.configwith(
860 860 config.parselist, section, name, default, b'list', untrusted
861 861 )
862 862 if isinstance(v, bytes):
863 863 return config.parselist(v)
864 864 elif v is None:
865 865 return []
866 866 return v
867 867
868 868 def configdate(self, section, name, default=_unset, untrusted=False):
869 869 """parse a configuration element as a tuple of ints
870 870
871 871 >>> u = ui(); s = b'foo'
872 872 >>> u.setconfig(s, b'date', b'0 0')
873 873 >>> u.configdate(s, b'date')
874 874 (0, 0)
875 875 """
876 876 if self.config(section, name, default, untrusted):
877 877 return self.configwith(
878 878 dateutil.parsedate, section, name, default, b'date', untrusted
879 879 )
880 880 if default is _unset:
881 881 return None
882 882 return default
883 883
884 884 def configdefault(self, section, name):
885 885 """returns the default value of the config item"""
886 886 item = self._knownconfig.get(section, {}).get(name)
887 887 itemdefault = None
888 888 if item is not None:
889 889 if callable(item.default):
890 890 itemdefault = item.default()
891 891 else:
892 892 itemdefault = item.default
893 893 return itemdefault
894 894
895 895 def hasconfig(self, section, name, untrusted=False):
896 896 return self._data(untrusted).hasitem(section, name)
897 897
898 898 def has_section(self, section, untrusted=False):
899 899 '''tell whether section exists in config.'''
900 900 return section in self._data(untrusted)
901 901
902 902 def configitems(self, section, untrusted=False, ignoresub=False):
903 903 items = self._data(untrusted).items(section)
904 904 if ignoresub:
905 905 items = [i for i in items if b':' not in i[0]]
906 906 if self.debugflag and not untrusted and self._reportuntrusted:
907 907 for k, v in self._ucfg.items(section):
908 908 if self._tcfg.get(section, k) != v:
909 909 self.debug(
910 910 b"ignoring untrusted configuration option "
911 911 b"%s.%s = %s\n" % (section, k, v)
912 912 )
913 913 return items
914 914
915 915 def walkconfig(self, untrusted=False):
916 916 cfg = self._data(untrusted)
917 917 for section in cfg.sections():
918 918 for name, value in self.configitems(section, untrusted):
919 919 yield section, name, value
920 920
921 921 def plain(self, feature=None):
922 922 '''is plain mode active?
923 923
924 924 Plain mode means that all configuration variables which affect
925 925 the behavior and output of Mercurial should be
926 926 ignored. Additionally, the output should be stable,
927 927 reproducible and suitable for use in scripts or applications.
928 928
929 929 The only way to trigger plain mode is by setting either the
930 930 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
931 931
932 932 The return value can either be
933 933 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
934 934 - False if feature is disabled by default and not included in HGPLAIN
935 935 - True otherwise
936 936 '''
937 937 if (
938 938 b'HGPLAIN' not in encoding.environ
939 939 and b'HGPLAINEXCEPT' not in encoding.environ
940 940 ):
941 941 return False
942 942 exceptions = (
943 943 encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
944 944 )
945 945 # TODO: add support for HGPLAIN=+feature,-feature syntax
946 946 if b'+strictflags' not in encoding.environ.get(b'HGPLAIN', b'').split(
947 947 b','
948 948 ):
949 949 exceptions.append(b'strictflags')
950 950 if feature and exceptions:
951 951 return feature not in exceptions
952 952 return True
953 953
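A sketch of the usual way scripts opt into plain mode, per the docstring above (the environment is the only trigger); the HGPLAINEXCEPT value is just an example.

import os
import subprocess

env = dict(os.environ, HGPLAIN='1')    # stable, script-friendly output
# env['HGPLAINEXCEPT'] = 'alias'       # opt selected features back in
subprocess.run(['hg', 'log', '-l', '1'], env=env, check=True)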
954 954 def username(self, acceptempty=False):
955 955 """Return default username to be used in commits.
956 956
957 957 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
958 958 searching stops at the first one that is set.
959 959 If not found and acceptempty is True, returns None.
960 960 If not found and ui.askusername is True, ask the user, else use
961 961 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
962 962 If no username could be found, raise an Abort error.
963 963 """
964 964 user = encoding.environ.get(b"HGUSER")
965 965 if user is None:
966 966 user = self.config(b"ui", b"username")
967 967 if user is not None:
968 968 user = os.path.expandvars(user)
969 969 if user is None:
970 970 user = encoding.environ.get(b"EMAIL")
971 971 if user is None and acceptempty:
972 972 return user
973 973 if user is None and self.configbool(b"ui", b"askusername"):
974 974 user = self.prompt(_(b"enter a commit username:"), default=None)
975 975 if user is None and not self.interactive():
976 976 try:
977 977 user = b'%s@%s' % (
978 978 procutil.getuser(),
979 979 encoding.strtolocal(socket.getfqdn()),
980 980 )
981 981 self.warn(_(b"no username found, using '%s' instead\n") % user)
982 982 except KeyError:
983 983 pass
984 984 if not user:
985 985 raise error.Abort(
986 986 _(b'no username supplied'),
987 987 hint=_(b"use 'hg config --edit' " b'to set your username'),
988 988 )
989 989 if b"\n" in user:
990 990 raise error.Abort(
991 991 _(b"username %r contains a newline\n") % pycompat.bytestr(user)
992 992 )
993 993 return user
994 994
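A standalone sketch of the search order documented above; the function name and parameter are illustrative, not Mercurial API.

import os

def guess_username(config_username=None):
    # $HGUSER wins, then [ui] username from the config, then $EMAIL.
    user = os.environ.get('HGUSER')
    if user is None:
        user = config_username
    if user is None:
        user = os.environ.get('EMAIL')
    return user   # None means the caller keeps searching (prompt/fallback)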
995 995 def shortuser(self, user):
996 996 """Return a short representation of a user name or email address."""
997 997 if not self.verbose:
998 998 user = stringutil.shortuser(user)
999 999 return user
1000 1000
1001 1001 def expandpath(self, loc, default=None):
1002 1002 """Return repository location relative to cwd or from [paths]"""
1003 1003 try:
1004 1004 p = self.paths.getpath(loc)
1005 1005 if p:
1006 1006 return p.rawloc
1007 1007 except error.RepoError:
1008 1008 pass
1009 1009
1010 1010 if default:
1011 1011 try:
1012 1012 p = self.paths.getpath(default)
1013 1013 if p:
1014 1014 return p.rawloc
1015 1015 except error.RepoError:
1016 1016 pass
1017 1017
1018 1018 return loc
1019 1019
1020 1020 @util.propertycache
1021 1021 def paths(self):
1022 1022 return paths(self)
1023 1023
1024 1024 @property
1025 1025 def fout(self):
1026 1026 return self._fout
1027 1027
1028 1028 @fout.setter
1029 1029 def fout(self, f):
1030 1030 self._fout = f
1031 1031 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1032 1032
1033 1033 @property
1034 1034 def ferr(self):
1035 1035 return self._ferr
1036 1036
1037 1037 @ferr.setter
1038 1038 def ferr(self, f):
1039 1039 self._ferr = f
1040 1040 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1041 1041
1042 1042 @property
1043 1043 def fin(self):
1044 1044 return self._fin
1045 1045
1046 1046 @fin.setter
1047 1047 def fin(self, f):
1048 1048 self._fin = f
1049 1049
1050 1050 @property
1051 1051 def fmsg(self):
1052 1052 """Stream dedicated for status/error messages; may be None if
1053 1053 fout/ferr are used"""
1054 1054 return self._fmsg
1055 1055
1056 1056 @fmsg.setter
1057 1057 def fmsg(self, f):
1058 1058 self._fmsg = f
1059 1059 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1060 1060
1061 1061 def pushbuffer(self, error=False, subproc=False, labeled=False):
1062 1062 """install a buffer to capture standard output of the ui object
1063 1063
1064 1064 If error is True, the error output will be captured too.
1065 1065
1066 1066 If subproc is True, output from subprocesses (typically hooks) will be
1067 1067 captured too.
1068 1068
1069 1069 If labeled is True, any labels associated with buffered
1070 1070 output will be handled. By default, this has no effect
1071 1071 on the output returned, but extensions and GUI tools may
1072 1072 handle this argument and return styled output. If output
1073 1073 is being buffered so it can be captured and parsed or
1074 1074 processed, labeled should not be set to True.
1075 1075 """
1076 1076 self._buffers.append([])
1077 1077 self._bufferstates.append((error, subproc, labeled))
1078 1078 self._bufferapplylabels = labeled
1079 1079
1080 1080 def popbuffer(self):
1081 1081 '''pop the last buffer and return the buffered output'''
1082 1082 self._bufferstates.pop()
1083 1083 if self._bufferstates:
1084 1084 self._bufferapplylabels = self._bufferstates[-1][2]
1085 1085 else:
1086 1086 self._bufferapplylabels = None
1087 1087
1088 1088 return b"".join(self._buffers.pop())
1089 1089
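The usual capture pattern built from these two methods, assuming `u` is a ui instance: everything written between push and pop comes back as a single bytes object instead of reaching stdout.

u.pushbuffer()
u.write(b'captured line\n')
out = u.popbuffer()
assert out == b'captured line\n'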
1090 1090 def _isbuffered(self, dest):
1091 1091 if dest is self._fout:
1092 1092 return bool(self._buffers)
1093 1093 if dest is self._ferr:
1094 1094 return bool(self._bufferstates and self._bufferstates[-1][0])
1095 1095 return False
1096 1096
1097 1097 def canwritewithoutlabels(self):
1098 1098 '''check if write skips the label'''
1099 1099 if self._buffers and not self._bufferapplylabels:
1100 1100 return True
1101 1101 return self._colormode is None
1102 1102
1103 1103 def canbatchlabeledwrites(self):
1104 1104 '''check if write calls with labels are batchable'''
1105 1105 # Windows color printing is special, see ``write``.
1106 1106 return self._colormode != b'win32'
1107 1107
1108 1108 def write(self, *args, **opts):
1109 1109 '''write args to output
1110 1110
1111 1111 By default, this method simply writes to the buffer or stdout.
1112 1112 Color mode can be set on the UI class to have the output decorated
1113 1113 with color modifier before being written to stdout.
1114 1114
1115 1115 The color used is controlled by an optional keyword argument, "label".
1116 1116 This should be a string containing label names separated by space.
1117 1117 Label names take the form of "topic.type". For example, ui.debug()
1118 1118 issues a label of "ui.debug".
1119 1119
1120 1120 Progress reports via stderr are normally cleared before writing as
1121 1121 stdout and stderr go to the same terminal. This can be skipped with
1122 1122 the optional keyword argument "keepprogressbar". The progress bar
1123 1123 will continue to occupy a partial line on stderr in that case.
1124 1124 This functionality is intended for when Mercurial acts as a data source
1125 1125 in a pipe.
1126 1126
1127 1127 When labeling output for a specific command, a label of
1128 1128 "cmdname.type" is recommended. For example, status issues
1129 1129 a label of "status.modified" for modified files.
1130 1130 '''
1131 1131 dest = self._fout
1132 1132
1133 1133 # inlined _write() for speed
1134 1134 if self._buffers:
1135 1135 label = opts.get('label', b'')
1136 1136 if label and self._bufferapplylabels:
1137 1137 self._buffers[-1].extend(self.label(a, label) for a in args)
1138 1138 else:
1139 1139 self._buffers[-1].extend(args)
1140 1140 return
1141 1141
1142 1142 # inlined _writenobuf() for speed
1143 1143 if not opts.get('keepprogressbar', False):
1144 1144 self._progclear()
1145 1145 msg = b''.join(args)
1146 1146
1147 1147 # opencode timeblockedsection because this is a critical path
1148 1148 starttime = util.timer()
1149 1149 try:
1150 1150 if self._colormode == b'win32':
1151 1151 # windows color printing is its own can of crab, defer to
1152 1152 # the color module and that is it.
1153 1153 color.win32print(self, dest.write, msg, **opts)
1154 1154 else:
1155 1155 if self._colormode is not None:
1156 1156 label = opts.get('label', b'')
1157 1157 msg = self.label(msg, label)
1158 1158 dest.write(msg)
1159 1159 except IOError as err:
1160 1160 raise error.StdioError(err)
1161 1161 finally:
1162 1162 self._blockedtimes[b'stdio_blocked'] += (
1163 1163 util.timer() - starttime
1164 1164 ) * 1000
1165 1165
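Label usage as described in the docstring, assuming `u` is a ui instance; the label only changes decoration when a color mode is active.

u.write(b'M some/file.py\n', label=b'status.modified')
u.write_err(b'something odd happened\n', label=b'ui.warning')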
1166 1166 def write_err(self, *args, **opts):
1167 1167 self._write(self._ferr, *args, **opts)
1168 1168
1169 1169 def _write(self, dest, *args, **opts):
1170 1170 # update write() as well if you touch this code
1171 1171 if self._isbuffered(dest):
1172 1172 label = opts.get('label', b'')
1173 1173 if label and self._bufferapplylabels:
1174 1174 self._buffers[-1].extend(self.label(a, label) for a in args)
1175 1175 else:
1176 1176 self._buffers[-1].extend(args)
1177 1177 else:
1178 1178 self._writenobuf(dest, *args, **opts)
1179 1179
1180 1180 def _writenobuf(self, dest, *args, **opts):
1181 1181 # update write() as well if you touch this code
1182 1182 if not opts.get('keepprogressbar', False):
1183 1183 self._progclear()
1184 1184 msg = b''.join(args)
1185 1185
1186 1186 # opencode timeblockedsection because this is a critical path
1187 1187 starttime = util.timer()
1188 1188 try:
1189 1189 if dest is self._ferr and not getattr(self._fout, 'closed', False):
1190 1190 self._fout.flush()
1191 1191 if getattr(dest, 'structured', False):
1192 1192 # channel for machine-readable output with metadata, where
1193 1193 # no extra colorization is necessary.
1194 1194 dest.write(msg, **opts)
1195 1195 elif self._colormode == b'win32':
1196 1196 # windows color printing is its own can of crab, defer to
1197 1197 # the color module and that is it.
1198 1198 color.win32print(self, dest.write, msg, **opts)
1199 1199 else:
1200 1200 if self._colormode is not None:
1201 1201 label = opts.get('label', b'')
1202 1202 msg = self.label(msg, label)
1203 1203 dest.write(msg)
1204 1204 # stderr may be buffered under win32 when redirected to files,
1205 1205 # including stdout.
1206 1206 if dest is self._ferr and not getattr(dest, 'closed', False):
1207 1207 dest.flush()
1208 1208 except IOError as err:
1209 1209 if dest is self._ferr and err.errno in (
1210 1210 errno.EPIPE,
1211 1211 errno.EIO,
1212 1212 errno.EBADF,
1213 1213 ):
1214 1214 # no way to report the error, so ignore it
1215 1215 return
1216 1216 raise error.StdioError(err)
1217 1217 finally:
1218 1218 self._blockedtimes[b'stdio_blocked'] += (
1219 1219 util.timer() - starttime
1220 1220 ) * 1000
1221 1221
1222 1222 def _writemsg(self, dest, *args, **opts):
1223 1223 timestamp = self.showtimestamp and opts.get('type') in {
1224 1224 b'debug',
1225 1225 b'error',
1226 1226 b'note',
1227 1227 b'status',
1228 1228 b'warning',
1229 1229 }
1230 1230 if timestamp:
1231 1231 args = (
1232 1232 b'[%s] '
1233 1233 % pycompat.bytestr(datetime.datetime.now().isoformat()),
1234 1234 ) + args
1235 1235 _writemsgwith(self._write, dest, *args, **opts)
1236 1236 if timestamp:
1237 1237 dest.flush()
1238 1238
1239 1239 def _writemsgnobuf(self, dest, *args, **opts):
1240 1240 _writemsgwith(self._writenobuf, dest, *args, **opts)
1241 1241
1242 1242 def flush(self):
1243 1243 # opencode timeblockedsection because this is a critical path
1244 1244 starttime = util.timer()
1245 1245 try:
1246 1246 try:
1247 1247 self._fout.flush()
1248 1248 except IOError as err:
1249 1249 if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
1250 1250 raise error.StdioError(err)
1251 1251 finally:
1252 1252 try:
1253 1253 self._ferr.flush()
1254 1254 except IOError as err:
1255 1255 if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
1256 1256 raise error.StdioError(err)
1257 1257 finally:
1258 1258 self._blockedtimes[b'stdio_blocked'] += (
1259 1259 util.timer() - starttime
1260 1260 ) * 1000
1261 1261
1262 1262 def _isatty(self, fh):
1263 1263 if self.configbool(b'ui', b'nontty'):
1264 1264 return False
1265 1265 return procutil.isatty(fh)
1266 1266
1267 1267 def protectfinout(self):
1268 1268 """Duplicate ui streams and redirect original if they are stdio
1269 1269
1270 1270 Returns (fin, fout) which point to the original ui fds, but may be
1271 1271 copies of them. The returned streams can be considered "owned" in that
1272 1272 print(), exec(), etc. never reach them.
1273 1273 """
1274 1274 if self._finoutredirected:
1275 1275 # if already redirected, protectstdio() would just create another
1276 1276 # nullfd pair, which is equivalent to returning self._fin/_fout.
1277 1277 return self._fin, self._fout
1278 1278 fin, fout = procutil.protectstdio(self._fin, self._fout)
1279 1279 self._finoutredirected = (fin, fout) != (self._fin, self._fout)
1280 1280 return fin, fout
1281 1281
1282 1282 def restorefinout(self, fin, fout):
1283 1283 """Restore ui streams from possibly duplicated (fin, fout)"""
1284 1284 if (fin, fout) == (self._fin, self._fout):
1285 1285 return
1286 1286 procutil.restorestdio(self._fin, self._fout, fin, fout)
1287 1287 # protectfinout() won't create more than one duplicated streams,
1288 1288 # so we can just turn the redirection flag off.
1289 1289 self._finoutredirected = False
1290 1290
1291 1291 @contextlib.contextmanager
1292 1292 def protectedfinout(self):
1293 1293 """Run code block with protected standard streams"""
1294 1294 fin, fout = self.protectfinout()
1295 1295 try:
1296 1296 yield fin, fout
1297 1297 finally:
1298 1298 self.restorefinout(fin, fout)
1299 1299
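A sketch of the intended use, assuming `u` is a ui instance: within the block, the returned streams are the copies the caller owns, so stray print()/exec() output cannot interleave with what is written here.

with u.protectedfinout() as (fin, fout):
    # write raw bytes to the protected copy of the ui's output stream
    fout.write(b'raw payload\n')
    fout.flush()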
1300 1300 def disablepager(self):
1301 1301 self._disablepager = True
1302 1302
1303 1303 def pager(self, command):
1304 1304 """Start a pager for subsequent command output.
1305 1305
1306 1306 Commands which produce a long stream of output should call
1307 1307 this function to activate the user's preferred pagination
1308 1308 mechanism (which may be no pager). Calling this function
1309 1309 precludes any future use of interactive functionality, such as
1310 1310 prompting the user or activating curses.
1311 1311
1312 1312 Args:
1313 1313 command: The full, non-aliased name of the command. That is, "log"
1314 1314 not "history", "summary" not "summ", etc.
1315 1315 """
1316 1316 if self._disablepager or self.pageractive:
1317 1317 # whether to use a pager has already been determined
1318 1318 return
1319 1319
1320 1320 if not command.startswith(b'internal-always-') and (
1321 1321 # explicit --pager=on (= 'internal-always-' prefix) should
1322 1322 # take precedence over disabling factors below
1323 1323 command in self.configlist(b'pager', b'ignore')
1324 1324 or not self.configbool(b'ui', b'paginate')
1325 1325 or not self.configbool(b'pager', b'attend-' + command, True)
1326 1326 or encoding.environ.get(b'TERM') == b'dumb'
1327 1327 # TODO: if we want to allow HGPLAINEXCEPT=pager,
1328 1328 # formatted() will need some adjustment.
1329 1329 or not self.formatted()
1330 1330 or self.plain()
1331 1331 or self._buffers
1332 1332 # TODO: expose debugger-enabled on the UI object
1333 1333 or b'--debugger' in pycompat.sysargv
1334 1334 ):
1335 1335 # We only want to paginate if the ui appears to be
1336 1336 # interactive, the user didn't say HGPLAIN or
1337 1337 # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
1338 1338 return
1339 1339
1340 1340 pagercmd = self.config(b'pager', b'pager', rcutil.fallbackpager)
1341 1341 if not pagercmd:
1342 1342 return
1343 1343
1344 1344 pagerenv = {}
1345 1345 for name, value in rcutil.defaultpagerenv().items():
1346 1346 if name not in encoding.environ:
1347 1347 pagerenv[name] = value
1348 1348
1349 1349 self.debug(
1350 1350 b'starting pager for command %s\n' % stringutil.pprint(command)
1351 1351 )
1352 1352 self.flush()
1353 1353
1354 1354 wasformatted = self.formatted()
1355 1355 if util.safehasattr(signal, b"SIGPIPE"):
1356 1356 signal.signal(signal.SIGPIPE, _catchterm)
1357 1357 if self._runpager(pagercmd, pagerenv):
1358 1358 self.pageractive = True
1359 1359 # Preserve the formatted-ness of the UI. This is important
1360 1360 # because we mess with stdout, which might confuse
1361 1361 # auto-detection of things being formatted.
1362 1362 self.setconfig(b'ui', b'formatted', wasformatted, b'pager')
1363 1363 self.setconfig(b'ui', b'interactive', False, b'pager')
1364 1364
1365 1365 # If pagermode differs from color.mode, reconfigure color now that
1366 1366 # pageractive is set.
1367 1367 cm = self._colormode
1368 1368 if cm != self.config(b'color', b'pagermode', cm):
1369 1369 color.setup(self)
1370 1370 else:
1371 1371 # If the pager can't be spawned in dispatch when --pager=on is
1372 1372 # given, don't try again when the command runs, to avoid a duplicate
1373 1373 # warning about a missing pager command.
1374 1374 self.disablepager()
1375 1375
1376 1376 def _runpager(self, command, env=None):
1377 1377 """Actually start the pager and set up file descriptors.
1378 1378
1379 1379 This is separate in part so that extensions (like chg) can
1380 1380 override how a pager is invoked.
1381 1381 """
1382 1382 if command == b'cat':
1383 1383 # Save ourselves some work.
1384 1384 return False
1385 1385 # If the command doesn't contain any of these characters, we
1386 1386 # assume it's a binary and exec it directly. This means for
1387 1387 # simple pager command configurations, we can degrade
1388 1388 # gracefully and tell the user about their broken pager.
1389 1389 shell = any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")
1390 1390
1391 1391 if pycompat.iswindows and not shell:
1392 1392 # Windows' built-in `more` cannot be invoked with shell=False, but
1393 1393 # its `more.com` can. Hide this implementation detail from the
1394 1394 # user so we also get sane behavior for a bad PAGER setting. MSYS has
1395 1395 # `more.exe`, so do a cmd.exe style resolution of the executable to
1396 1396 # determine which one to use.
1397 1397 fullcmd = procutil.findexe(command)
1398 1398 if not fullcmd:
1399 1399 self.warn(
1400 1400 _(b"missing pager command '%s', skipping pager\n") % command
1401 1401 )
1402 1402 return False
1403 1403
1404 1404 command = fullcmd
1405 1405
1406 1406 try:
1407 1407 pager = subprocess.Popen(
1408 1408 procutil.tonativestr(command),
1409 1409 shell=shell,
1410 1410 bufsize=-1,
1411 1411 close_fds=procutil.closefds,
1412 1412 stdin=subprocess.PIPE,
1413 1413 stdout=procutil.stdout,
1414 1414 stderr=procutil.stderr,
1415 1415 env=procutil.tonativeenv(procutil.shellenviron(env)),
1416 1416 )
1417 1417 except OSError as e:
1418 1418 if e.errno == errno.ENOENT and not shell:
1419 1419 self.warn(
1420 1420 _(b"missing pager command '%s', skipping pager\n") % command
1421 1421 )
1422 1422 return False
1423 1423 raise
1424 1424
1425 1425 # back up original file descriptors
1426 1426 stdoutfd = os.dup(procutil.stdout.fileno())
1427 1427 stderrfd = os.dup(procutil.stderr.fileno())
1428 1428
1429 1429 os.dup2(pager.stdin.fileno(), procutil.stdout.fileno())
1430 1430 if self._isatty(procutil.stderr):
1431 1431 os.dup2(pager.stdin.fileno(), procutil.stderr.fileno())
1432 1432
1433 1433 @self.atexit
1434 1434 def killpager():
1435 1435 if util.safehasattr(signal, b"SIGINT"):
1436 1436 signal.signal(signal.SIGINT, signal.SIG_IGN)
1437 1437 # restore original fds, closing pager.stdin copies in the process
1438 1438 os.dup2(stdoutfd, procutil.stdout.fileno())
1439 1439 os.dup2(stderrfd, procutil.stderr.fileno())
1440 1440 pager.stdin.close()
1441 1441 pager.wait()
1442 1442
1443 1443 return True
1444 1444
1445 1445 @property
1446 1446 def _exithandlers(self):
1447 1447 return _reqexithandlers
1448 1448
1449 1449 def atexit(self, func, *args, **kwargs):
1450 1450 '''register a function to run after dispatching a request
1451 1451
1452 1452 Handlers do not stay registered across request boundaries.'''
1453 1453 self._exithandlers.append((func, args, kwargs))
1454 1454 return func
1455 1455
1456 1456 def interface(self, feature):
1457 1457 """what interface to use for interactive console features?
1458 1458
1459 1459 The interface is controlled by the value of `ui.interface` but also by
1460 1460 the value of feature-specific configuration. For example:
1461 1461
1462 1462 ui.interface.histedit = text
1463 1463 ui.interface.chunkselector = curses
1464 1464
1465 1465 Here the features are "histedit" and "chunkselector".
1466 1466
1467 1467 The configuration above means that the default interface for commands
1468 1468 is curses, the interface for histedit is text, and the interface for
1469 1469 selecting chunks is crecord (the best curses interface available).
1470 1470
1471 1471 Consider the following example:
1472 1472 ui.interface = curses
1473 1473 ui.interface.histedit = text
1474 1474
1475 1475 Then histedit will use the text interface and chunkselector will use
1476 1476 the default curses interface (crecord at the moment).
1477 1477 """
1478 1478 alldefaults = frozenset([b"text", b"curses"])
1479 1479
1480 1480 featureinterfaces = {
1481 1481 b"chunkselector": [b"text", b"curses",],
1482 1482 b"histedit": [b"text", b"curses",],
1483 1483 }
1484 1484
1485 1485 # Feature-specific interface
1486 1486 if feature not in featureinterfaces.keys():
1487 1487 # Programming error, not user error
1488 1488 raise ValueError(b"Unknown feature requested %s" % feature)
1489 1489
1490 1490 availableinterfaces = frozenset(featureinterfaces[feature])
1491 1491 if alldefaults > availableinterfaces:
1492 1492 # Programming error, not user error. We need a use case to
1493 1493 # define the right thing to do here.
1494 1494 raise ValueError(
1495 1495 b"Feature %s does not handle all default interfaces" % feature
1496 1496 )
1497 1497
1498 1498 if self.plain() or encoding.environ.get(b'TERM') == b'dumb':
1499 1499 return b"text"
1500 1500
1501 1501 # Default interface for all the features
1502 1502 defaultinterface = b"text"
1503 1503 i = self.config(b"ui", b"interface")
1504 1504 if i in alldefaults:
1505 1505 defaultinterface = i
1506 1506
1507 1507 choseninterface = defaultinterface
1508 1508 f = self.config(b"ui", b"interface.%s" % feature)
1509 1509 if f in availableinterfaces:
1510 1510 choseninterface = f
1511 1511
1512 1512 if i is not None and defaultinterface != i:
1513 1513 if f is not None:
1514 1514 self.warn(_(b"invalid value for ui.interface: %s\n") % (i,))
1515 1515 else:
1516 1516 self.warn(
1517 1517 _(b"invalid value for ui.interface: %s (using %s)\n")
1518 1518 % (i, choseninterface)
1519 1519 )
1520 1520 if f is not None and choseninterface != f:
1521 1521 self.warn(
1522 1522 _(b"invalid value for ui.interface.%s: %s (using %s)\n")
1523 1523 % (feature, f, choseninterface)
1524 1524 )
1525 1525
1526 1526 return choseninterface
1527 1527
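A concrete reading of the second example above (a sketch, assuming `ui` is a configured ui instance, plain mode is off and TERM is not 'dumb'):

# [ui] interface = curses, interface.histedit = text
ui.interface(b'histedit')       # -> b'text'   (feature-specific override wins)
ui.interface(b'chunkselector')  # -> b'curses' (falls back to the ui.interface default)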
1528 1528 def interactive(self):
1529 1529 '''is interactive input allowed?
1530 1530
1531 1531 An interactive session is a session where input can be reasonably read
1532 1532 from `sys.stdin'. If this function returns false, any attempt to read
1533 1533 from stdin should fail with an error, unless a sensible default has been
1534 1534 specified.
1535 1535
1536 1536 Interactiveness is triggered by the value of the `ui.interactive'
1537 1537 configuration variable or - if it is unset - when `sys.stdin' points
1538 1538 to a terminal device.
1539 1539
1540 1540 This function refers to input only; for output, see `ui.formatted()'.
1541 1541 '''
1542 1542 i = self.configbool(b"ui", b"interactive")
1543 1543 if i is None:
1544 1544 # some environments replace stdin without implementing isatty
1545 1545 # usually those are non-interactive
1546 1546 return self._isatty(self._fin)
1547 1547
1548 1548 return i
1549 1549
1550 1550 def termwidth(self):
1551 1551 '''how wide is the terminal in columns?
1552 1552 '''
1553 1553 if b'COLUMNS' in encoding.environ:
1554 1554 try:
1555 1555 return int(encoding.environ[b'COLUMNS'])
1556 1556 except ValueError:
1557 1557 pass
1558 1558 return scmutil.termsize(self)[0]
1559 1559
1560 1560 def formatted(self):
1561 1561 '''should formatted output be used?
1562 1562
1563 1563         It is often desirable to format the output to suit the output medium.
1564 1564         Examples of this are truncating long lines or colorizing messages.
1565 1565         However, this is often not desirable when piping output into other
1566 1566         utilities, e.g. `grep'.
1567 1567
1568 1568 Formatted output is triggered by the value of the `ui.formatted'
1569 1569 configuration variable or - if it is unset - when `sys.stdout' points
1570 1570 to a terminal device. Please note that `ui.formatted' should be
1571 1571 considered an implementation detail; it is not intended for use outside
1572 1572 Mercurial or its extensions.
1573 1573
1574 1574 This function refers to output only; for input, see `ui.interactive()'.
1575 1575 This function always returns false when in plain mode, see `ui.plain()'.
1576 1576 '''
1577 1577 if self.plain():
1578 1578 return False
1579 1579
1580 1580 i = self.configbool(b"ui", b"formatted")
1581 1581 if i is None:
1582 1582 # some environments replace stdout without implementing isatty
1583 1583 # usually those are non-interactive
1584 1584 return self._isatty(self._fout)
1585 1585
1586 1586 return i
1587 1587
1588 1588 def _readline(self, prompt=b' ', promptopts=None):
1589 1589 # Replacing stdin/stdout temporarily is a hard problem on Python 3
1590 1590 # because they have to be text streams with *no buffering*. Instead,
1591 1591 # we use rawinput() only if call_readline() will be invoked by
1592 1592 # PyOS_Readline(), so no I/O will be made at Python layer.
1593 1593 usereadline = (
1594 1594 self._isatty(self._fin)
1595 1595 and self._isatty(self._fout)
1596 1596 and procutil.isstdin(self._fin)
1597 1597 and procutil.isstdout(self._fout)
1598 1598 )
1599 1599 if usereadline:
1600 1600 try:
1601 1601 # magically add command line editing support, where
1602 1602 # available
1603 1603 import readline
1604 1604
1605 1605 # force demandimport to really load the module
1606 1606 readline.read_history_file
1607 1607 # windows sometimes raises something other than ImportError
1608 1608 except Exception:
1609 1609 usereadline = False
1610 1610
1611 1611 if self._colormode == b'win32' or not usereadline:
1612 1612 if not promptopts:
1613 1613 promptopts = {}
1614 1614 self._writemsgnobuf(
1615 1615 self._fmsgout, prompt, type=b'prompt', **promptopts
1616 1616 )
1617 1617 self.flush()
1618 1618 prompt = b' '
1619 1619 else:
1620 1620 prompt = self.label(prompt, b'ui.prompt') + b' '
1621 1621
1622 1622 # prompt ' ' must exist; otherwise readline may delete entire line
1623 1623 # - http://bugs.python.org/issue12833
1624 1624 with self.timeblockedsection(b'stdio'):
1625 1625 if usereadline:
1626 1626 self.flush()
1627 1627 prompt = encoding.strfromlocal(prompt)
1628 1628 line = encoding.strtolocal(pycompat.rawinput(prompt))
1629 1629 # When stdin is in binary mode on Windows, it can cause
1630 1630 # raw_input() to emit an extra trailing carriage return
1631 1631 if pycompat.oslinesep == b'\r\n' and line.endswith(b'\r'):
1632 1632 line = line[:-1]
1633 1633 else:
1634 1634 self._fout.write(pycompat.bytestr(prompt))
1635 1635 self._fout.flush()
1636 1636 line = self._fin.readline()
1637 1637 if not line:
1638 1638 raise EOFError
1639 1639 line = line.rstrip(pycompat.oslinesep)
1640 1640
1641 1641 return line
1642 1642
1643 1643 def prompt(self, msg, default=b"y"):
1644 1644 """Prompt user with msg, read response.
1645 1645 If ui is not interactive, the default is returned.
1646 1646 """
1647 1647 return self._prompt(msg, default=default)
1648 1648
1649 1649 def _prompt(self, msg, **opts):
1650 1650 default = opts['default']
1651 1651 if not self.interactive():
1652 1652 self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
1653 1653 self._writemsg(
1654 1654 self._fmsgout, default or b'', b"\n", type=b'promptecho'
1655 1655 )
1656 1656 return default
1657 1657 try:
1658 1658 r = self._readline(prompt=msg, promptopts=opts)
1659 1659 if not r:
1660 1660 r = default
1661 1661 if self.configbool(b'ui', b'promptecho'):
1662 1662 self._writemsg(
1663 1663 self._fmsgout, r or b'', b"\n", type=b'promptecho'
1664 1664 )
1665 1665 return r
1666 1666 except EOFError:
1667 1667 raise error.ResponseExpected()
1668 1668
1669 1669 @staticmethod
1670 1670 def extractchoices(prompt):
1671 1671 """Extract prompt message and list of choices from specified prompt.
1672 1672
1673 1673 This returns tuple "(message, choices)", and "choices" is the
1674 1674 list of tuple "(response character, text without &)".
1675 1675
1676 1676 >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
1677 1677 ('awake? ', [('y', 'Yes'), ('n', 'No')])
1678 1678 >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
1679 1679 ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
1680 1680 >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
1681 1681 ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
1682 1682 """
1683 1683
1684 1684 # Sadly, the prompt string may have been built with a filename
1685 1685 # containing "$$" so let's try to find the first valid-looking
1686 1686 # prompt to start parsing. Sadly, we also can't rely on
1687 1687 # choices containing spaces, ASCII, or basically anything
1688 1688 # except an ampersand followed by a character.
1689 1689 m = re.match(br'(?s)(.+?)\$\$([^$]*&[^ $].*)', prompt)
1690 1690 msg = m.group(1)
1691 1691 choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
1692 1692
1693 1693 def choicetuple(s):
1694 1694 ampidx = s.index(b'&')
1695 1695 return s[ampidx + 1 : ampidx + 2].lower(), s.replace(b'&', b'', 1)
1696 1696
1697 1697 return (msg, [choicetuple(s) for s in choices])
1698 1698
1699 1699 def promptchoice(self, prompt, default=0):
1700 1700 """Prompt user with a message, read response, and ensure it matches
1701 1701 one of the provided choices. The prompt is formatted as follows:
1702 1702
1703 1703 "would you like fries with that (Yn)? $$ &Yes $$ &No"
1704 1704
1705 1705 The index of the choice is returned. Responses are case
1706 1706 insensitive. If ui is not interactive, the default is
1707 1707 returned.
1708 1708 """
1709 1709
1710 1710 msg, choices = self.extractchoices(prompt)
1711 1711 resps = [r for r, t in choices]
1712 1712 while True:
1713 1713 r = self._prompt(msg, default=resps[default], choices=choices)
1714 1714 if r.lower() in resps:
1715 1715 return resps.index(r.lower())
1716 1716 # TODO: shouldn't it be a warning?
1717 1717 self._writemsg(self._fmsgout, _(b"unrecognized response\n"))
1718 1718
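A usage sketch (the `apply_change` call is a hypothetical caller function):

idx = ui.promptchoice(b'apply this change (Yn)? $$ &Yes $$ &No')
if idx == 0:          # empty input picks the default (index 0, i.e. "Yes")
    apply_change()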
1719 1719 def getpass(self, prompt=None, default=None):
1720 1720 if not self.interactive():
1721 1721 return default
1722 1722 try:
1723 1723 self._writemsg(
1724 1724 self._fmsgerr,
1725 1725 prompt or _(b'password: '),
1726 1726 type=b'prompt',
1727 1727 password=True,
1728 1728 )
1729 1729 # disable getpass() only if explicitly specified. it's still valid
1730 1730 # to interact with tty even if fin is not a tty.
1731 1731 with self.timeblockedsection(b'stdio'):
1732 1732 if self.configbool(b'ui', b'nontty'):
1733 1733 l = self._fin.readline()
1734 1734 if not l:
1735 1735 raise EOFError
1736 1736 return l.rstrip(b'\n')
1737 1737 else:
1738 return getpass.getpass('')
1738 return encoding.strtolocal(getpass.getpass(''))
1739 1739 except EOFError:
1740 1740 raise error.ResponseExpected()
1741 1741
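With the change above, callers consistently receive local-encoded bytes on both Python 2 and 3. A minimal calling sketch:

pw = ui.getpass(_(b'passphrase: '))   # prompt goes to the error channel
if pw is None:                        # non-interactive session returned the default
    raise error.Abort(b'a passphrase is required')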
1742 1742 def status(self, *msg, **opts):
1743 1743 '''write status message to output (if ui.quiet is False)
1744 1744
1745 1745 This adds an output label of "ui.status".
1746 1746 '''
1747 1747 if not self.quiet:
1748 1748 self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
1749 1749
1750 1750 def warn(self, *msg, **opts):
1751 1751 '''write warning message to output (stderr)
1752 1752
1753 1753 This adds an output label of "ui.warning".
1754 1754 '''
1755 1755 self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
1756 1756
1757 1757 def error(self, *msg, **opts):
1758 1758 '''write error message to output (stderr)
1759 1759
1760 1760 This adds an output label of "ui.error".
1761 1761 '''
1762 1762 self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
1763 1763
1764 1764 def note(self, *msg, **opts):
1765 1765 '''write note to output (if ui.verbose is True)
1766 1766
1767 1767 This adds an output label of "ui.note".
1768 1768 '''
1769 1769 if self.verbose:
1770 1770 self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
1771 1771
1772 1772 def debug(self, *msg, **opts):
1773 1773 '''write debug message to output (if ui.debugflag is True)
1774 1774
1775 1775 This adds an output label of "ui.debug".
1776 1776 '''
1777 1777 if self.debugflag:
1778 1778 self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
1779 1779 self.log(b'debug', b'%s', b''.join(msg))
1780 1780
1781 1781 # Aliases to defeat check-code.
1782 1782 statusnoi18n = status
1783 1783 notenoi18n = note
1784 1784 warnnoi18n = warn
1785 1785 writenoi18n = write
1786 1786
1787 1787 def edit(
1788 1788 self,
1789 1789 text,
1790 1790 user,
1791 1791 extra=None,
1792 1792 editform=None,
1793 1793 pending=None,
1794 1794 repopath=None,
1795 1795 action=None,
1796 1796 ):
1797 1797 if action is None:
1798 1798 self.develwarn(
1799 1799 b'action is None but will soon be a required '
1800 1800 b'parameter to ui.edit()'
1801 1801 )
1802 1802 extra_defaults = {
1803 1803 b'prefix': b'editor',
1804 1804 b'suffix': b'.txt',
1805 1805 }
1806 1806 if extra is not None:
1807 1807 if extra.get(b'suffix') is not None:
1808 1808 self.develwarn(
1809 1809 b'extra.suffix is not None but will soon be '
1810 1810 b'ignored by ui.edit()'
1811 1811 )
1812 1812 extra_defaults.update(extra)
1813 1813 extra = extra_defaults
1814 1814
1815 1815 if action == b'diff':
1816 1816 suffix = b'.diff'
1817 1817 elif action:
1818 1818 suffix = b'.%s.hg.txt' % action
1819 1819 else:
1820 1820 suffix = extra[b'suffix']
1821 1821
1822 1822 rdir = None
1823 1823 if self.configbool(b'experimental', b'editortmpinhg'):
1824 1824 rdir = repopath
1825 1825 (fd, name) = pycompat.mkstemp(
1826 1826 prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
1827 1827 )
1828 1828 try:
1829 1829 with os.fdopen(fd, 'wb') as f:
1830 1830 f.write(util.tonativeeol(text))
1831 1831
1832 1832 environ = {b'HGUSER': user}
1833 1833 if b'transplant_source' in extra:
1834 1834 environ.update(
1835 1835 {b'HGREVISION': hex(extra[b'transplant_source'])}
1836 1836 )
1837 1837 for label in (b'intermediate-source', b'source', b'rebase_source'):
1838 1838 if label in extra:
1839 1839 environ.update({b'HGREVISION': extra[label]})
1840 1840 break
1841 1841 if editform:
1842 1842 environ.update({b'HGEDITFORM': editform})
1843 1843 if pending:
1844 1844 environ.update({b'HG_PENDING': pending})
1845 1845
1846 1846 editor = self.geteditor()
1847 1847
1848 1848 self.system(
1849 1849 b"%s \"%s\"" % (editor, name),
1850 1850 environ=environ,
1851 1851 onerr=error.Abort,
1852 1852 errprefix=_(b"edit failed"),
1853 1853 blockedtag=b'editor',
1854 1854 )
1855 1855
1856 1856 with open(name, 'rb') as f:
1857 1857 t = util.fromnativeeol(f.read())
1858 1858 finally:
1859 1859 os.unlink(name)
1860 1860
1861 1861 return t
1862 1862
1863 1863 def system(
1864 1864 self,
1865 1865 cmd,
1866 1866 environ=None,
1867 1867 cwd=None,
1868 1868 onerr=None,
1869 1869 errprefix=None,
1870 1870 blockedtag=None,
1871 1871 ):
1872 1872 '''execute shell command with appropriate output stream. command
1873 1873 output will be redirected if fout is not stdout.
1874 1874
1875 1875 if command fails and onerr is None, return status, else raise onerr
1876 1876 object as exception.
1877 1877 '''
1878 1878 if blockedtag is None:
1879 1879             # Long cmds tend to be long because of an absolute path in cmd. Keep
1880 1880             # the tail end instead.
1881 1881 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1882 1882 blockedtag = b'unknown_system_' + cmdsuffix
1883 1883 out = self._fout
1884 1884 if any(s[1] for s in self._bufferstates):
1885 1885 out = self
1886 1886 with self.timeblockedsection(blockedtag):
1887 1887 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1888 1888 if rc and onerr:
1889 1889 errmsg = b'%s %s' % (
1890 1890 procutil.shellsplit(cmd)[0],
1891 1891 procutil.explainexit(rc),
1892 1892 )
1893 1893 if errprefix:
1894 1894 errmsg = b'%s: %s' % (errprefix, errmsg)
1895 1895 raise onerr(errmsg)
1896 1896 return rc
1897 1897
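A sketch of a typical call site; the command and environment shown are placeholders. `onerr=error.Abort` turns a non-zero exit status into an abort carrying the given prefix:

rc = ui.system(
    b'gpg --verify sig data',
    environ={b'LC_ALL': b'C'},
    onerr=error.Abort,
    errprefix=_(b'verification failed'),
    blockedtag=b'example_system',
)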
1898 1898 def _runsystem(self, cmd, environ, cwd, out):
1899 1899 """actually execute the given shell command (can be overridden by
1900 1900 extensions like chg)"""
1901 1901 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
1902 1902
1903 1903 def traceback(self, exc=None, force=False):
1904 1904         '''print exception traceback if traceback printing is enabled or forced.
1905 1905         only to be called in an exception handler. returns true if a traceback
1906 1906         was printed.'''
1907 1907 if self.tracebackflag or force:
1908 1908 if exc is None:
1909 1909 exc = sys.exc_info()
1910 1910 cause = getattr(exc[1], 'cause', None)
1911 1911
1912 1912 if cause is not None:
1913 1913 causetb = traceback.format_tb(cause[2])
1914 1914 exctb = traceback.format_tb(exc[2])
1915 1915 exconly = traceback.format_exception_only(cause[0], cause[1])
1916 1916
1917 1917 # exclude frame where 'exc' was chained and rethrown from exctb
1918 1918 self.write_err(
1919 1919 b'Traceback (most recent call last):\n',
1920 1920 encoding.strtolocal(''.join(exctb[:-1])),
1921 1921 encoding.strtolocal(''.join(causetb)),
1922 1922 encoding.strtolocal(''.join(exconly)),
1923 1923 )
1924 1924 else:
1925 1925 output = traceback.format_exception(exc[0], exc[1], exc[2])
1926 1926 self.write_err(encoding.strtolocal(''.join(output)))
1927 1927 return self.tracebackflag or force
1928 1928
1929 1929 def geteditor(self):
1930 1930 '''return editor to use'''
1931 1931 if pycompat.sysplatform == b'plan9':
1932 1932 # vi is the MIPS instruction simulator on Plan 9. We
1933 1933 # instead default to E to plumb commit messages to
1934 1934 # avoid confusion.
1935 1935 editor = b'E'
1936 1936 elif pycompat.isdarwin:
1937 1937 # vi on darwin is POSIX compatible to a fault, and that includes
1938 1938 # exiting non-zero if you make any mistake when running an ex
1939 1939 # command. Proof: `vi -c ':unknown' -c ':qa'; echo $?` produces 1,
1940 1940 # while s/vi/vim/ doesn't.
1941 1941 editor = b'vim'
1942 1942 else:
1943 1943 editor = b'vi'
1944 1944 return encoding.environ.get(b"HGEDITOR") or self.config(
1945 1945 b"ui", b"editor", editor
1946 1946 )
1947 1947
1948 1948 @util.propertycache
1949 1949 def _progbar(self):
1950 1950         """set up the progbar singleton for this ui object"""
1951 1951 if (
1952 1952 self.quiet
1953 1953 or self.debugflag
1954 1954 or self.configbool(b'progress', b'disable')
1955 1955 or not progress.shouldprint(self)
1956 1956 ):
1957 1957 return None
1958 1958 return getprogbar(self)
1959 1959
1960 1960 def _progclear(self):
1961 1961 """clear progress bar output if any. use it before any output"""
1962 1962 if not haveprogbar(): # nothing loaded yet
1963 1963 return
1964 1964 if self._progbar is not None and self._progbar.printed:
1965 1965 self._progbar.clear()
1966 1966
1967 1967 def makeprogress(self, topic, unit=b"", total=None):
1968 1968 """Create a progress helper for the specified topic"""
1969 1969 if getattr(self._fmsgerr, 'structured', False):
1970 1970 # channel for machine-readable output with metadata, just send
1971 1971 # raw information
1972 1972 # TODO: consider porting some useful information (e.g. estimated
1973 1973 # time) from progbar. we might want to support update delay to
1974 1974 # reduce the cost of transferring progress messages.
1975 1975 def updatebar(topic, pos, item, unit, total):
1976 1976 self._fmsgerr.write(
1977 1977 None,
1978 1978 type=b'progress',
1979 1979 topic=topic,
1980 1980 pos=pos,
1981 1981 item=item,
1982 1982 unit=unit,
1983 1983 total=total,
1984 1984 )
1985 1985
1986 1986 elif self._progbar is not None:
1987 1987 updatebar = self._progbar.progress
1988 1988 else:
1989 1989
1990 1990 def updatebar(topic, pos, item, unit, total):
1991 1991 pass
1992 1992
1993 1993 return scmutil.progress(self, updatebar, topic, unit, total)
1994 1994
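Callers typically drive the returned helper like this (the `files` list is hypothetical):

progress = ui.makeprogress(b'scanning', unit=b'files', total=len(files))
for i, name in enumerate(files):
    progress.update(i, item=name)   # routed to the updatebar() chosen above
progress.complete()                 # clears the bar / sends the final record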
1995 1995 def getlogger(self, name):
1996 1996 """Returns a logger of the given name; or None if not registered"""
1997 1997 return self._loggers.get(name)
1998 1998
1999 1999 def setlogger(self, name, logger):
2000 2000 """Install logger which can be identified later by the given name
2001 2001
2002 2002         More than one logger can be registered. Use the extension or module
2003 2003         name to uniquely identify the logger instance.
2004 2004 """
2005 2005 self._loggers[name] = logger
2006 2006
2007 2007 def log(self, event, msgfmt, *msgargs, **opts):
2008 2008 '''hook for logging facility extensions
2009 2009
2010 2010 event should be a readily-identifiable subsystem, which will
2011 2011 allow filtering.
2012 2012
2013 2013 msgfmt should be a newline-terminated format string to log, and
2014 2014 *msgargs are %-formatted into it.
2015 2015
2016 2016 **opts currently has no defined meanings.
2017 2017 '''
2018 2018 if not self._loggers:
2019 2019 return
2020 2020 activeloggers = [
2021 2021 l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
2022 2022 ]
2023 2023 if not activeloggers:
2024 2024 return
2025 2025 msg = msgfmt % msgargs
2026 2026 opts = pycompat.byteskwargs(opts)
2027 2027 # guard against recursion from e.g. ui.debug()
2028 2028 registeredloggers = self._loggers
2029 2029 self._loggers = {}
2030 2030 try:
2031 2031 for logger in activeloggers:
2032 2032 logger.log(self, event, msg, opts)
2033 2033 finally:
2034 2034 self._loggers = registeredloggers
2035 2035
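A sketch of how this fits together: a logger object must provide `tracked(event)` and `log(ui, event, msg, opts)`, as used in the loop above. The logger and event names here are hypothetical:

class printlogger(object):
    """minimal logger: echo every tracked event to the error stream"""

    def tracked(self, event):
        return event == b'myext'

    def log(self, ui, event, msg, opts):
        ui.write_err(b'[%s] %s' % (event, msg))

ui.setlogger(b'myext', printlogger())
ui.log(b'myext', b'processed %d items\n', 42)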
2036 2036 def label(self, msg, label):
2037 2037 '''style msg based on supplied label
2038 2038
2039 2039 If some color mode is enabled, this will add the necessary control
2040 2040 characters to apply such color. In addition, 'debug' color mode adds
2041 2041 markup showing which label affects a piece of text.
2042 2042
2043 2043 ui.write(s, 'label') is equivalent to
2044 2044 ui.write(ui.label(s, 'label')).
2045 2045 '''
2046 2046 if self._colormode is not None:
2047 2047 return color.colorlabel(self, msg, label)
2048 2048 return msg
2049 2049
2050 2050 def develwarn(self, msg, stacklevel=1, config=None):
2051 2051 """issue a developer warning message
2052 2052
2053 2053 Use 'stacklevel' to report the offender some layers further up in the
2054 2054 stack.
2055 2055 """
2056 2056 if not self.configbool(b'devel', b'all-warnings'):
2057 2057 if config is None or not self.configbool(b'devel', config):
2058 2058 return
2059 2059 msg = b'devel-warn: ' + msg
2060 2060 stacklevel += 1 # get in develwarn
2061 2061 if self.tracebackflag:
2062 2062 util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
2063 2063 self.log(
2064 2064 b'develwarn',
2065 2065 b'%s at:\n%s'
2066 2066 % (msg, b''.join(util.getstackframes(stacklevel))),
2067 2067 )
2068 2068 else:
2069 2069 curframe = inspect.currentframe()
2070 2070 calframe = inspect.getouterframes(curframe, 2)
2071 2071 fname, lineno, fmsg = calframe[stacklevel][1:4]
2072 2072 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
2073 2073 self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
2074 2074 self.log(
2075 2075 b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
2076 2076 )
2077 2077
2078 2078 # avoid cycles
2079 2079 del curframe
2080 2080 del calframe
2081 2081
2082 2082 def deprecwarn(self, msg, version, stacklevel=2):
2083 2083 """issue a deprecation warning
2084 2084
2085 2085 - msg: message explaining what is deprecated and how to upgrade,
2086 2086 - version: last version where the API will be supported,
2087 2087 """
2088 2088 if not (
2089 2089 self.configbool(b'devel', b'all-warnings')
2090 2090 or self.configbool(b'devel', b'deprec-warn')
2091 2091 ):
2092 2092 return
2093 2093 msg += (
2094 2094 b"\n(compatibility will be dropped after Mercurial-%s,"
2095 2095 b" update your code.)"
2096 2096 ) % version
2097 2097 self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
2098 2098
2099 2099 def exportableenviron(self):
2100 2100 """The environment variables that are safe to export, e.g. through
2101 2101 hgweb.
2102 2102 """
2103 2103 return self._exportableenviron
2104 2104
2105 2105 @contextlib.contextmanager
2106 2106 def configoverride(self, overrides, source=b""):
2107 2107 """Context manager for temporary config overrides
2108 2108 `overrides` must be a dict of the following structure:
2109 2109 {(section, name) : value}"""
2110 2110 backups = {}
2111 2111 try:
2112 2112 for (section, name), value in overrides.items():
2113 2113 backups[(section, name)] = self.backupconfig(section, name)
2114 2114 self.setconfig(section, name, value, source)
2115 2115 yield
2116 2116 finally:
2117 2117 for __, backup in backups.items():
2118 2118 self.restoreconfig(backup)
2119 2119 # just restoring ui.quiet config to the previous value is not enough
2120 2120 # as it does not update ui.quiet class member
2121 2121 if (b'ui', b'quiet') in overrides:
2122 2122 self.fixconfig(section=b'ui')
2123 2123
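Typical use as a context manager (a sketch; the body is a placeholder):

with ui.configoverride({(b'ui', b'quiet'): True}, source=b'myext'):
    # ui.quiet is in effect here; the previous value (and the class
    # member, via fixconfig) is restored when the block exits
    pass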
2124 2124 def estimatememory(self):
2125 2125 """Provide an estimate for the available system memory in Bytes.
2126 2126
2127 2127         This can be overridden via ui.available-memory. It returns None if
2128 2128         no estimate can be computed.
2129 2129 """
2130 2130 value = self.config(b'ui', b'available-memory')
2131 2131 if value is not None:
2132 2132 try:
2133 2133 return util.sizetoint(value)
2134 2134 except error.ParseError:
2135 2135 raise error.ConfigError(
2136 2136 _(b"ui.available-memory value is invalid ('%s')") % value
2137 2137 )
2138 2138 return util._estimatememory()
2139 2139
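The override is parsed with `util.sizetoint`, so suffixed values work. A sketch:

ui.setconfig(b'ui', b'available-memory', b'2GB', b'example')
mem = ui.estimatememory()   # -> an integer number of bytes parsed from '2GB'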
2140 2140
2141 2141 class paths(dict):
2142 2142 """Represents a collection of paths and their configs.
2143 2143
2144 2144 Data is initially derived from ui instances and the config files they have
2145 2145 loaded.
2146 2146 """
2147 2147
2148 2148 def __init__(self, ui):
2149 2149 dict.__init__(self)
2150 2150
2151 2151 for name, loc in ui.configitems(b'paths', ignoresub=True):
2152 2152 # No location is the same as not existing.
2153 2153 if not loc:
2154 2154 continue
2155 2155 loc, sub = ui.configsuboptions(b'paths', name)
2156 2156 self[name] = path(ui, name, rawloc=loc, suboptions=sub)
2157 2157
2158 2158 def getpath(self, name, default=None):
2159 2159 """Return a ``path`` from a string, falling back to default.
2160 2160
2161 2161         ``name`` can be a named path or a location. Locations are filesystem
2162 2162         paths or URIs.
2163 2163
2164 2164 Returns None if ``name`` is not a registered path, a URI, or a local
2165 2165 path to a repo.
2166 2166 """
2167 2167 # Only fall back to default if no path was requested.
2168 2168 if name is None:
2169 2169 if not default:
2170 2170 default = ()
2171 2171 elif not isinstance(default, (tuple, list)):
2172 2172 default = (default,)
2173 2173 for k in default:
2174 2174 try:
2175 2175 return self[k]
2176 2176 except KeyError:
2177 2177 continue
2178 2178 return None
2179 2179
2180 2180 # Most likely empty string.
2181 2181 # This may need to raise in the future.
2182 2182 if not name:
2183 2183 return None
2184 2184
2185 2185 try:
2186 2186 return self[name]
2187 2187 except KeyError:
2188 2188 # Try to resolve as a local path or URI.
2189 2189 try:
2190 2190 # We don't pass sub-options in, so no need to pass ui instance.
2191 2191 return path(None, None, rawloc=name)
2192 2192 except ValueError:
2193 2193 raise error.RepoError(_(b'repository %s does not exist') % name)
2194 2194
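The fallback tuple is only consulted when no explicit name was requested, which is roughly how commands resolve a destination (a sketch):

p = ui.paths.getpath(None, default=(b'default-push', b'default'))
if p is not None:
    dest = p.rawloc        # the location exactly as written in the config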
2195 2195
2196 2196 _pathsuboptions = {}
2197 2197
2198 2198
2199 2199 def pathsuboption(option, attr):
2200 2200 """Decorator used to declare a path sub-option.
2201 2201
2202 2202 Arguments are the sub-option name and the attribute it should set on
2203 2203 ``path`` instances.
2204 2204
2205 2205 The decorated function will receive as arguments a ``ui`` instance,
2206 2206 ``path`` instance, and the string value of this option from the config.
2207 2207 The function should return the value that will be set on the ``path``
2208 2208 instance.
2209 2209
2210 2210 This decorator can be used to perform additional verification of
2211 2211 sub-options and to change the type of sub-options.
2212 2212 """
2213 2213
2214 2214 def register(func):
2215 2215 _pathsuboptions[option] = (attr, func)
2216 2216 return func
2217 2217
2218 2218 return register
2219 2219
2220 2220
2221 2221 @pathsuboption(b'pushurl', b'pushloc')
2222 2222 def pushurlpathoption(ui, path, value):
2223 2223 u = util.url(value)
2224 2224 # Actually require a URL.
2225 2225 if not u.scheme:
2226 2226 ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
2227 2227 return None
2228 2228
2229 2229 # Don't support the #foo syntax in the push URL to declare branch to
2230 2230 # push.
2231 2231 if u.fragment:
2232 2232 ui.warn(
2233 2233 _(
2234 2234 b'("#fragment" in paths.%s:pushurl not supported; '
2235 2235 b'ignoring)\n'
2236 2236 )
2237 2237 % path.name
2238 2238 )
2239 2239 u.fragment = None
2240 2240
2241 2241 return bytes(u)
2242 2242
2243 2243
2244 2244 @pathsuboption(b'pushrev', b'pushrev')
2245 2245 def pushrevpathoption(ui, path, value):
2246 2246 return value
2247 2247
2248 2248
2249 2249 class path(object):
2250 2250 """Represents an individual path and its configuration."""
2251 2251
2252 2252 def __init__(self, ui, name, rawloc=None, suboptions=None):
2253 2253 """Construct a path from its config options.
2254 2254
2255 2255 ``ui`` is the ``ui`` instance the path is coming from.
2256 2256 ``name`` is the symbolic name of the path.
2257 2257 ``rawloc`` is the raw location, as defined in the config.
2258 2258         ``pushloc`` is the raw location pushes should be made to.
2259 2259
2260 2260 If ``name`` is not defined, we require that the location be a) a local
2261 2261 filesystem path with a .hg directory or b) a URL. If not,
2262 2262 ``ValueError`` is raised.
2263 2263 """
2264 2264 if not rawloc:
2265 2265 raise ValueError(b'rawloc must be defined')
2266 2266
2267 2267 # Locations may define branches via syntax <base>#<branch>.
2268 2268 u = util.url(rawloc)
2269 2269 branch = None
2270 2270 if u.fragment:
2271 2271 branch = u.fragment
2272 2272 u.fragment = None
2273 2273
2274 2274 self.url = u
2275 2275 self.branch = branch
2276 2276
2277 2277 self.name = name
2278 2278 self.rawloc = rawloc
2279 2279 self.loc = b'%s' % u
2280 2280
2281 2281         # When given a raw location but not a symbolic name, check that the
2282 2282         # location is valid.
2283 2283 if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
2284 2284 raise ValueError(
2285 2285 b'location is not a URL or path to a local '
2286 2286 b'repo: %s' % rawloc
2287 2287 )
2288 2288
2289 2289 suboptions = suboptions or {}
2290 2290
2291 2291 # Now process the sub-options. If a sub-option is registered, its
2292 2292 # attribute will always be present. The value will be None if there
2293 2293 # was no valid sub-option.
2294 2294 for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions):
2295 2295 if suboption not in suboptions:
2296 2296 setattr(self, attr, None)
2297 2297 continue
2298 2298
2299 2299 value = func(ui, self, suboptions[suboption])
2300 2300 setattr(self, attr, value)
2301 2301
2302 2302 def _isvalidlocalpath(self, path):
2303 2303 """Returns True if the given path is a potentially valid repository.
2304 2304 This is its own function so that extensions can change the definition of
2305 2305 'valid' in this case (like when pulling from a git repo into a hg
2306 2306 one)."""
2307 2307 try:
2308 2308 return os.path.isdir(os.path.join(path, b'.hg'))
2309 2309 # Python 2 may return TypeError. Python 3, ValueError.
2310 2310 except (TypeError, ValueError):
2311 2311 return False
2312 2312
2313 2313 @property
2314 2314 def suboptions(self):
2315 2315 """Return sub-options and their values for this path.
2316 2316
2317 2317 This is intended to be used for presentation purposes.
2318 2318 """
2319 2319 d = {}
2320 2320 for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions):
2321 2321 value = getattr(self, attr)
2322 2322 if value is not None:
2323 2323 d[subopt] = value
2324 2324 return d
2325 2325
2326 2326
2327 2327 # we instantiate one globally shared progress bar to avoid
2328 2328 # competing progress bars when multiple UI objects get created
2329 2329 _progresssingleton = None
2330 2330
2331 2331
2332 2332 def getprogbar(ui):
2333 2333 global _progresssingleton
2334 2334 if _progresssingleton is None:
2335 2335 # passing 'ui' object to the singleton is fishy,
2336 2336 # this is how the extension used to work but feel free to rework it.
2337 2337 _progresssingleton = progress.progbar(ui)
2338 2338 return _progresssingleton
2339 2339
2340 2340
2341 2341 def haveprogbar():
2342 2342 return _progresssingleton is not None
2343 2343
2344 2344
2345 2345 def _selectmsgdests(ui):
2346 2346 name = ui.config(b'ui', b'message-output')
2347 2347 if name == b'channel':
2348 2348 if ui.fmsg:
2349 2349 return ui.fmsg, ui.fmsg
2350 2350 else:
2351 2351 # fall back to ferr if channel isn't ready so that status/error
2352 2352 # messages can be printed
2353 2353 return ui.ferr, ui.ferr
2354 2354 if name == b'stdio':
2355 2355 return ui.fout, ui.ferr
2356 2356 if name == b'stderr':
2357 2357 return ui.ferr, ui.ferr
2358 2358 raise error.Abort(b'invalid ui.message-output destination: %s' % name)
2359 2359
2360 2360
2361 2361 def _writemsgwith(write, dest, *args, **opts):
2362 2362 """Write ui message with the given ui._write*() function
2363 2363
2364 2364 The specified message type is translated to 'ui.<type>' label if the dest
2365 2365 isn't a structured channel, so that the message will be colorized.
2366 2366 """
2367 2367 # TODO: maybe change 'type' to a mandatory option
2368 2368 if 'type' in opts and not getattr(dest, 'structured', False):
2369 2369 opts['label'] = opts.get('label', b'') + b' ui.%s' % opts.pop('type')
2370 2370 write(dest, *args, **opts)