##// END OF EJS Templates
templater: add exception-raising version of open_template()...
Martin von Zweigbergk -
r45880:4aa484ef default
parent child Browse files
Show More
@@ -1,4529 +1,4529 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 obsolete,
63 63 obsutil,
64 64 pathutil,
65 65 phases,
66 66 policy,
67 67 pvec,
68 68 pycompat,
69 69 registrar,
70 70 repair,
71 71 revlog,
72 72 revset,
73 73 revsetlang,
74 74 scmutil,
75 75 setdiscovery,
76 76 simplemerge,
77 77 sshpeer,
78 78 sslutil,
79 79 streamclone,
80 80 tags as tagsmod,
81 81 templater,
82 82 treediscovery,
83 83 upgrade,
84 84 url as urlmod,
85 85 util,
86 86 vfs as vfsmod,
87 87 wireprotoframing,
88 88 wireprotoserver,
89 89 wireprotov2peer,
90 90 )
91 91 from .utils import (
92 92 cborutil,
93 93 compression,
94 94 dateutil,
95 95 procutil,
96 96 stringutil,
97 97 )
98 98
99 99 from .revlogutils import (
100 100 deltas as deltautil,
101 101 nodemap,
102 102 )
103 103
# Convenience alias used by debug commands to release a sequence of locks.
release = lockmod.release

# Decorator that registers each function below into this module's command table.
command = registrar.command()
107 107
108 108
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # No index file: use the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
128 128
129 129
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs filenames are bytes throughout Mercurial; the original passed a
    # str literal here (and in the unlink below), which breaks on Python 3.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
145 145
146 146
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    try:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
    finally:
        # The original leaked the file handle; close it once the bundle
        # has been fully applied.
        f.close()
153 153
154 154
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # The DAG text numbers revisions from 0, so refuse non-empty repos.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: counting only, for the progress bar and the
    # mergeable-file line allocation below)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass: create one commit per 'n' event, all inside a
    # single transaction so a failure leaves no partial history.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev number of the most recently committed node
        atbranch = b'default'  # branch set by the latest '@branch' event
        nodeids = []  # rev number -> node id, used to resolve backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge "mf" from both parents.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's own line so every revision modifies "mf".
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # Carry the second parent's nf* files into the merge
                        # so they are not dropped.
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve parent backrefs to node ids (None == null parent).
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local-tag event: remember it for .hg/localtags at the end.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
330 330
331 331
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump a changegroup stream: full per-delta detail if 'all', else just
    changelog node ids. NOTE: consumes 'gen'; header/chunk reads must stay
    in stream order."""
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one line per delta in the current section of the stream.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # Filelog sections repeat until an empty header ends the stream.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
371 371
372 372
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Marker format is newer than this client understands.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, raw))
        fm.end()
395 395
396 396
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
405 405
406 406
def _quasirepr(thing):
    """Return a repr-like bytes view with deterministic (sorted) key order
    for dict-like values."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
413 413
414 414
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # Honor --part-type filtering when any part type was requested.
        if wanted and part.type not in wanted:
            continue
        header = b'%s -- %s (mandatory: %r)\n'
        ui.write((header % (part.type, _quasirepr(part.params), part.mandatory)))
        # Part types are mutually exclusive, so an elif chain is equivalent
        # to the original sequence of independent checks.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
437 437
438 438
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only report the bundlespec; do not parse the payload.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
461 461
462 462
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for name, vals in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % name)
            for val in vals:
                ui.write(b' %s\n' % val)
479 479
480 480
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errors = [0]

    def complain(msg):
        # Emit the inconsistency and remember that we saw one.
        ui.warn(msg)
        errors[0] += 1

    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            complain(_(b"%s in state %s, but not in manifest1\n") % (f, state))
        if state in b"a" and f in m1:
            complain(_(b"%s in state %s, but also in manifest1\n") % (f, state))
        if state in b"m" and f not in m1 and f not in m2:
            complain(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            complain(_(b"%s in manifest1, but listed as state %s") % (f, state))
    if errors[0]:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
509 509
510 510
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # Dispatch to either the style listing or the color listing.
    show = _debugdisplaystyle if opts.get('style') else _debugdisplaycolor
    return show(ui)
523 523
524 524
def _debugdisplaycolor(ui):
    """list every color/effect name the current color mode can render"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[len(b'color.'):]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
541 541
542 542
def _debugdisplaystyle(ui):
    """list configured style labels and the effects they expand to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in one column.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
556 556
557 557
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
579 579
580 580
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index given: emit that revlog's DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for each revision, plus an
            # ('l', ...) label event for explicitly requested revisions.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No index file: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map rev -> list of tag names, for label events below.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a branch annotation whenever the branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
650 650
651 651
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    useinternal = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if useinternal:
        # With -c/-m/--dir the single positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
667 667
668 668
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    # norepo matches the signature below (no repo parameter). The previous
    # additional optionalrepo=True flag was contradictory dead weight:
    # norepo takes precedence at dispatch, and the function could never
    # have accepted a repo argument anyway.
    norepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # Also try the extended (more permissive) date formats.
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is an (unixtime, tz-offset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
687 687
688 688
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision: sizes, how its delta was chosen, and the
        # full delta chain it sits on.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parent revs.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is either self or the previous rev.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered in order of first appearance of their base rev.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        # Guard the ratios against zero-size revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate a sparse read of the chain to measure I/O efficiency.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
869 869
870 870
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates (deprecated) forces dates off even if --dates was given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        # sort by mtime, then by filename
        sortkey = lambda x: (x[1][3], x[0])
    else:
        sortkey = None # sort by filename
    for path, ent in sorted(pycompat.iteritems(repo.dirstate), key=sortkey):
        mtime = ent[3]
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(mtime)
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
914 914
915 915
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):
        # Legacy discovery: exchange full branch/head lists with the peer.

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # Reduce the common set to its heads, as nodes.
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # Modern set-discovery protocol (random sampling of heads).

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    # Time only the discovery round-trips themselves.
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
    ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
1020 1020
1021 1021
# I/O buffer size (4 KiB) used by debugdownload when copying the response.
_chunksize = 4 << 10
1023 1023
1024 1024
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is fetched through Mercurial's URL handling
    (``urlmod.open``, which honors proxy/auth configuration) and copied in
    ``_chunksize`` pieces to the file named by ``--output``, or to the ui
    when no output path is given.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Fix: the original leaked ``fh`` — close the response handle as
        # well as the output file, even if one of the closes raises.
        try:
            fh.close()
        finally:
            if output:
                dest.close()
1044 1044
1045 1045
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # no per-module file available; presumably a frozen
            # (PyOxidizer) build, so point at the executable instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose: name on its own line
            fm.write(b'name', b'%s\n', extname)
        else:
            # normal mode: name plus a compatibility annotation
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1107 1107
1108 1108
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset compilation pipeline; each stage transforms the tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stage trees to print
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the requested intermediate trees
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to test against the matcher
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    # build the matcher and print the file names it accepts
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1204 1204
1205 1205
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name (at least as wide as the header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad so the value columns line up across rows
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values pass through; booleans render as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (e.g. JSON) keep the raw value
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so mismatches with config/default can be colorized
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1276 1276
1277 1277
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result as the command's yes/no text
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; unreadable/unwritable
    # paths simply report "(unknown)"
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1300 1300
1301 1301
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # build the getbundle() arguments from the node-id options
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing --type name to the on-disk bundle format
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1348 1348
1349 1349
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # first check the file itself, then each parent directory:
                # a file can be ignored because a containing dir is ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore-file rule produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1398 1398
1399 1399
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hex node ids with --debug, abbreviated ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the id width from the first revision so the header columns
    # line up with the rows (12 is the default for an empty store)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    # one row per revision: rev, linkrev, node id, both parents
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1439 1439
1440 1440
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # emit one edge per parent link; the null parent is omitted
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1459 1459
1460 1460
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be fully loaded before asking for stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1470 1470
1471 1471
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # count of detected problems; doubles as the exit code
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen build: no stdlib directory, report the executable instead
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extension modules actually import
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        # try_open_template returns a (mapfile, file object) pair based on
        # the usage below; m is falsy when the template is not found.
        # NOTE(review): ``fp`` is never closed here — confirm whether
        # try_open_template's caller is expected to close it.
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1770 1770
1771 1771
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one character per queried id: 1 = known, 0 = unknown
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
1785 1785
1786 1786
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias for the modern command
    debugnamecomplete(ui, repo, *args)
1791 1791
1792 1792
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: delete the lock files outright and exit
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire and hold until the user confirms
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # always release whatever we managed to acquire
        release(*locks)

    # default mode: report the state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody was holding it; drop it again
            l.release()
        else:
            # somebody holds the lock: describe holder, pid, host and age
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # lock file vanished between probe and stat: treat as free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # non-zero exit when any lock is held
    return held
1904 1904
1905 1905
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # abort with a clear message when the revlog backend has no
        # fulltext cache attribute
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        # --add: populate the cache by reading the given manifest nodes
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # default: dump the cache contents, most recently used first
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
1977 1977
1978 1978
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # In verbose mode, report which on-disk record format (v1 or v2)
        # the merge state reader will effectively use.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mirrors the traditional plain-text output;
        # users may override it via -T/--template.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields emitted depend on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in ms.extras(f).items():
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm.end()
2073 2073
2074 2074
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # Branches get special treatment: historically only open branches
    # were listed, so they are filtered separately below instead of
    # coming from the generic namespace listing.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    prefixes = args or [b'']
    completions = {
        name
        for prefix in prefixes
        for name in names
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2097 2097
2098 2098
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap
    """
    # All branches work on the unfiltered changelog so hidden revisions
    # are included in the nodemap data.
    if opts['dump_new']:
        # Serialize a fresh persistent nodemap and write it to stdout.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # Rust/native index can produce the data directly.
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the raw bytes of the already-persisted nodemap, if any.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted data against the in-memory index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (header) fields of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2161 2161
2162 2162
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into its binary form, aborting on
        # anything else (short prefixes are deliberately not accepted).
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete INDEX...: remove markers by their store indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting `precursor` with the
        # given successors (possibly none, i.e. pruning).
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Recording parents requires the precursor to exist
                    # locally so its parents can be looked up.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally limited to those
        # relevant to the --rev selection.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2310 2310
2311 2311
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working directory when no --rev).
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    for destination, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2324 2324
2325 2325
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # NOTE: this function was previously misnamed `debugp1copies` (a
    # copy-paste of the p1 variant above), which shadowed the real
    # debugp1copies function at module level. The registered command
    # name (b'debugp2copies') is unchanged.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2338 2338
2339 2339
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) from the dirstate that extend `path`,
        # restricted to entries whose dirstate status is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Paths outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative, as dirstate paths are.
        spec = spec[len(rootdir) :]
        # On platforms where the OS separator is not '/', translate the
        # spec to dirstate form ('/') and translate matches back below.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, only complete up to the next separator.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags;
    # with no flags, everything ('nmar') is accepted (see call below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2408 2408
2409 2409
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then compute the copy map between them,
    # limited to files matching the given patterns.
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2423 2423
2424 2424
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is forced on; its output only shows up when
    # the user also passes --debug.
    logging_override = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

        def yesno(value):
            return _(b'yes') if value else _(b'no')

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
2443 2443
2444 2444
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the inputs that can short-circuit tool selection.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, suppress the chatter _picktool emits
                # while matching merge-patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2532 2532
2533 2533
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        allkeys = peer.listkeys(namespace)
        for key, value in sorted(pycompat.iteritems(allkeys)):
            ui.write(
                b"%s\t%s\n"
                % (stringutil.escapestr(key), stringutil.escapestr(value))
            )
        return
    # Update mode: compare-and-swap the key from old to new.
    key, old, new = keyinfo
    arguments = {
        b'namespace': namespace,
        b'key': key,
        b'old': old,
        b'new': new,
    }
    with peer.commandexecutor() as executor:
        result = executor.callcommand(b'pushkey', arguments).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2565 2565
2566 2566
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent vectors of two revisions and their relation

    Prints each revision's pvec, the depths, and how the vectors
    relate: '=' equal, '>'/'<' ancestry, '|' unrelated.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously `rel` was left unbound when none of the comparisons
        # above held, crashing with UnboundLocalError in the output
        # below; report an indeterminate relation instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2593 2593
2594 2594
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything" for dirstate.rebuild().
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus dirstate-only files not marked as added.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2642 2642
2643 2643
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the work is delegated to the repair module.
    repair.rebuildfncache(ui, repo)
2648 2648
2649 2649
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        filectx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = filectx.filelog().renamed(filectx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
2669 2669
2670 2670
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, in sorted order.
    output = b''.join(b"%s\n" % req for req in sorted(repo.requirements))
    ui.write(output)
2676 2676
2677 2677
2678 2678 @command(
2679 2679 b'debugrevlog',
2680 2680 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2681 2681 _(b'-c|-m|FILE'),
2682 2682 optionalrepo=True,
2683 2683 )
2684 2684 def debugrevlog(ui, repo, file_=None, **opts):
2685 2685 """show data and statistics about a revlog"""
2686 2686 opts = pycompat.byteskwargs(opts)
2687 2687 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2688 2688
2689 2689 if opts.get(b"dump"):
2690 2690 numrevs = len(r)
2691 2691 ui.write(
2692 2692 (
2693 2693 b"# rev p1rev p2rev start end deltastart base p1 p2"
2694 2694 b" rawsize totalsize compression heads chainlen\n"
2695 2695 )
2696 2696 )
2697 2697 ts = 0
2698 2698 heads = set()
2699 2699
2700 2700 for rev in pycompat.xrange(numrevs):
2701 2701 dbase = r.deltaparent(rev)
2702 2702 if dbase == -1:
2703 2703 dbase = rev
2704 2704 cbase = r.chainbase(rev)
2705 2705 clen = r.chainlen(rev)
2706 2706 p1, p2 = r.parentrevs(rev)
2707 2707 rs = r.rawsize(rev)
2708 2708 ts = ts + rs
2709 2709 heads -= set(r.parentrevs(rev))
2710 2710 heads.add(rev)
2711 2711 try:
2712 2712 compression = ts / r.end(rev)
2713 2713 except ZeroDivisionError:
2714 2714 compression = 0
2715 2715 ui.write(
2716 2716 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2717 2717 b"%11d %5d %8d\n"
2718 2718 % (
2719 2719 rev,
2720 2720 p1,
2721 2721 p2,
2722 2722 r.start(rev),
2723 2723 r.end(rev),
2724 2724 r.start(dbase),
2725 2725 r.start(cbase),
2726 2726 r.start(p1),
2727 2727 r.start(p2),
2728 2728 rs,
2729 2729 ts,
2730 2730 compression,
2731 2731 len(heads),
2732 2732 clen,
2733 2733 )
2734 2734 )
2735 2735 return 0
2736 2736
2737 2737 v = r.version
2738 2738 format = v & 0xFFFF
2739 2739 flags = []
2740 2740 gdelta = False
2741 2741 if v & revlog.FLAG_INLINE_DATA:
2742 2742 flags.append(b'inline')
2743 2743 if v & revlog.FLAG_GENERALDELTA:
2744 2744 gdelta = True
2745 2745 flags.append(b'generaldelta')
2746 2746 if not flags:
2747 2747 flags = [b'(none)']
2748 2748
2749 2749 ### tracks merge vs single parent
2750 2750 nummerges = 0
2751 2751
2752 2752 ### tracks ways the "delta" are build
2753 2753 # nodelta
2754 2754 numempty = 0
2755 2755 numemptytext = 0
2756 2756 numemptydelta = 0
2757 2757 # full file content
2758 2758 numfull = 0
2759 2759 # intermediate snapshot against a prior snapshot
2760 2760 numsemi = 0
2761 2761 # snapshot count per depth
2762 2762 numsnapdepth = collections.defaultdict(lambda: 0)
2763 2763 # delta against previous revision
2764 2764 numprev = 0
2765 2765 # delta against first or second parent (not prev)
2766 2766 nump1 = 0
2767 2767 nump2 = 0
2768 2768 # delta against neither prev nor parents
2769 2769 numother = 0
2770 2770 # delta against prev that are also first or second parent
2771 2771 # (details of `numprev`)
2772 2772 nump1prev = 0
2773 2773 nump2prev = 0
2774 2774
2775 2775 # data about delta chain of each revs
2776 2776 chainlengths = []
2777 2777 chainbases = []
2778 2778 chainspans = []
2779 2779
2780 2780 # data about each revision
2781 2781 datasize = [None, 0, 0]
2782 2782 fullsize = [None, 0, 0]
2783 2783 semisize = [None, 0, 0]
2784 2784 # snapshot count per depth
2785 2785 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2786 2786 deltasize = [None, 0, 0]
2787 2787 chunktypecounts = {}
2788 2788 chunktypesizes = {}
2789 2789
2790 2790 def addsize(size, l):
2791 2791 if l[0] is None or size < l[0]:
2792 2792 l[0] = size
2793 2793 if size > l[1]:
2794 2794 l[1] = size
2795 2795 l[2] += size
2796 2796
2797 2797 numrevs = len(r)
2798 2798 for rev in pycompat.xrange(numrevs):
2799 2799 p1, p2 = r.parentrevs(rev)
2800 2800 delta = r.deltaparent(rev)
2801 2801 if format > 0:
2802 2802 addsize(r.rawsize(rev), datasize)
2803 2803 if p2 != nullrev:
2804 2804 nummerges += 1
2805 2805 size = r.length(rev)
2806 2806 if delta == nullrev:
2807 2807 chainlengths.append(0)
2808 2808 chainbases.append(r.start(rev))
2809 2809 chainspans.append(size)
2810 2810 if size == 0:
2811 2811 numempty += 1
2812 2812 numemptytext += 1
2813 2813 else:
2814 2814 numfull += 1
2815 2815 numsnapdepth[0] += 1
2816 2816 addsize(size, fullsize)
2817 2817 addsize(size, snapsizedepth[0])
2818 2818 else:
2819 2819 chainlengths.append(chainlengths[delta] + 1)
2820 2820 baseaddr = chainbases[delta]
2821 2821 revaddr = r.start(rev)
2822 2822 chainbases.append(baseaddr)
2823 2823 chainspans.append((revaddr - baseaddr) + size)
2824 2824 if size == 0:
2825 2825 numempty += 1
2826 2826 numemptydelta += 1
2827 2827 elif r.issnapshot(rev):
2828 2828 addsize(size, semisize)
2829 2829 numsemi += 1
2830 2830 depth = r.snapshotdepth(rev)
2831 2831 numsnapdepth[depth] += 1
2832 2832 addsize(size, snapsizedepth[depth])
2833 2833 else:
2834 2834 addsize(size, deltasize)
2835 2835 if delta == rev - 1:
2836 2836 numprev += 1
2837 2837 if delta == p1:
2838 2838 nump1prev += 1
2839 2839 elif delta == p2:
2840 2840 nump2prev += 1
2841 2841 elif delta == p1:
2842 2842 nump1 += 1
2843 2843 elif delta == p2:
2844 2844 nump2 += 1
2845 2845 elif delta != nullrev:
2846 2846 numother += 1
2847 2847
2848 2848 # Obtain data on the raw chunks in the revlog.
2849 2849 if util.safehasattr(r, b'_getsegmentforrevs'):
2850 2850 segment = r._getsegmentforrevs(rev, rev)[1]
2851 2851 else:
2852 2852 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2853 2853 if segment:
2854 2854 chunktype = bytes(segment[0:1])
2855 2855 else:
2856 2856 chunktype = b'empty'
2857 2857
2858 2858 if chunktype not in chunktypecounts:
2859 2859 chunktypecounts[chunktype] = 0
2860 2860 chunktypesizes[chunktype] = 0
2861 2861
2862 2862 chunktypecounts[chunktype] += 1
2863 2863 chunktypesizes[chunktype] += size
2864 2864
2865 2865 # Adjust size min value for empty cases
2866 2866 for size in (datasize, fullsize, semisize, deltasize):
2867 2867 if size[0] is None:
2868 2868 size[0] = 0
2869 2869
2870 2870 numdeltas = numrevs - numfull - numempty - numsemi
2871 2871 numoprev = numprev - nump1prev - nump2prev
2872 2872 totalrawsize = datasize[2]
2873 2873 datasize[2] /= numrevs
2874 2874 fulltotal = fullsize[2]
2875 2875 if numfull == 0:
2876 2876 fullsize[2] = 0
2877 2877 else:
2878 2878 fullsize[2] /= numfull
2879 2879 semitotal = semisize[2]
2880 2880 snaptotal = {}
2881 2881 if numsemi > 0:
2882 2882 semisize[2] /= numsemi
2883 2883 for depth in snapsizedepth:
2884 2884 snaptotal[depth] = snapsizedepth[depth][2]
2885 2885 snapsizedepth[depth][2] /= numsnapdepth[depth]
2886 2886
2887 2887 deltatotal = deltasize[2]
2888 2888 if numdeltas > 0:
2889 2889 deltasize[2] /= numdeltas
2890 2890 totalsize = fulltotal + semitotal + deltatotal
2891 2891 avgchainlen = sum(chainlengths) / numrevs
2892 2892 maxchainlen = max(chainlengths)
2893 2893 maxchainspan = max(chainspans)
2894 2894 compratio = 1
2895 2895 if totalsize:
2896 2896 compratio = totalrawsize / totalsize
2897 2897
2898 2898 basedfmtstr = b'%%%dd\n'
2899 2899 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2900 2900
2901 2901 def dfmtstr(max):
2902 2902 return basedfmtstr % len(str(max))
2903 2903
2904 2904 def pcfmtstr(max, padding=0):
2905 2905 return basepcfmtstr % (len(str(max)), b' ' * padding)
2906 2906
2907 2907 def pcfmt(value, total):
2908 2908 if total:
2909 2909 return (value, 100 * float(value) / total)
2910 2910 else:
2911 2911 return value, 100.0
2912 2912
2913 2913 ui.writenoi18n(b'format : %d\n' % format)
2914 2914 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2915 2915
2916 2916 ui.write(b'\n')
2917 2917 fmt = pcfmtstr(totalsize)
2918 2918 fmt2 = dfmtstr(totalsize)
2919 2919 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2920 2920 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2921 2921 ui.writenoi18n(
2922 2922 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2923 2923 )
2924 2924 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2925 2925 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2926 2926 ui.writenoi18n(
2927 2927 b' text : '
2928 2928 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2929 2929 )
2930 2930 ui.writenoi18n(
2931 2931 b' delta : '
2932 2932 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2933 2933 )
2934 2934 ui.writenoi18n(
2935 2935 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2936 2936 )
2937 2937 for depth in sorted(numsnapdepth):
2938 2938 ui.write(
2939 2939 (b' lvl-%-3d : ' % depth)
2940 2940 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2941 2941 )
2942 2942 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2943 2943 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2944 2944 ui.writenoi18n(
2945 2945 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2946 2946 )
2947 2947 for depth in sorted(numsnapdepth):
2948 2948 ui.write(
2949 2949 (b' lvl-%-3d : ' % depth)
2950 2950 + fmt % pcfmt(snaptotal[depth], totalsize)
2951 2951 )
2952 2952 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2953 2953
2954 2954 def fmtchunktype(chunktype):
2955 2955 if chunktype == b'empty':
2956 2956 return b' %s : ' % chunktype
2957 2957 elif chunktype in pycompat.bytestr(string.ascii_letters):
2958 2958 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2959 2959 else:
2960 2960 return b' 0x%s : ' % hex(chunktype)
2961 2961
2962 2962 ui.write(b'\n')
2963 2963 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2964 2964 for chunktype in sorted(chunktypecounts):
2965 2965 ui.write(fmtchunktype(chunktype))
2966 2966 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2967 2967 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2968 2968 for chunktype in sorted(chunktypecounts):
2969 2969 ui.write(fmtchunktype(chunktype))
2970 2970 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2971 2971
2972 2972 ui.write(b'\n')
2973 2973 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2974 2974 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2975 2975 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2976 2976 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2977 2977 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2978 2978
2979 2979 if format > 0:
2980 2980 ui.write(b'\n')
2981 2981 ui.writenoi18n(
2982 2982 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2983 2983 % tuple(datasize)
2984 2984 )
2985 2985 ui.writenoi18n(
2986 2986 b'full revision size (min/max/avg) : %d / %d / %d\n'
2987 2987 % tuple(fullsize)
2988 2988 )
2989 2989 ui.writenoi18n(
2990 2990 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2991 2991 % tuple(semisize)
2992 2992 )
2993 2993 for depth in sorted(snapsizedepth):
2994 2994 if depth == 0:
2995 2995 continue
2996 2996 ui.writenoi18n(
2997 2997 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2998 2998 % ((depth,) + tuple(snapsizedepth[depth]))
2999 2999 )
3000 3000 ui.writenoi18n(
3001 3001 b'delta size (min/max/avg) : %d / %d / %d\n'
3002 3002 % tuple(deltasize)
3003 3003 )
3004 3004
3005 3005 if numdeltas > 0:
3006 3006 ui.write(b'\n')
3007 3007 fmt = pcfmtstr(numdeltas)
3008 3008 fmt2 = pcfmtstr(numdeltas, 4)
3009 3009 ui.writenoi18n(
3010 3010 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3011 3011 )
3012 3012 if numprev > 0:
3013 3013 ui.writenoi18n(
3014 3014 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3015 3015 )
3016 3016 ui.writenoi18n(
3017 3017 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3018 3018 )
3019 3019 ui.writenoi18n(
3020 3020 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3021 3021 )
3022 3022 if gdelta:
3023 3023 ui.writenoi18n(
3024 3024 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3025 3025 )
3026 3026 ui.writenoi18n(
3027 3027 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3028 3028 )
3029 3029 ui.writenoi18n(
3030 3030 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3031 3031 )
3032 3032
3033 3033
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only the two historical index layouts are supported
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-hex node ids, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # Size the node-id columns from the first entry.
    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Print a column header matching the chosen format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One line per revision; the fields emitted depend on format/verbosity.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents so a damaged entry does not
                # abort the dump of the remaining revisions
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not node ids
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3147 3147
3148 3148
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline, in order; each stage transforms the
    # tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    # --no-optimized drops the final stage entirely
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: stages printed only
    # when they altered the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree so the
    # --verify-optimized comparison below can replay two of them.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate the analyzed and optimized trees independently and diff
        # the resulting revision lists; returns 1 when they differ.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # unified-diff style rendering of the mismatching revision lists
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the result revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3280 3280
3281 3281
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional file object that receives a copy of all server I/O
    logfh = None

    # --logiofd and --logiofile are mutually exclusive destinations
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve over this process' stdin/stdout; blocks until the peer is done.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3330 3330
3331 3331
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both arguments to binary nodes up front; an omitted second
    # revision falls back to the null revision.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only dirstate parents are rewritten, so the wlock suffices.
    with repo.wlock():
        repo.setparents(parent1, parent2)
3349 3349
3350 3350
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision
    # rather than a file path, so shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap higher-level storage objects down to the raw revlog if present
    storage = getattr(storage, '_revlog', storage)
    try:
        sidedata = storage.sidedata(storage.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3377 3377
3378 3378
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    # the chain-building API used below is Windows-only
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # fall back to the repository's 'default' path; without a repo
        # there is nothing to fall back to
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # derive the (host, port) pair; only https and ssh make sense here
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled: we only want the
    # peer's raw certificate so Windows can (re)build its chain.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first pass only checks; second pass (build=True default) lets
        # Windows fetch any missing intermediates/roots
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3448 3448
3449 3449
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect all strip-backup bundles, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # render the changesets from one bundle, honoring the standard
        # log options (--newest-first, --no-merges, --limit)
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # nothing to do if the requested changeset is already present
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    # inspect each bundle in turn, either listing it or (with --recover)
    # unbundling the first one that contains the requested changeset
    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # bundle references a parent revision the repo no longer has
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming-comparison machinery while probing the bundle
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # stop after the first bundle containing the node
                        break
            else:
                # listing mode: print the bundle's mtime (and path when
                # verbose), then its changesets
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3585 3585
3586 3586
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # dump the subrepository state of the requested (or working) revision;
    # substate maps path -> state tuple whose first two fields are the
    # source and the pinned revision
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3598 3598
3599 3599
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        # one header line per revision, then one line per successors set
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            # a pruned changeset yields an empty set: just the newline
            ui.write(b'\n')
3654 3654
3655 3655
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    # use the unfiltered repo so every cached revision is reachable
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: report what the cache holds, never recompute
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3665 3665
3666 3666
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r requires a repository to resolve the revisions against
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # parse -D KEY=VALUE pairs into extra template properties; 'ui' is
    # reserved and an empty key is invalid
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # show the parsed tree, and the alias-expanded tree if it differs
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # generic template: render once with the user-defined properties
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # log template: render once per requested changeset
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3730 3730
3731 3731
@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # ui.getpass() can return None (e.g. when the session is not
    # interactive); %-formatting None into bytes raises TypeError on
    # Python 3, so substitute a marker value instead of crashing.
    if r is None:
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)
3742 3742
3743 3743
@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # echo the answer back so tests can observe what the prompt returned
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3754 3754
3755 3755
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # acquire the wlock before the store lock, matching the usual order
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3761 3761
3762 3762
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # Thin wrapper: every decision lives in the upgrade module. Bundle the
    # explicit options together with the remaining flags and forward them.
    kwargs = dict(opts)
    kwargs['run'] = run
    kwargs['optimize'] = optimize
    kwargs['backup'] = backup
    return upgrade.upgraderepo(ui, repo, **kwargs)
3809 3809
3810 3810
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        # normpath takes a single path argument, so it can be used directly
        # without a lambda wrapper.
        f = util.normpath
    # Size the columns to the longest repo-relative and cwd-relative paths
    # so the output lines up. Use generator expressions (no throwaway
    # lists) and avoid shadowing the abs() builtin.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            f(repo.pathto(path)),
            m.exact(path) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
3837 3837
3838 3838
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)" and
            # leave a trailing separator before the reason text.
            descs = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(descs) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3856 3856
3857 3857
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Drop the generic remote options; only command-specific flags remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Forward only options with a truthy value, as native-string keywords.
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
3885 3885
3886 3886
3887 3887 def _parsewirelangblocks(fh):
3888 3888 activeaction = None
3889 3889 blocklines = []
3890 3890 lastindent = 0
3891 3891
3892 3892 for line in fh:
3893 3893 line = line.rstrip()
3894 3894 if not line:
3895 3895 continue
3896 3896
3897 3897 if line.startswith(b'#'):
3898 3898 continue
3899 3899
3900 3900 if not line.startswith(b' '):
3901 3901 # New block. Flush previous one.
3902 3902 if activeaction:
3903 3903 yield activeaction, blocklines
3904 3904
3905 3905 activeaction = line
3906 3906 blocklines = []
3907 3907 lastindent = 0
3908 3908 continue
3909 3909
3910 3910 # Else we start with an indent.
3911 3911
3912 3912 if not activeaction:
3913 3913 raise error.Abort(_(b'indented line outside of block'))
3914 3914
3915 3915 indent = len(line) - len(line.lstrip())
3916 3916
3917 3917 # If this line is indented more than the last line, concatenate it.
3918 3918 if indent > lastindent and blocklines:
3919 3919 blocklines[-1] += line.lstrip()
3920 3920 else:
3921 3921 blocklines.append(line)
3922 3922 lastindent = indent
3923 3923
3924 3924 # Flush last block.
3925 3925 if activeaction:
3926 3926 yield activeaction, blocklines
3927 3927
3928 3928
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            # Keep the hint in sync with the tuple of accepted values above.
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() returns a list; index [1] to extract the file
                    # path following the "BODYFILE " keyword. Passing the
                    # list itself to open() would raise TypeError.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,865 +1,865 b''
1 1 # formatter.py - generic output formatting for mercurial
2 2 #
3 3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Generic output formatting for Mercurial
9 9
10 10 The formatter provides API to show data in various ways. The following
11 11 functions should be used in place of ui.write():
12 12
13 13 - fm.write() for unconditional output
14 14 - fm.condwrite() to show some extra data conditionally in plain output
15 15 - fm.context() to provide changectx to template output
16 16 - fm.data() to provide extra data to JSON or template output
17 17 - fm.plain() to show raw text that isn't provided to JSON or template output
18 18
19 19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 20 beforehand so the data is converted to the appropriate data type. Use
21 21 fm.isplain() if you need to convert or format data conditionally which isn't
22 22 supported by the formatter API.
23 23
24 24 To build nested structure (i.e. a list of dicts), use fm.nested().
25 25
26 26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27 27
28 28 fm.condwrite() vs 'if cond:':
29 29
30 30 In most cases, use fm.condwrite() so users can selectively show the data
31 31 in template output. If it's costly to build data, use plain 'if cond:' with
32 32 fm.write().
33 33
34 34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35 35
36 36 fm.nested() should be used to form a tree structure (a list of dicts of
37 37 lists of dicts...) which can be accessed through template keywords, e.g.
38 38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 39 exports a dict-type object to template, which can be accessed by e.g.
40 40 "{get(foo, key)}" function.
41 41
42 42 Doctest helper:
43 43
44 44 >>> def show(fn, verbose=False, **opts):
45 45 ... import sys
46 46 ... from . import ui as uimod
47 47 ... ui = uimod.ui()
48 48 ... ui.verbose = verbose
49 49 ... ui.pushbuffer()
50 50 ... try:
51 51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 52 ... pycompat.byteskwargs(opts)))
53 53 ... finally:
54 54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55 55
56 56 Basic example:
57 57
58 58 >>> def files(ui, fm):
59 59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 60 ... for f in files:
61 61 ... fm.startitem()
62 62 ... fm.write(b'path', b'%s', f[0])
63 63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 65 ... fm.data(size=f[1])
66 66 ... fm.plain(b'\\n')
67 67 ... fm.end()
68 68 >>> show(files)
69 69 foo
70 70 bar
71 71 >>> show(files, verbose=True)
72 72 foo 1970-01-01 00:00:00
73 73 bar 1970-01-01 00:00:01
74 74 >>> show(files, template=b'json')
75 75 [
76 76 {
77 77 "date": [0, 0],
78 78 "path": "foo",
79 79 "size": 123
80 80 },
81 81 {
82 82 "date": [1, 0],
83 83 "path": "bar",
84 84 "size": 456
85 85 }
86 86 ]
87 87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 88 path: foo
89 89 date: 1970-01-01T00:00:00+00:00
90 90 path: bar
91 91 date: 1970-01-01T00:00:01+00:00
92 92
93 93 Nested example:
94 94
95 95 >>> def subrepos(ui, fm):
96 96 ... fm.startitem()
97 97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
99 99 ... fm.end()
100 100 >>> show(subrepos)
101 101 [baz]
102 102 foo
103 103 bar
104 104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 105 baz: foo, bar
106 106 """
107 107
108 108 from __future__ import absolute_import, print_function
109 109
110 110 import contextlib
111 111 import itertools
112 112 import os
113 113
114 114 from .i18n import _
115 115 from .node import (
116 116 hex,
117 117 short,
118 118 )
119 119 from .thirdparty import attr
120 120
121 121 from . import (
122 122 error,
123 123 pycompat,
124 124 templatefilters,
125 125 templatekw,
126 126 templater,
127 127 templateutil,
128 128 util,
129 129 )
130 130 from .utils import (
131 131 cborutil,
132 132 dateutil,
133 133 stringutil,
134 134 )
135 135
136 136 pickle = util.pickle
137 137
138 138
def isprintable(obj):
    """Check if the given object can be directly passed in to formatter's
    write() and data() functions

    Returns False if the object is unsupported or must be pre-processed by
    formatdate(), formatdict(), or formatlist().
    """
    # primitive scalar types only; containers and dates need converters
    printable_types = (type(None), bool, int, pycompat.long, float, bytes)
    return isinstance(obj, printable_types)
147 147
148 148
149 149 class _nullconverter(object):
150 150 '''convert non-primitive data types to be processed by formatter'''
151 151
152 152 # set to True if context object should be stored as item
153 153 storecontext = False
154 154
155 155 @staticmethod
156 156 def wrapnested(data, tmpl, sep):
157 157 '''wrap nested data by appropriate type'''
158 158 return data
159 159
160 160 @staticmethod
161 161 def formatdate(date, fmt):
162 162 '''convert date tuple to appropriate format'''
163 163 # timestamp can be float, but the canonical form should be int
164 164 ts, tz = date
165 165 return (int(ts), tz)
166 166
167 167 @staticmethod
168 168 def formatdict(data, key, value, fmt, sep):
169 169 '''convert dict or key-value pairs to appropriate dict format'''
170 170 # use plain dict instead of util.sortdict so that data can be
171 171 # serialized as a builtin dict in pickle output
172 172 return dict(data)
173 173
174 174 @staticmethod
175 175 def formatlist(data, name, fmt, sep):
176 176 '''convert iterable to appropriate list format'''
177 177 return list(data)
178 178
179 179
class baseformatter(object):
    """Base class for all formatters

    Collects fields into a per-item dict (self._item) via startitem()/
    write()/data()/context(), hands non-primitive values to the given
    converter, and emits each finished item through _showitem(), which
    subclasses override.
    """

    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._opts = opts
        self._converter = converter
        # current item being built; None until startitem() is called
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # only finalize output on a clean exit; don't mask the exception
        if exctype is None:
            self.end()

    def _showitem(self):
        '''show a formatted item once all data is collected'''

    def startitem(self):
        '''begin an item in the format list'''
        # flush the previous item before starting a fresh one
        if self._item is not None:
            self._showitem()
        self._item = {}

    def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)

    def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)

    def formatlist(self, data, name, fmt=None, sep=b' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)

    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        ctxs = pycompat.byteskwargs(ctxs)
        assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
        if self._converter.storecontext:
            # populate missing resources in fctx -> ctx -> repo order
            if b'fctx' in ctxs and b'ctx' not in ctxs:
                ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
            if b'ctx' in ctxs and b'repo' not in ctxs:
                ctxs[b'repo'] = ctxs[b'ctx'].repo()
            self._item.update(ctxs)

    def datahint(self):
        '''set of field names to be referenced'''
        return set()

    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        data = pycompat.byteskwargs(data)
        self._item.update(data)

    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        # base class stores the data; plain output is a subclass concern
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        # structured formatters always record the data, regardless of cond
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''

    def isplain(self):
        '''check for plain formatter usage'''
        return False

    def nested(self, field, tmpl=None, sep=b''):
        '''sub formatter to store nested data in the specified field'''
        # the sub formatter appends its items into 'data', which is wrapped
        # by the converter and stored in this formatter's current item
        data = []
        self._item[field] = self._converter.wrapnested(data, tmpl, sep)
        return _nestedformatter(self._ui, self._converter, data)

    def end(self):
        '''end output for the formatter'''
        # flush the last pending item, if any
        if self._item is not None:
            self._showitem()
270 270
271 271
def nullformatter(ui, topic, opts):
    '''formatter that prints nothing'''
    # base class with the null converter has no-op _showitem(), so all
    # collected data is simply discarded
    return baseformatter(ui, topic, opts, converter=_nullconverter)
275 275
276 276
class _nestedformatter(baseformatter):
    '''build sub items and store them in the parent formatter'''

    def __init__(self, ui, converter, data):
        baseformatter.__init__(
            self, ui, topic=b'', opts={}, converter=converter
        )
        # shared list owned by the parent formatter (see nested())
        self._data = data

    def _showitem(self):
        # accumulate into the parent's list instead of writing output
        self._data.append(self._item)
288 288
289 289
def _iteritems(data):
    '''iterate key-value pairs in stable order'''
    # pre-paired input (list of tuples) is assumed already ordered
    if not isinstance(data, dict):
        return data
    # dicts are sorted by key for deterministic output
    return sorted(pycompat.iteritems(data))
295 295
296 296
class _plainconverter(object):
    '''convert non-primitive data types to text'''

    storecontext = False

    @staticmethod
    def wrapnested(data, tmpl, sep):
        # nesting is meaningless for plain text; see plainformatter.nested()
        raise error.ProgrammingError(b'plainformatter should never be nested')

    @staticmethod
    def formatdate(date, fmt):
        '''stringify date tuple in the given format'''
        return dateutil.datestr(date, fmt)

    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''stringify key-value pairs separated by sep'''
        # without an explicit fmt, coerce keys/values to bytes ourselves
        if fmt is None:
            fmt = b'%s=%s'
            coerce = pycompat.bytestr
        else:
            coerce = pycompat.identity
        pieces = (
            fmt % (coerce(k), coerce(v)) for k, v in _iteritems(data)
        )
        return sep.join(pieces)

    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''stringify iterable separated by sep'''
        # without an explicit fmt, coerce elements to bytes ourselves
        if fmt is None:
            fmt = b'%s'
            coerce = pycompat.bytestr
        else:
            coerce = pycompat.identity
        return sep.join(fmt % coerce(e) for e in data)
330 330
331 331
class plainformatter(baseformatter):
    '''the default text output scheme'''

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # debug output shows full hashes; normal output the short form
        if ui.debugflag:
            self.hexfunc = hex
        else:
            self.hexfunc = short
        # write through ui when possible so labeling/buffering still apply
        if ui is out:
            self._write = ui.write
        else:
            self._write = lambda s, **opts: out.write(s)

    def startitem(self):
        pass

    def data(self, **data):
        # extra data is never shown in plain output
        pass

    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write'''
        # unlike structured formatters, plain output honors the condition
        if cond:
            self._write(deftext % fielddata, **opts)

    def plain(self, text, **opts):
        self._write(text, **opts)

    def isplain(self):
        return True

    def nested(self, field, tmpl=None, sep=b''):
        # nested data will be directly written to ui
        return self

    def end(self):
        pass
372 372
373 373
class debugformatter(baseformatter):
    """emit items as a Python-literal list assigned to the topic name"""

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # opening of "<topic> = [" list; closed in end()
        self._out.write(b"%s = [\n" % self._topic)

    def _showitem(self):
        self._out.write(
            b' %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
        )

    def end(self):
        # flush the pending item first, then close the list literal
        baseformatter.end(self)
        self._out.write(b"]\n")
388 388
389 389
class pickleformatter(baseformatter):
    """serialize all items as a single pickled list

    Items are buffered in memory and written in one shot by end(), since
    pickle has no incremental list representation here.
    """

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._data = []

    def _showitem(self):
        self._data.append(self._item)

    def end(self):
        baseformatter.end(self)
        self._out.write(pickle.dumps(self._data))
402 402
403 403
class cborformatter(baseformatter):
    '''serialize items as an indefinite-length CBOR array'''

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # indefinite-length array lets us stream items without knowing
        # the count up front; terminated by BREAK in end()
        self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)

    def _showitem(self):
        self._out.write(b''.join(cborutil.streamencode(self._item)))

    def end(self):
        baseformatter.end(self)
        self._out.write(cborutil.BREAK)
418 418
419 419
class jsonformatter(baseformatter):
    """serialize items as a JSON array, written incrementally"""

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write(b"[")
        # tracks whether a comma is needed before the next item
        self._first = True

    def _showitem(self):
        if self._first:
            self._first = False
        else:
            self._out.write(b",")

        self._out.write(b"\n {\n")
        first = True
        # sort keys for stable, diffable output
        for k, v in sorted(self._item.items()):
            if first:
                first = False
            else:
                self._out.write(b",\n")
            # paranoid=False: output is not embedded in HTML/JS
            u = templatefilters.json(v, paranoid=False)
            self._out.write(b'  "%s": %s' % (k, u))
        self._out.write(b"\n }")

    def end(self):
        baseformatter.end(self)
        self._out.write(b"\n]\n")
447 447
448 448
class _templateconverter(object):
    '''convert non-primitive data types to be processed by templater'''

    # templates may reference {ctx}/{fctx}/{repo}, so keep them in the item
    storecontext = True

    @staticmethod
    def wrapnested(data, tmpl, sep):
        '''wrap nested data by templatable type'''
        return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)

    @staticmethod
    def formatdate(date, fmt):
        '''return date tuple'''
        # the template engine applies date filters itself; fmt is unused
        return templateutil.date(date)

    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''build object that can be evaluated as either plain string or dict'''
        data = util.sortdict(_iteritems(data))

        def f():
            # lazy plain-text rendering, only computed if the template
            # uses the value as a string
            yield _plainconverter.formatdict(data, key, value, fmt, sep)

        return templateutil.hybriddict(
            data, key=key, value=value, fmt=fmt, gen=f
        )

    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''build object that can be evaluated as either plain string or list'''
        data = list(data)

        def f():
            # lazy plain-text rendering, as in formatdict() above
            yield _plainconverter.formatlist(data, name, fmt, sep)

        return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
485 485
486 486
class templateformatter(baseformatter):
    """render each item through a user/config-specified template

    Supports optional docheader/docfooter/separator parts resolved via
    templatepartsmap().
    """

    def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
        baseformatter.__init__(self, ui, topic, opts, _templateconverter)
        self._out = out
        self._tref = spec.ref
        self._t = loadtemplater(
            ui,
            spec,
            defaults=templatekw.keywords,
            resources=templateresources(ui),
            cache=templatekw.defaulttempl,
        )
        # used by _internaltemplateformatter() to inject built-in parts
        if overridetemplates:
            self._t.cache.update(overridetemplates)
        self._parts = templatepartsmap(
            spec, self._t, [b'docheader', b'docfooter', b'separator']
        )
        # item index, exposed to templates as {index}
        self._counter = itertools.count()
        self._renderitem(b'docheader', {})

    def _showitem(self):
        item = self._item.copy()
        item[b'index'] = index = next(self._counter)
        # separator goes between items, not before the first one
        if index > 0:
            self._renderitem(b'separator', {})
        self._renderitem(self._tref, item)

    def _renderitem(self, part, item):
        # missing optional parts (e.g. no docheader defined) are skipped
        if part not in self._parts:
            return
        ref = self._parts[part]
        # None can't be put in the mapping dict since it means <unset>
        for k, v in item.items():
            if v is None:
                item[k] = templateutil.wrappedvalue(v)
        self._out.write(self._t.render(ref, item))

    @util.propertycache
    def _symbolsused(self):
        return self._t.symbolsused(self._tref)

    def datahint(self):
        '''set of field names to be referenced from the template'''
        return self._symbolsused[0]

    def end(self):
        baseformatter.end(self)
        self._renderitem(b'docfooter', {})
535 535
536 536
@attr.s(frozen=True)
class templatespec(object):
    """parsed form of a -T/--template argument

    Exactly one of tmpl (literal template) or mapfile (style map file)
    is set for file/literal specs; ref names a built-in or [templates]
    entry; refargs carries arguments of function-style refs like
    'json(...)'; fp is an optional pre-opened file for mapfile.
    """

    ref = attr.ib()
    tmpl = attr.ib()
    mapfile = attr.ib()
    refargs = attr.ib(default=None)
    fp = attr.ib(default=None)
544 544
545 545
def empty_templatespec():
    """spec meaning 'no template given'; formatter() falls back to defaults"""
    return templatespec(None, None, None)
548 548
549 549
def reference_templatespec(ref, refargs=None):
    """spec naming a built-in or [templates] entry, optionally with args"""
    return templatespec(ref, None, None, refargs)
552 552
553 553
def literal_templatespec(tmpl):
    """spec wrapping a literal template string (must be bytes)"""
    if pycompat.ispy3:
        assert not isinstance(tmpl, str), b'tmpl must not be a str'
    return templatespec(b'', tmpl, None)
558 558
559 559
def mapfile_templatespec(topic, mapfile, fp=None):
    """spec selecting the 'topic' template from a style map file"""
    return templatespec(topic, None, mapfile, fp=fp)
562 562
563 563
def lookuptemplate(ui, topic, tmpl):
    """Find the template matching the given -T/--template spec 'tmpl'

    'tmpl' can be any of the following:

    - a literal template (e.g. '{rev}')
    - a reference to built-in template (i.e. formatter)
    - a map-file name or path (e.g. 'changelog')
    - a reference to [templates] in config file
    - a path to raw template file

    A map file defines a stand-alone template environment. If a map file
    selected, all templates defined in the file will be loaded, and the
    template matching the given topic will be rendered. Aliases won't be
    loaded from user config, but from the map file.

    If no map file selected, all templates in [templates] section will be
    available as well as aliases in [templatealias].
    """

    if not tmpl:
        return empty_templatespec()

    # looks like a literal template?
    if b'{' in tmpl:
        return literal_templatespec(tmpl)

    # a reference to built-in (formatter) template
    if tmpl in {b'cbor', b'json', b'pickle', b'debug'}:
        return reference_templatespec(tmpl)

    # a function-style reference to built-in template
    func, fsep, ftail = tmpl.partition(b'(')
    if func in {b'cbor', b'json'} and fsep and ftail.endswith(b')'):
        templater.parseexpr(tmpl)  # make sure syntax errors are confined
        return reference_templatespec(func, refargs=ftail[:-1])

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        # NOTE(review): the 'or' fallback assumes try_open_template returns
        # a falsy value when the style is missing -- confirm against
        # templater.try_open_template
        (mapname, fp) = templater.try_open_template(
            b'map-cmdline.' + tmpl
        ) or templater.try_open_template(tmpl)
        if mapname:
            return mapfile_templatespec(topic, mapname, fp)

    # perhaps it's a reference to [templates]
    if ui.config(b'templates', tmpl):
        return reference_templatespec(tmpl)

    if tmpl == b'list':
        ui.write(_(b"available styles: %s\n") % templater.stylelist())
        raise error.Abort(_(b"specify a template"))

    # perhaps it's a path to a map or a template
    if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith(b"map-"):
            return mapfile_templatespec(topic, os.path.realpath(tmpl))
        with util.posixfile(tmpl, b'rb') as f:
            tmpl = f.read()
        return literal_templatespec(tmpl)

    # constant string?
    return literal_templatespec(tmpl)
628 628
629 629
def templatepartsmap(spec, t, partnames):
    """Create a mapping of {part: ref}"""
    # the initial ref must exist in t
    partsmap = {spec.ref: spec.ref}
    if spec.mapfile:
        # map files define optional parts as plain top-level templates
        for name in partnames:
            if name in t:
                partsmap[name] = name
    elif spec.ref:
        # config-based templates use "<ref>:<part>" sub-sections
        for name in partnames:
            ref = b'%s:%s' % (spec.ref, name)
            if ref in t:
                partsmap[name] = ref
    return partsmap
641 641
642 642
def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
    """Create a templater from either a literal template or loading from
    a map file"""
    # a spec carries either a literal template or a map file, never both
    assert not (spec.tmpl and spec.mapfile)
    if spec.mapfile:
        # map file defines its own template environment (incl. aliases)
        return templater.templater.frommapfile(
            spec.mapfile,
            spec.fp,
            defaults=defaults,
            resources=resources,
            cache=cache,
        )
    # literal (or empty) template: build from config + given string
    return maketemplater(
        ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
    )
658 658
659 659
def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
    """Create a templater from a string template 'tmpl'"""
    aliases = ui.configitems(b'templatealias')
    t = templater.templater(
        defaults=defaults, resources=resources, cache=cache, aliases=aliases
    )
    # make [templates] config entries referenceable from the template
    t.cache.update(
        (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
    )
    if tmpl:
        # the empty ref b'' denotes the main template
        t.cache[b''] = tmpl
    return t
672 672
673 673
674 674 # marker to denote a resource to be loaded on demand based on mapping values
675 675 # (e.g. (ctx, path) -> fctx)
676 676 _placeholder = object()
677 677
678 678
class templateresources(templater.resourcemapper):
    """Resource mapper designed for the default templatekw and function"""

    def __init__(self, ui, repo=None):
        # fixed resources, consulted when a key isn't in the mapping
        self._resmap = {
            b'cache': {},  # for templatekw/funcs to store reusable data
            b'repo': repo,
            b'ui': ui,
        }

    def availablekeys(self, mapping):
        # known keys that currently resolve to a non-None value
        return {
            k for k in self.knownkeys() if self._getsome(mapping, k) is not None
        }

    def knownkeys(self):
        return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}

    def lookup(self, mapping, key):
        if key not in self.knownkeys():
            return None
        v = self._getsome(mapping, key)
        # _placeholder means "load lazily from other mapping values";
        # cache the loaded value back into the mapping
        if v is _placeholder:
            v = mapping[key] = self._loadermap[key](self, mapping)
        return v

    def populatemap(self, context, origmapping, newmapping):
        mapping = {}
        if self._hasnodespec(newmapping):
            mapping[b'revcache'] = {}  # per-ctx cache
        if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
            orignode = templateutil.runsymbol(context, origmapping, b'node')
            mapping[b'originalnode'] = orignode
        # put marker to override 'ctx'/'fctx' in mapping if any, and flag
        # its existence to be reported by availablekeys()
        if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
            mapping[b'ctx'] = _placeholder
        if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
            mapping[b'fctx'] = _placeholder
        return mapping

    def _getsome(self, mapping, key):
        # mapping value wins over the fixed resource map
        v = mapping.get(key)
        if v is not None:
            return v
        return self._resmap.get(key)

    def _hasliteral(self, mapping, key):
        """Test if a literal value is set or unset in the given mapping"""
        return key in mapping and not callable(mapping[key])

    def _getliteral(self, mapping, key):
        """Return value of the given name if it is a literal"""
        v = mapping.get(key)
        if callable(v):
            return None
        return v

    def _hasnodespec(self, mapping):
        """Test if context revision is set or unset in the given mapping"""
        return b'node' in mapping or b'ctx' in mapping

    def _loadctx(self, mapping):
        # lazily resolve {node} -> changectx
        repo = self._getsome(mapping, b'repo')
        node = self._getliteral(mapping, b'node')
        if repo is None or node is None:
            return
        try:
            return repo[node]
        except error.RepoLookupError:
            return None  # maybe hidden/non-existent node

    def _loadfctx(self, mapping):
        # lazily resolve {path} within ctx -> filectx
        ctx = self._getsome(mapping, b'ctx')
        path = self._getliteral(mapping, b'path')
        if ctx is None or path is None:
            return None
        try:
            return ctx[path]
        except error.LookupError:
            return None  # maybe removed file?

    # loaders used by lookup() to materialize _placeholder values
    _loadermap = {
        b'ctx': _loadctx,
        b'fctx': _loadfctx,
    }
765 765
766 766
def _internaltemplateformatter(
    ui,
    out,
    topic,
    opts,
    spec,
    tmpl,
    docheader=b'',
    docfooter=b'',
    separator=b'',
):
    """Build template formatter that handles customizable built-in templates
    such as -Tjson(...)"""
    templates = {spec.ref: tmpl}
    # register only the parts that were actually supplied
    optional_parts = [
        (b'docheader', docheader),
        (b'docfooter', docfooter),
        (b'separator', separator),
    ]
    for partname, parttmpl in optional_parts:
        if parttmpl:
            templates[b'%s:%s' % (spec.ref, partname)] = parttmpl
    return templateformatter(
        ui, out, topic, opts, spec, overridetemplates=templates
    )
790 790
791 791
def formatter(ui, out, topic, opts):
    """Create the formatter selected by opts[b'template'] (or config)

    Dispatches on the parsed template spec: built-in structured formats
    (cbor/json/pickle/debug, optionally function-style), template-based
    output, or the plain text fallback.
    """
    spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
    if spec.ref == b"cbor" and spec.refargs is not None:
        # -Tcbor(...) with explicit field selection
        return _internaltemplateformatter(
            ui,
            out,
            topic,
            opts,
            spec,
            tmpl=b'{dict(%s)|cbor}' % spec.refargs,
            docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
            docfooter=cborutil.BREAK,
        )
    elif spec.ref == b"cbor":
        return cborformatter(ui, out, topic, opts)
    elif spec.ref == b"json" and spec.refargs is not None:
        # -Tjson(...) with explicit field selection
        return _internaltemplateformatter(
            ui,
            out,
            topic,
            opts,
            spec,
            tmpl=b'{dict(%s)|json}' % spec.refargs,
            docheader=b'[\n ',
            docfooter=b'\n]\n',
            separator=b',\n ',
        )
    elif spec.ref == b"json":
        return jsonformatter(ui, out, topic, opts)
    elif spec.ref == b"pickle":
        assert spec.refargs is None, r'function-style not supported'
        return pickleformatter(ui, out, topic, opts)
    elif spec.ref == b"debug":
        assert spec.refargs is None, r'function-style not supported'
        return debugformatter(ui, out, topic, opts)
    elif spec.ref or spec.tmpl or spec.mapfile:
        assert spec.refargs is None, r'function-style not supported'
        return templateformatter(ui, out, topic, opts, spec)
    # developer config: ui.formatdebug
    elif ui.configbool(b'ui', b'formatdebug'):
        return debugformatter(ui, out, topic, opts)
    # deprecated config: ui.formatjson
    elif ui.configbool(b'ui', b'formatjson'):
        return jsonformatter(ui, out, topic, opts)
    return plainformatter(ui, out, topic, opts)
837 837
838 838
@contextlib.contextmanager
def openformatter(ui, filename, topic, opts):
    """Create a formatter that writes outputs to the specified file

    Must be invoked using the 'with' statement.
    """
    # nested 'with' guarantees fm.end() runs before the file is closed
    with util.posixfile(filename, b'wb') as out:
        with formatter(ui, out, topic, opts) as fm:
            yield fm
848 848
849 849
@contextlib.contextmanager
def _neverending(fm):
    # context manager wrapper that does NOT call fm.end() on exit;
    # used by maybereopen() when the caller keeps ownership of fm
    yield fm
853 853
854 854
def maybereopen(fm, filename):
    """Create a formatter backed by file if filename specified, else return
    the given formatter

    Must be invoked using the 'with' statement. This will never call fm.end()
    of the given formatter.
    """
    # no filename: hand back the original formatter without finalizing it
    if not filename:
        return _neverending(fm)
    return openformatter(fm._ui, filename, fm._topic, fm._opts)
@@ -1,527 +1,527 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 import contextlib
12 12 import os
13 13
14 14 from .common import (
15 15 ErrorResponse,
16 16 HTTP_BAD_REQUEST,
17 17 cspvalues,
18 18 permhooks,
19 19 statusmessage,
20 20 )
21 21 from ..pycompat import getattr
22 22
23 23 from .. import (
24 24 encoding,
25 25 error,
26 26 extensions,
27 27 formatter,
28 28 hg,
29 29 hook,
30 30 profiling,
31 31 pycompat,
32 32 registrar,
33 33 repoview,
34 34 templatefilters,
35 35 templater,
36 36 templateutil,
37 37 ui as uimod,
38 38 util,
39 39 wireprotoserver,
40 40 )
41 41
42 42 from . import (
43 43 request as requestmod,
44 44 webcommands,
45 45 webutil,
46 46 wsgicgi,
47 47 )
48 48
49 49
def getstyle(req, configfn, templatepath):
    """Return (candidate styles, (style, mapfile, fp)) for this request.

    Candidates are tried in order: the ``style`` query parameter, the
    ``web.style`` config value, and finally the built-in ``paper`` style.
    """
    candidates = (
        req.qsparams.get(b'style', None),
        configfn(b'web', b'style'),
        b'paper',
    )
    return candidates, _stylemap(candidates, templatepath)
57 57
58 58
def _stylemap(styles, path=None):
    """Return (style, mapfile, fp) for the first style with a mapfile.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map

    Raises RuntimeError when no mapfile can be found for any candidate.
    """
    # NOTE: the dump contained both the pre- and post-change call
    # (open_template and try_open_template); the committed state uses
    # try_open_template, which returns (None, None) instead of raising
    # when the template is missing, so the search can fall through.
    for style in styles:
        # only plain name is allowed to honor template paths; reject
        # '.', '..' and anything containing a path separator
        if (
            not style
            or style in (pycompat.oscurdir, pycompat.ospardir)
            or pycompat.ossep in style
            or pycompat.osaltsep
            and pycompat.osaltsep in style
        ):
            continue
        locations = (os.path.join(style, b'map'), b'map-' + style, b'map')

        for location in locations:
            mapfile, fp = templater.try_open_template(location, path)
            if mapfile:
                return style, mapfile, fp

    raise RuntimeError(b"No hgweb templates found in %r" % path)
86 86
87 87
def makebreadcrumb(url, prefix=b''):
    '''Return a 'URL breadcrumb' list

    A 'URL breadcrumb' is a list of URL-name pairs, one pair per path
    component of the URL.  This can be used to create path navigation
    entries.
    '''
    if url.endswith(b'/'):
        url = url[:-1]
    if prefix:
        url = b'/' + prefix + url
    # strip at most one leading slash to get the relative path
    relpath = url[1:] if url.startswith(b'/') else url

    crumbs = []
    urlel = url
    # walk the path components right-to-left, shortening the URL as we go
    for pathel in reversed([b''] + relpath.split(b'/')):
        if not pathel or not urlel:
            break
        crumbs.append({b'url': urlel, b'name': pathel})
        urlel = os.path.dirname(urlel)
    return templateutil.mappinglist(reversed(crumbs))
112 112
113 113
class requestcontext(object):
    """Holds state/context for an individual request.

    Servers can be multi-threaded. Holding state on the WSGI application
    is prone to race conditions. Instances of this class exist to hold
    mutable and race-free state for requests.
    """

    def __init__(self, app, repo, req, res):
        # app: the owning hgweb application; repo/req/res: this request's
        # repository handle and WSGI request/response objects
        self.repo = repo
        self.reponame = app.reponame
        self.req = req
        self.res = res

        # per-request display limits, read from the web section
        self.maxchanges = self.configint(b'web', b'maxchanges')
        self.stripecount = self.configint(b'web', b'stripes')
        self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
        self.maxfiles = self.configint(b'web', b'maxfiles')
        self.allowpull = self.configbool(b'web', b'allow-pull')

        # we use untrusted=False to prevent a repo owner from using
        # web.templates in .hg/hgrc to get access to any file readable
        # by the user running the CGI script
        self.templatepath = self.config(b'web', b'templates', untrusted=False)

        # This object is more expensive to build than simple config values.
        # It is shared across requests. The app will replace the object
        # if it is updated. Since this is a reference and nothing should
        # modify the underlying object, it should be constant for the lifetime
        # of the request.
        self.websubtable = app.websubtable

        # Content-Security-Policy header value and per-request nonce
        self.csp, self.nonce = cspvalues(self.repo.ui)

    # Trust the settings from the .hg/hgrc files by default.
    def config(self, *args, **kwargs):
        kwargs.setdefault('untrusted', True)
        return self.repo.ui.config(*args, **kwargs)

    def configbool(self, *args, **kwargs):
        kwargs.setdefault('untrusted', True)
        return self.repo.ui.configbool(*args, **kwargs)

    def configint(self, *args, **kwargs):
        kwargs.setdefault('untrusted', True)
        return self.repo.ui.configint(*args, **kwargs)

    def configlist(self, *args, **kwargs):
        kwargs.setdefault('untrusted', True)
        return self.repo.ui.configlist(*args, **kwargs)

    def archivelist(self, nodeid):
        # delegate to webutil for the list of archive types for nodeid
        return webutil.archivelist(self.repo.ui, nodeid)

    def templater(self, req):
        """Build the templater used to render this request's pages."""
        # determine scheme, port and server name
        # this is needed to create absolute urls
        logourl = self.config(b'web', b'logourl')
        logoimg = self.config(b'web', b'logoimg')
        staticurl = (
            self.config(b'web', b'staticurl')
            or req.apppath.rstrip(b'/') + b'/static/'
        )
        if not staticurl.endswith(b'/'):
            staticurl += b'/'

        # figure out which style to use

        vars = {}
        styles, (style, mapfile, fp) = getstyle(
            req, self.config, self.templatepath
        )
        # only propagate an explicit ?style=... back into generated links
        if style == styles[0]:
            vars[b'style'] = style

        sessionvars = webutil.sessionvars(vars, b'?')

        if not self.reponame:
            # fall back through config, request info, then repo root
            self.reponame = (
                self.config(b'web', b'name', b'')
                or req.reponame
                or req.apppath
                or self.repo.root
            )

        filters = {}
        templatefilter = registrar.templatefilter(filters)

        @templatefilter(b'websub', intype=bytes)
        def websubfilter(text):
            # apply the repo's websub substitution table to rendered text
            return templatefilters.websub(text, self.websubtable)

        # create the templater
        # TODO: export all keywords: defaults = templatekw.keywords.copy()
        defaults = {
            b'url': req.apppath + b'/',
            b'logourl': logourl,
            b'logoimg': logoimg,
            b'staticurl': staticurl,
            b'urlbase': req.advertisedbaseurl,
            b'repo': self.reponame,
            b'encoding': encoding.encoding,
            b'sessionvars': sessionvars,
            b'pathdef': makebreadcrumb(req.apppath),
            b'style': style,
            b'nonce': self.nonce,
        }
        templatekeyword = registrar.templatekeyword(defaults)

        @templatekeyword(b'motd', requires=())
        def motd(context, mapping):
            # message-of-the-day from the web.motd config value
            yield self.config(b'web', b'motd')

        tres = formatter.templateresources(self.repo.ui, self.repo)
        return templater.templater.frommapfile(
            mapfile, fp=fp, filters=filters, defaults=defaults, resources=tres
        )

    def sendtemplate(self, name, **kwargs):
        """Helper function to send a response generated from a template."""
        kwargs = pycompat.byteskwargs(kwargs)
        self.res.setbodygen(self.tmpl.generate(name, kwargs))
        return self.res.sendresponse()
237 237
238 238
class hgweb(object):
    """HTTP server for individual repositories.

    Instances of this class serve HTTP responses for a particular
    repository.

    Instances are typically used as WSGI applications.

    Some servers are multi-threaded. On these servers, there may
    be multiple active threads inside __call__.
    """

    def __init__(self, repo, name=None, baseui=None):
        # ``repo`` may be a path (bytes), in which case we open the
        # repository ourselves, or an already-open repo object
        if isinstance(repo, bytes):
            if baseui:
                u = baseui.copy()
            else:
                u = uimod.ui.load()
            extensions.loadall(u)
            extensions.populateui(u)
            r = hg.repository(u, repo)
        else:
            # we trust caller to give us a private copy
            r = repo

        # force web-appropriate ui settings on both the repo ui and baseui
        r.ui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
        r.baseui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
        r.ui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
        r.baseui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
        # resolve file patterns relative to repo root
        r.ui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
        r.baseui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
        # it's unlikely that we can replace signal handlers in WSGI server,
        # and mod_wsgi issues a big warning. a plain hgweb process (with no
        # threading) could replace signal handlers, but we don't bother
        # conditionally enabling it.
        r.ui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
        r.baseui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
        # displaying bundling progress bar while serving feel wrong and may
        # break some wsgi implementation.
        r.ui.setconfig(b'progress', b'disable', b'true', b'hgweb')
        r.baseui.setconfig(b'progress', b'disable', b'true', b'hgweb')
        # stack of cached repo handles; see _obtainrepo
        self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
        self._lastrepo = self._repos[0]
        hook.redirect(True)
        self.reponame = name

    def _webifyrepo(self, repo):
        # apply the web.view filter and refresh the websub table
        repo = getwebview(repo)
        self.websubtable = webutil.getwebsubs(repo)
        return repo

    @contextlib.contextmanager
    def _obtainrepo(self):
        """Obtain a repo unique to the caller.

        Internally we maintain a stack of cachedlocalrepo instances
        to be handed out. If one is available, we pop it and return it,
        ensuring it is up to date in the process. If one is not available,
        we clone the most recently used repo instance and return it.

        It is currently possible for the stack to grow without bounds
        if the server allows infinite threads. However, servers should
        have a thread limit, thus establishing our limit.
        """
        if self._repos:
            cached = self._repos.pop()
            r, created = cached.fetch()
        else:
            cached = self._lastrepo.copy()
            r, created = cached.fetch()
            if created:
                r = self._webifyrepo(r)

        self._lastrepo = cached
        self.mtime = cached.mtime
        try:
            yield r
        finally:
            # return the handle to the stack for reuse by later requests
            self._repos.append(cached)

    def run(self):
        """Start a server from CGI environment.

        Modern servers should be using WSGI and should avoid this
        method, if possible.
        """
        if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
            b"CGI/1."
        ):
            raise RuntimeError(
                b"This function is only intended to be "
                b"called while running as a CGI script."
            )
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        """Run the WSGI application.

        This may be called by multiple threads.
        """
        req = requestmod.parserequestfromenv(env)
        res = requestmod.wsgiresponse(req, respond)

        return self.run_wsgi(req, res)

    def run_wsgi(self, req, res):
        """Internal method to run the WSGI application.

        This is typically only called by Mercurial. External consumers
        should be using instances of this class as the WSGI application.
        """
        with self._obtainrepo() as repo:
            profile = repo.ui.configbool(b'profiling', b'enabled')
            with profiling.profile(repo.ui, enabled=profile):
                for r in self._runwsgi(req, res, repo):
                    yield r

    def _runwsgi(self, req, res, repo):
        # Dispatch one request: parse the URL, check permissions, render
        # via the templater, and translate errors to HTTP responses.
        rctx = requestcontext(self, repo, req, res)

        # This state is global across all threads.
        encoding.encoding = rctx.config(b'web', b'encoding')
        rctx.repo.ui.environ = req.rawenv

        if rctx.csp:
            # hgwebdir may have added CSP header. Since we generate our own,
            # replace it.
            res.headers[b'Content-Security-Policy'] = rctx.csp

        # /api/* is reserved for various API implementations. Dispatch
        # accordingly. But URL paths can conflict with subrepos and virtual
        # repos in hgwebdir. So until we have a workaround for this, only
        # expose the URLs if the feature is enabled.
        apienabled = rctx.repo.ui.configbool(b'experimental', b'web.apiserver')
        if apienabled and req.dispatchparts and req.dispatchparts[0] == b'api':
            wireprotoserver.handlewsgiapirequest(
                rctx, req, res, self.check_perm
            )
            return res.sendresponse()

        handled = wireprotoserver.handlewsgirequest(
            rctx, req, res, self.check_perm
        )
        if handled:
            return res.sendresponse()

        # Old implementations of hgweb supported dispatching the request via
        # the initial query string parameter instead of using PATH_INFO.
        # If PATH_INFO is present (signaled by ``req.dispatchpath`` having
        # a value), we use it. Otherwise fall back to the query string.
        if req.dispatchpath is not None:
            query = req.dispatchpath
        else:
            query = req.querystring.partition(b'&')[0].partition(b';')[0]

        # translate user-visible url structure to internal structure

        args = query.split(b'/', 2)
        if b'cmd' not in req.qsparams and args and args[0]:
            cmd = args.pop(0)
            # a 'style-cmd' prefix selects the rendering style
            style = cmd.rfind(b'-')
            if style != -1:
                req.qsparams[b'style'] = cmd[:style]
                cmd = cmd[style + 1 :]

            # avoid accepting e.g. style parameter as command
            if util.safehasattr(webcommands, cmd):
                req.qsparams[b'cmd'] = cmd

            if cmd == b'static':
                req.qsparams[b'file'] = b'/'.join(args)
            else:
                # remaining path components are node and file arguments
                if args and args[0]:
                    node = args.pop(0).replace(b'%2F', b'/')
                    req.qsparams[b'node'] = node
                if args:
                    if b'file' in req.qsparams:
                        del req.qsparams[b'file']
                    for a in args:
                        req.qsparams.add(b'file', a)

            ua = req.headers.get(b'User-Agent', b'')
            # hg clients fetching 'rev' get the raw style
            if cmd == b'rev' and b'mercurial' in ua:
                req.qsparams[b'style'] = b'raw'

            if cmd == b'archive':
                # split the archive extension off the node name
                fn = req.qsparams[b'node']
                for type_, spec in pycompat.iteritems(webutil.archivespecs):
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.qsparams[b'node'] = fn[: -len(ext)]
                        req.qsparams[b'type'] = type_
        else:
            cmd = req.qsparams.get(b'cmd', b'')

        # process the web interface request

        try:
            rctx.tmpl = rctx.templater(req)
            ctype = rctx.tmpl.render(
                b'mimetype', {b'encoding': encoding.encoding}
            )

            # check read permissions non-static content
            if cmd != b'static':
                self.check_perm(rctx, req, None)

            if cmd == b'':
                # no command: fall back to the style's default command
                req.qsparams[b'cmd'] = rctx.tmpl.render(b'default', {})
                cmd = req.qsparams[b'cmd']

            # Don't enable caching if using a CSP nonce because then it wouldn't
            # be a nonce.
            if rctx.configbool(b'web', b'cache') and not rctx.nonce:
                tag = b'W/"%d"' % self.mtime
                if req.headers.get(b'If-None-Match') == tag:
                    res.status = b'304 Not Modified'
                    # Content-Type may be defined globally. It isn't valid on a
                    # 304, so discard it.
                    try:
                        del res.headers[b'Content-Type']
                    except KeyError:
                        pass
                    # Response body not allowed on 304.
                    res.setbodybytes(b'')
                    return res.sendresponse()

                res.headers[b'ETag'] = tag

            if cmd not in webcommands.__all__:
                msg = b'no such method: %s' % cmd
                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
            else:
                # Set some globals appropriate for web handlers. Commands can
                # override easily enough.
                res.status = b'200 Script output follows'
                res.headers[b'Content-Type'] = ctype
                return getattr(webcommands, cmd)(rctx)

        except (error.LookupError, error.RepoLookupError) as err:
            msg = pycompat.bytestr(err)
            if util.safehasattr(err, b'name') and not isinstance(
                err, error.ManifestLookupError
            ):
                msg = b'revision not found: %s' % err.name

            res.status = b'404 Not Found'
            res.headers[b'Content-Type'] = ctype
            return rctx.sendtemplate(b'error', error=msg)
        except (error.RepoError, error.StorageError) as e:
            res.status = b'500 Internal Server Error'
            res.headers[b'Content-Type'] = ctype
            return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
        except error.Abort as e:
            res.status = b'403 Forbidden'
            res.headers[b'Content-Type'] = ctype
            return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
        except ErrorResponse as e:
            for k, v in e.headers:
                res.headers[k] = v
            res.status = statusmessage(e.code, pycompat.bytestr(e))
            res.headers[b'Content-Type'] = ctype
            return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))

    def check_perm(self, rctx, req, op):
        # run every registered permission hook for this request/operation
        for permhook in permhooks:
            permhook(rctx, req, op)
507 507
508 508
def getwebview(repo):
    """Return ``repo`` filtered according to the 'web.view' config.

    Possible values are ``served``, ``visible`` and ``all``; default is
    ``served``.  The ``served`` filter only shows changesets that can be
    pulled from the hgweb instance.  The ``visible`` filter includes
    secret changesets but still excludes "hidden" ones.  ``all`` shows
    the unfiltered repository.

    See the repoview module for details.

    The option has been around undocumented since Mercurial 2.5, but no
    user ever asked about it. So we better keep it undocumented for now."""
    # experimental config: web.view
    viewconfig = repo.ui.config(b'web', b'view', untrusted=True)
    if viewconfig == b'all':
        return repo.unfiltered()
    if viewconfig in repoview.filtertable:
        return repo.filtered(viewconfig)
    return repo.filtered(b'served')
@@ -1,1085 +1,1085 b''
1 1 # logcmdutil.py - utility for log-like commands
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import itertools
11 11 import os
12 12 import posixpath
13 13
14 14 from .i18n import _
15 15 from .node import (
16 16 nullid,
17 17 wdirid,
18 18 wdirrev,
19 19 )
20 20
21 21 from . import (
22 22 dagop,
23 23 error,
24 24 formatter,
25 25 graphmod,
26 26 match as matchmod,
27 27 mdiff,
28 28 patch,
29 29 pathutil,
30 30 pycompat,
31 31 revset,
32 32 revsetlang,
33 33 scmutil,
34 34 smartset,
35 35 templatekw,
36 36 templater,
37 37 util,
38 38 )
39 39 from .utils import (
40 40 dateutil,
41 41 stringutil,
42 42 )
43 43
44 44
45 45 if pycompat.TYPE_CHECKING:
46 46 from typing import (
47 47 Any,
48 48 Optional,
49 49 Tuple,
50 50 )
51 51
52 52 for t in (Any, Optional, Tuple):
53 53 assert t
54 54
55 55
def getlimit(opts):
    """Return the log limit from -l/--limit as a positive int, or None.

    Aborts when the value is not an integer or is not positive.
    """
    raw = opts.get(b'limit')
    if not raw:
        return None
    try:
        value = int(raw)
    except ValueError:
        raise error.Abort(_(b'limit must be a positive integer'))
    if value <= 0:
        raise error.Abort(_(b'limit must be positive'))
    return value
69 69
70 70
def diffordiffstat(
    ui,
    repo,
    diffopts,
    ctx1,
    ctx2,
    match,
    changes=None,
    stat=False,
    fp=None,
    graphwidth=0,
    prefix=b'',
    root=b'',
    listsubrepos=False,
    hunksfilterfn=None,
):
    '''show diff or diffstat.

    Writes the diff of ``ctx1`` against ``ctx2`` (or a diffstat when
    ``stat`` is true) to ``fp``, or to ``ui`` with labels when ``fp`` is
    None.  ``prefix``/``root`` rewrite displayed paths; matching subrepos
    are recursed into as well.
    '''
    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = b''
    copysourcematch = None

    # function composition helper used to stack path rewriters below
    def compose(f, g):
        return lambda x: f(g(x))

    def pathfn(f):
        return posixpath.join(prefix, f)

    if relroot != b'':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        uirelroot = uipathfn(pathfn(relroot))
        relroot += b'/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(
                    _(b'warning: %s not inside relative root %s\n')
                    % (uipathfn(pathfn(matchroot)), uirelroot)
                )

        # restrict the match (and copy sources) to files under relroot
        relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
        match = matchmod.intersectmatchers(match, relrootmatch)
        copysourcematch = relrootmatch

        checkroot = repo.ui.configbool(
            b'devel', b'all-warnings'
        ) or repo.ui.configbool(b'devel', b'check-relroot')

        def relrootpathfn(f):
            # strip relroot from displayed paths (with a devel-mode check)
            if checkroot and not f.startswith(relroot):
                raise AssertionError(
                    b"file %s doesn't start with relroot %s" % (f, relroot)
                )
            return f[len(relroot) :]

        pathfn = compose(relrootpathfn, pathfn)

    if stat:
        # diffstat ignores context lines; size it to the terminal width
        diffopts = diffopts.copy(context=0, noprefix=False)
        width = 80
        if not ui.plain():
            width = ui.termwidth() - graphwidth
        # If an explicit --root was given, don't respect ui.relative-paths
        if not relroot:
            pathfn = compose(scmutil.getuipathfn(repo), pathfn)

    chunks = ctx2.diff(
        ctx1,
        match,
        changes,
        opts=diffopts,
        pathfn=pathfn,
        copysourcematch=copysourcematch,
        hunksfilterfn=hunksfilterfn,
    )

    if fp is not None or ui.canwritewithoutlabels():
        # plain (unlabeled) output path: write raw chunks
        out = fp or ui
        if stat:
            chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
        for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
            out.write(chunk)
    else:
        # labeled output path: attach ui labels to each chunk
        if stat:
            chunks = patch.diffstatui(util.iterlines(chunks), width=width)
        else:
            chunks = patch.difflabel(
                lambda chunks, **kwargs: chunks, chunks, opts=diffopts
            )
        if ui.canbatchlabeledwrites():

            def gen():
                for chunk, label in chunks:
                    yield ui.label(chunk, label=label)

            for chunk in util.filechunkiter(util.chunkbuffer(gen())):
                ui.write(chunk)
        else:
            for chunk, label in chunks:
                ui.write(chunk, label=label)

    # recurse into subrepos selected by the match
    node2 = ctx2.node()
    for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
        tempnode2 = node2
        try:
            if node2 is not None:
                tempnode2 = ctx2.substate[subpath][1]
        except KeyError:
            # A subrepo that existed in node1 was deleted between node1 and
            # node2 (inclusive). Thus, ctx2's substate won't contain that
            # subpath. The best we can do is to ignore it.
            tempnode2 = None
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        if listsubrepos or match.exact(subpath) or any(submatch.files()):
            sub.diff(
                ui,
                diffopts,
                tempnode2,
                submatch,
                changes=changes,
                stat=stat,
                fp=fp,
                prefix=subprefix,
            )
198 198
199 199
class changesetdiffer(object):
    """Generate diff of changeset with pre-configured filtering functions"""

    def _makefilematcher(self, ctx):
        # default: match every file in the repository; subclasses/callers
        # may override to restrict the diff to a subset of files
        return scmutil.matchall(ctx.repo())

    def _makehunksfilter(self, ctx):
        # default: no hunk-level filtering
        return None

    def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False):
        # render the diff (or diffstat when stat=True) of ctx against its
        # first parent, applying the configured file and hunk filters
        diffordiffstat(
            ui,
            ctx.repo(),
            diffopts,
            ctx.p1(),
            ctx,
            match=self._makefilematcher(ctx),
            stat=stat,
            graphwidth=graphwidth,
            hunksfilterfn=self._makehunksfilter(ctx),
        )
221 221
222 222
def changesetlabels(ctx):
    """Return the space-separated ui labels for rendering ctx's changeset
    line: the phase, and obsolete/unstable markers when applicable."""
    labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        labels.append(b'changeset.obsolete')
    if ctx.isunstable():
        labels.append(b'changeset.unstable')
        labels.extend(
            b'instability.%s' % instability
            for instability in ctx.instabilities()
        )
    return b' '.join(labels)
232 232
233 233
class changesetprinter(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
        self.ui = ui
        self.repo = repo
        # when buffered, output is collected per-rev in self.hunk and
        # emitted by flush() (used by graph log)
        self.buffered = buffered
        self._differ = differ or changesetdiffer()
        self._diffopts = patch.diffallopts(ui, diffopts)
        # whether to append a diffstat and/or a full patch after each entry
        self._includestat = diffopts and diffopts.get(b'stat')
        self._includediff = diffopts and diffopts.get(b'patch')
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None
        # column formats shared by all entries
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        # emit any buffered header/body for ctx's revision, deduplicating
        # consecutive identical headers
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, **props):
        props = pycompat.byteskwargs(props)
        if self.buffered:
            # capture output into self.hunk[rev] for later flush()
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, props)

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        graphwidth = props.get(b'graphwidth', 0)

        # quiet mode: just the changeset id
        if self.ui.quiet:
            self.ui.write(
                b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
            )
            return

        columns = self._columns
        self.ui.write(
            columns[b'changeset'] % scmutil.formatchangeid(ctx),
            label=changesetlabels(ctx),
        )

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != b'default':
            self.ui.write(columns[b'branch'] % branch, label=b'log.branch')

        for nsname, ns in pycompat.iteritems(self.repo.names):
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == b'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(
                columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
            )
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = b'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(
                columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
            )

        if self.ui.debugflag:
            # debug mode also shows the manifest; working-dir ctx has no
            # manifest node, so substitute the wdir pseudo-id/rev
            mnode = ctx.manifestnode()
            if mnode is None:
                mnode = wdirid
                mrev = wdirrev
            else:
                mrev = self.repo.manifestlog.rev(mnode)
            self.ui.write(
                columns[b'manifest']
                % scmutil.formatrevnode(self.ui, mrev, mnode),
                label=b'ui.debug log.manifest',
            )
        self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
        self.ui.write(
            columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
        )

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(
                columns[b'instability'] % b', '.join(instabilities),
                label=b'log.instability',
            )

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            # debug mode: modified/added/removed file lists
            files = ctx.p1().status(ctx)
            for key, value in zip(
                [b'files', b'files+', b'files-'],
                [files.modified, files.added, files.removed],
            ):
                if value:
                    self.ui.write(
                        columns[key] % b" ".join(value),
                        label=b'ui.debug log.files',
                    )
        elif ctx.files() and self.ui.verbose:
            self.ui.write(
                columns[b'files'] % b" ".join(ctx.files()),
                label=b'ui.note log.files',
            )
        if copies and self.ui.verbose:
            copies = [b'%s (%s)' % c for c in copies]
            self.ui.write(
                columns[b'copies'] % b' '.join(copies),
                label=b'ui.note log.copies',
            )

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(
                    columns[b'extra'] % (key, stringutil.escapestr(value)),
                    label=b'ui.debug log.extra',
                )

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(
                    _(b"description:\n"), label=b'ui.note log.description'
                )
                self.ui.write(description, label=b'ui.note log.description')
                self.ui.write(b"\n\n")
            else:
                # non-verbose: only the first line of the description
                self.ui.write(
                    columns[b'summary'] % description.splitlines()[0],
                    label=b'log.summary',
                )
        self.ui.write(b"\n")

        self._showpatch(ctx, graphwidth)

    def _showobsfate(self, ctx):
        # render the "obsfate" lines via a one-off templater
        # TODO: do not depend on templater
        tres = formatter.templateresources(self.repo.ui, self.repo)
        t = formatter.maketemplater(
            self.repo.ui,
            b'{join(obsfate, "\n")}',
            defaults=templatekw.keywords,
            resources=tres,
        )
        obsfate = t.renderdefault({b'ctx': ctx}).splitlines()

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(
                    self._columns[b'obsolete'] % obsfateline,
                    label=b'log.obsfate',
                )

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def _showpatch(self, ctx, graphwidth=0):
        # emit diffstat and/or patch as configured in __init__, with a
        # blank line between and after them
        if self._includestat:
            self._differ.showdiff(
                self.ui, ctx, self._diffopts, graphwidth, stat=True
            )
        if self._includestat and self._includediff:
            self.ui.write(b"\n")
        if self._includediff:
            self._differ.showdiff(
                self.ui, ctx, self._diffopts, graphwidth, stat=False
            )
        if self._includestat or self._includediff:
            self.ui.write(b"\n")
431 431
432 432
class changesetformatter(changesetprinter):
    """Format changeset information by generic formatter"""

    def __init__(
        self, ui, repo, fm, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # machine-readable output always uses git-style diffs
        self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
        self._fm = fm

    def close(self):
        self._fm.end()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        out = self._fm
        out.startitem()
        out.context(ctx=ctx)
        out.data(rev=scmutil.intrev(ctx), node=out.hexfunc(scmutil.binnode(ctx)))

        # datahint lists the fields explicitly requested by the template
        datahint = out.datahint()
        if self.ui.quiet and not datahint:
            return

        out.data(
            branch=ctx.branch(),
            phase=ctx.phasestr(),
            user=ctx.user(),
            date=out.formatdate(ctx.date()),
            desc=ctx.description(),
            bookmarks=out.formatlist(ctx.bookmarks(), name=b'bookmark'),
            tags=out.formatlist(ctx.tags(), name=b'tag'),
            parents=out.formatlist(
                [out.hexfunc(p.node()) for p in ctx.parents()], name=b'node'
            ),
        )

        debug = self.ui.debugflag
        if debug or b'manifest' in datahint:
            out.data(manifest=out.hexfunc(ctx.manifestnode() or wdirid))
        if debug or b'extra' in datahint:
            out.data(extra=out.formatdict(ctx.extra()))

        if debug or any(
            k in datahint for k in (b'modified', b'added', b'removed')
        ):
            st = ctx.p1().status(ctx)
            out.data(
                modified=out.formatlist(st.modified, name=b'file'),
                added=out.formatlist(st.added, name=b'file'),
                removed=out.formatlist(st.removed, name=b'file'),
            )

        verbose = not debug and self.ui.verbose
        if verbose or b'files' in datahint:
            out.data(files=out.formatlist(ctx.files(), name=b'file'))
        if (verbose and copies) or b'copies' in datahint:
            out.data(
                copies=out.formatdict(copies or {}, key=b'name', value=b'source')
            )

        # diffs are captured through a ui buffer rather than written directly
        if self._includestat or b'diffstat' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
            out.data(diffstat=self.ui.popbuffer())
        if self._includediff or b'diff' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
            out.data(diff=self.ui.popbuffer())
504 504
505 505
class changesettemplater(changesetprinter):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changesettemplater for common cases. See functions such as:
    maketemplater, changesetdisplayer, buildcommittemplate, or other
    functions that use changesettemplater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(
        self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # tres is shared with _graphnodeformatter()
        self._tresources = tres = formatter.templateresources(ui, repo)
        self.t = formatter.loadtemplater(
            ui,
            tmplspec,
            defaults=templatekw.keywords,
            resources=tres,
            cache=templatekw.defaulttempl,
        )
        # running item index; drives the per-item separator in _show()
        self._counter = itertools.count()

        self._tref = tmplspec.ref
        # logical part name -> template name to render for it;
        # the empty string means "nothing to write" for that part
        self._parts = {
            b'header': b'',
            b'footer': b'',
            tmplspec.ref: tmplspec.ref,
            b'docheader': b'',
            b'docfooter': b'',
            b'separator': b'',
        }
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, b''),
                (self.ui.verbose, b'_verbose'),
                (self.ui.quiet, b'_quiet'),
                (self.ui.debugflag, b'_debug'),
            ]
            # later (more specific) modes override earlier ones if present
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            # literal template: look up sibling parts (header, footer, ...)
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts[b'docheader']:
            self.ui.write(self.t.render(self._parts[b'docheader'], {}))

    def close(self):
        # emit the document footer once, through the buffered-footer machinery
        if self._parts[b'docfooter']:
            if not self.footer:
                self.footer = b""
            self.footer += self.t.render(self._parts[b'docfooter'], {})
        return super(changesettemplater, self).close()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props[b'ctx'] = ctx
        props[b'index'] = index = next(self._counter)
        props[b'revcache'] = {b'copies': copies}
        graphwidth = props.get(b'graphwidth', 0)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts[b'separator'] and index > 0:
            self.ui.write(self.t.render(self._parts[b'separator'], {}))

        # write header
        if self._parts[b'header']:
            h = self.t.render(self._parts[b'header'], props)
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # deduplicate consecutive identical headers
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(self.t.render(key, props))
        self._exthook(ctx)
        self._showpatch(ctx, graphwidth)

        if self._parts[b'footer']:
            if not self.footer:
                self.footer = self.t.render(self._parts[b'footer'], props)
603 603
604 604
def templatespec(tmpl, mapfile):
    """Build a changeset templatespec from a literal template or a map file.

    Exactly one of ``tmpl`` and ``mapfile`` may be non-empty.
    """
    assert not (tmpl and mapfile)
    if not mapfile:
        return formatter.literal_templatespec(tmpl)
    return formatter.mapfile_templatespec(b'changeset', mapfile)
611 611
612 612
def _lookuptemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """
    # ui settings
    if not tmpl and not style:  # templates are stronger than styles
        tmpl = ui.config(b'ui', b'logtemplate')
        if tmpl:
            return formatter.literal_templatespec(templater.unquotestring(tmpl))
        style = util.expandpath(ui.config(b'ui', b'style'))

    if style and not tmpl:
        mapfile = style
        fp = None
        # a bare name (no directory part) refers to a bundled style
        if not os.path.split(mapfile)[0]:
            (mapname, fp) = templater.try_open_template(
                b'map-cmdline.' + mapfile
            ) or templater.try_open_template(mapfile)
            if mapname:
                mapfile = mapname
        return formatter.mapfile_templatespec(b'changeset', mapfile, fp)

    return formatter.lookuptemplate(ui, b'changeset', tmpl)
639 639
640 640
def maketemplater(ui, repo, tmpl, buffered=False):
    """Create a changesettemplater from a literal template 'tmpl'
    byte-string."""
    return changesettemplater(
        ui, repo, formatter.literal_templatespec(tmpl), buffered=buffered
    )
646 646
647 647
def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changesetprinter() is done.
    """
    spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
    common = (differ, opts, buffered)

    # machine-readable formats have slightly different keyword set than
    # plain templates, which are handled by changesetformatter.
    # note that {b'pickle', b'debug'} can also be added to the list if needed.
    if spec.ref in {b'cbor', b'json'}:
        fm = ui.formatter(b'log', opts)
        return changesetformatter(ui, repo, fm, *common)

    if not (spec.ref or spec.tmpl or spec.mapfile):
        return changesetprinter(ui, repo, *common)

    return changesettemplater(ui, repo, spec, *common)
673 673
674 674
def _makematcher(repo, revs, pats, opts):
    """Build matcher and expanded patterns from log options

    If --follow, revs are the revisions to follow from.

    Returns (match, pats, slowpath) where
    - match: a matcher built from the given pats and -I/-X opts
    - pats: patterns used (globs are expanded on Windows)
    - slowpath: True if patterns aren't as simple as scanning filelogs
    """
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset, but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is that input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    if not slowpath:
        follow = opts.get(b'follow') or opts.get(b'follow_first')
        startctxs = []
        if follow and opts.get(b'rev'):
            startctxs = [repo[r] for r in revs]
        for fname in match.files():
            if follow and startctxs:
                # No idea if the path was a directory at that revision, so
                # take the slow path.
                if any(fname not in c for c in startctxs):
                    slowpath = True
                    continue
            elif follow and fname not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(fname)):
                    slowpath = True
                    continue
                raise error.Abort(
                    _(
                        b'cannot follow file not in parent '
                        b'revision: "%s"'
                    )
                    % fname
                )
            filelog = repo.file(fname)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % fname
                    )
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == b'.' or path in repo.store:
                    break
            else:
                slowpath = False

    return match, pats, slowpath
740 740
741 741
def _fileancestors(repo, revs, match, followfirst):
    """Return (revset, filematcher) following file ancestry from ``revs``."""
    filectxs = []
    for rev in revs:
        ctx = repo[rev]
        filectxs.extend(ctx[path].introfilectx() for path in ctx.walk(match))

    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated as a side effect
    # of the graph traversal.
    fcache = {}

    def filematcher(ctx):
        return scmutil.matchfiles(repo, fcache.get(ctx.rev(), []))

    def revgen():
        walk = dagop.filectxancestors(filectxs, followfirst=followfirst)
        for rev, ctxset in walk:
            fcache[rev] = [c.path() for c in ctxset]
            yield rev

    return smartset.generatorset(revgen(), iterasc=False), filematcher
764 764
765 765
766 766 def _makenofollowfilematcher(repo, pats, opts):
767 767 '''hook for extensions to override the filematcher for non-follow cases'''
768 768 return None
769 769
770 770
771 771 _opt2logrevset = {
772 772 b'no_merges': (b'not merge()', None),
773 773 b'only_merges': (b'merge()', None),
774 774 b'_matchfiles': (None, b'_matchfiles(%ps)'),
775 775 b'date': (b'date(%s)', None),
776 776 b'branch': (b'branch(%s)', b'%lr'),
777 777 b'_patslog': (b'filelog(%s)', b'%lr'),
778 778 b'keyword': (b'keyword(%s)', b'%lr'),
779 779 b'prune': (b'ancestors(%s)', b'not %lr'),
780 780 b'user': (b'user(%s)', b'%lr'),
781 781 }
782 782
783 783
def _makerevset(repo, match, pats, slowpath, opts):
    """Return a revset string built from log options and file patterns"""
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get(b'follow') or opts.get(b'follow_first')

    # branch and only_branch are really aliases and must be handled at
    # the same time
    branches = opts.get(b'branch', []) + opts.get(b'only_branch', [])
    opts[b'branch'] = [repo.lookupbranch(b) for b in branches]

    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X b" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = [b'r:', b'd:relpath']
        matchargs.extend(b'p:' + p for p in pats)
        matchargs.extend(b'i:' + p for p in opts.get(b'include', []))
        matchargs.extend(b'x:' + p for p in opts.get(b'exclude', []))
        opts[b'_matchfiles'] = matchargs
    elif not follow:
        opts[b'_patslog'] = list(pats)

    exprs = []
    for op, val in sorted(pycompat.iteritems(opts)):
        if not val or op not in _opt2logrevset:
            continue
        revop, listop = _opt2logrevset[op]
        if revop and b'%' not in revop:
            # fixed expression, value only enables it
            exprs.append(revop)
        elif not listop:
            exprs.append(revsetlang.formatspec(revop, val))
        else:
            if revop:
                val = [revsetlang.formatspec(revop, v) for v in val]
            exprs.append(revsetlang.formatspec(listop, val))

    if exprs:
        return b'(' + b' and '.join(exprs) + b')'
    return None
836 836
837 837
def _initialrevs(repo, opts):
    """Return the initial set of revisions to be filtered or followed"""
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    if opts.get(b'rev'):
        revs = scmutil.revrange(repo, opts[b'rev'])
    elif not follow:
        revs = smartset.spanset(repo)
    elif repo.dirstate.p1() == nullid:
        # following from an unborn working directory parent yields nothing
        revs = smartset.baseset()
    else:
        revs = repo.revs(b'.')
    # newest-first in every case
    revs.reverse()
    return revs
851 851
852 852
def getrevs(repo, pats, opts):
    # type: (Any, Any, Any) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
    """Return (revs, differ) where revs is a smartset

    differ is a changesetdiffer with pre-configured file matcher.
    """
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    followfirst = opts.get(b'follow_first')
    limit = getlimit(opts)
    revs = _initialrevs(repo, opts)
    if not revs:
        # nothing to show; differ is None in this (and only this) case
        return smartset.baseset(), None
    match, pats, slowpath = _makematcher(repo, revs, pats, opts)
    filematcher = None
    if follow:
        if slowpath or match.always():
            # no usable per-file patterns: follow full ancestry
            revs = dagop.revancestors(repo, revs, followfirst=followfirst)
        else:
            revs, filematcher = _fileancestors(repo, revs, match, followfirst)
        revs.reverse()
    if filematcher is None:
        # extension hook; returns None unless overridden
        filematcher = _makenofollowfilematcher(repo, pats, opts)
    if filematcher is None:

        def filematcher(ctx):
            return match

    expr = _makerevset(repo, match, pats, slowpath, opts)
    if opts.get(b'graph'):
        # User-specified revs might be unsorted, but don't sort before
        # _makerevset because it might depend on the order of revs
        if repo.ui.configbool(b'experimental', b'log.topo'):
            if not revs.istopo():
                revs = dagop.toposort(revs, repo.changelog.parentrevs)
                # TODO: try to iterate the set lazily
                revs = revset.baseset(list(revs), istopo=True)
        elif not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        # filter the initial set through the option-derived revset
        matcher = revset.match(None, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # --limit applies last, after all filtering and sorting
        revs = revs.slice(0, limit)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    return revs, differ
900 900
901 901
def _parselinerangeopt(repo, opts):
    """Parse --line-range log option and return a list of tuples (filename,
    (fromline, toline)).
    """
    results = []
    for pat in opts.get(b'line_range', []):
        # a pattern looks like "FILE,FROM:TO"
        try:
            pat, linerange = pat.rsplit(b',', 1)
        except ValueError:
            raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
        try:
            fromline, toline = map(int, linerange.split(b':'))
        except ValueError:
            raise error.Abort(_(b"invalid line range for %s") % pat)
        msg = _(b"line range pattern '%s' must match exactly one file") % pat
        fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
        results.append((fname, util.processlinerange(fromline, toline)))
    return results
922 922
923 923
def getlinerangerevs(repo, userrevs, opts):
    """Return (revs, differ).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "differ" is a changesetdiffer with pre-configured file matcher and hunks
    filter.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(
                _(b'cannot follow file not in parent revision: "%s"') % fname
            )
        fctx = wctx.filectx(fname)
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev is None:
                # working-directory revision
                rev = wdirrev
            # only keep revisions the caller asked for
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(rev, {}).setdefault(
                fctx.path(), []
            ).append(linerange)

    def nofilterhunksfn(fctx, hunks):
        # identity filter used when a revision has no recorded line ranges
        return hunks

    def hunksfilter(ctx):
        # return a per-revision hunk filter closed over linerangesbyrev
        fctxlineranges = linerangesbyrev.get(scmutil.intrev(ctx))
        if fctxlineranges is None:
            return nofilterhunksfn

        def filterfn(fctx, hunks):
            # keep only hunks overlapping one of the tracked line ranges
            lineranges = fctxlineranges.get(fctx.path())
            if lineranges is not None:
                for hr, lines in hunks:
                    if hr is None: # binary
                        yield hr, lines
                        continue
                    if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
                        yield hr, lines
            else:
                for hunk in hunks:
                    yield hunk

        return filterfn

    def filematcher(ctx):
        files = list(linerangesbyrev.get(scmutil.intrev(ctx), []))
        return scmutil.matchfiles(repo, files)

    revs = sorted(linerangesbyrev, reverse=True)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    differ._makehunksfilter = hunksfilter
    return smartset.baseset(revs), differ
986 986
987 987
def _graphnodeformatter(ui, displayer):
    """Return a callable producing the graph-node character for a ctx."""
    spec = ui.config(b'ui', b'graphnodetemplate')
    if not spec:
        return templatekw.getgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    if isinstance(displayer, changesettemplater):
        # reuse cache of slow templates
        tres = displayer._tresources
    else:
        tres = formatter.templateresources(ui)
    templ = formatter.maketemplater(
        ui, spec, defaults=templatekw.keywords, resources=tres
    )

    def formatnode(repo, ctx, cache):
        return templ.renderdefault({b'ctx': ctx, b'repo': repo})

    return formatnode
1008 1008
1009 1009
def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
    """Render each (rev, type, ctx, parents) entry of ``dag`` as an ASCII
    graph row, interleaving the displayer output with graph edges."""
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state.styles

    # only set graph styling if HGPLAIN is not set.
    if ui.plain(b'graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, b'|'))
    else:
        edgetypes = {
            b'parent': graphmod.PARENT,
            b'grandparent': graphmod.GRANDPARENT,
            b'missing': graphmod.MISSINGPARENT,
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config(
                b'experimental', b'graphstyle.%s' % name, styles[key]
            )
            if not styles[key]:
                # empty style means "draw no edge of this type"
                styles[key] = None

    # experimental config: experimental.graphshorten
    state.graphshorten = ui.configbool(b'experimental', b'graphshorten')

    formatnode_cache = {}
    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx, formatnode_cache)
        copies = getcopies(ctx) if getcopies else None
        edges = edgefn(type, char, state, rev, parents)
        # the first edge carries the width used for the displayer output
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(
            ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
        )
        # the displayer buffered its output in hunk[rev]; split into lines
        lines = displayer.hunk.pop(rev).split(b'\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        # text lines are consumed by the first edge; later edges draw only
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
            lines = []
    displayer.close()
1055 1055
1056 1056
def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
    """Walk ``revs`` as a DAG and render them as an ASCII graph."""
    dag = graphmod.dagwalker(repo, revs)
    displaygraph(ui, repo, dag, displayer, graphmod.asciiedges, getrenamed)
1060 1060
1061 1061
def displayrevs(ui, repo, revs, displayer, getcopies):
    """Show each revision in ``revs`` through ``displayer``."""
    for rev in revs:
        ctx = repo[rev]
        if getcopies:
            copies = getcopies(ctx)
        else:
            copies = None
        displayer.show(ctx, copies=copies)
        displayer.flush(ctx)
    displayer.close()
1069 1069
1070 1070
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with --graph is enabled."""
    for name in (b"newest_first",):
        if opts.get(name):
            raise error.Abort(
                _(b"-G/--graph option is incompatible with --%s")
                % name.replace(b"_", b"-")
            )
1078 1078
1079 1079
def graphrevs(repo, nodes, opts):
    """Reverse ``nodes``, honor --limit, and return their DAG entries."""
    cap = getlimit(opts)
    nodes.reverse()
    selected = nodes if cap is None else nodes[:cap]
    return graphmod.nodes(repo, selected)
@@ -1,1111 +1,1112 b''
1 1 # templater.py - template expansion for output
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """Slightly complicated template engine for commands and hgweb
9 9
10 10 This module provides low-level interface to the template engine. See the
11 11 formatter and cmdutil modules if you are looking for high-level functions
12 12 such as ``cmdutil.rendertemplate(ctx, tmpl)``.
13 13
14 14 Internal Data Types
15 15 -------------------
16 16
17 17 Template keywords and functions take a dictionary of current symbols and
18 18 resources (a "mapping") and return result. Inputs and outputs must be one
19 19 of the following data types:
20 20
21 21 bytes
22 22 a byte string, which is generally a human-readable text in local encoding.
23 23
24 24 generator
25 25 a lazily-evaluated byte string, which is a possibly nested generator of
26 26 values of any printable types, and will be folded by ``stringify()``
27 27 or ``flatten()``.
28 28
29 29 None
30 30 sometimes represents an empty value, which can be stringified to ''.
31 31
32 32 True, False, int, float
33 33 can be stringified as such.
34 34
35 35 wrappedbytes, wrappedvalue
36 36 a wrapper for the above printable types.
37 37
38 38 date
39 39 represents a (unixtime, offset) tuple.
40 40
41 41 hybrid
42 42 represents a list/dict of printable values, which can also be converted
43 43 to mappings by % operator.
44 44
45 45 hybriditem
46 46 represents a scalar printable value, also supports % operator.
47 47
48 48 revslist
49 49 represents a list of revision numbers.
50 50
51 51 mappinggenerator, mappinglist
52 52 represents mappings (i.e. a list of dicts), which may have default
53 53 output format.
54 54
55 55 mappingdict
56 56 represents a single mapping (i.e. a dict), which may have default output
57 57 format.
58 58
59 59 mappingnone
60 60 represents None of Optional[mappable], which will be mapped to an empty
61 61 string by % operation.
62 62
63 63 mappedgenerator
64 64 a lazily-evaluated list of byte strings, which is e.g. a result of %
65 65 operation.
66 66 """
67 67
68 68 from __future__ import absolute_import, print_function
69 69
70 70 import abc
71 71 import os
72 72
73 73 from .i18n import _
74 74 from .pycompat import getattr
75 75 from . import (
76 76 config,
77 77 encoding,
78 78 error,
79 79 parser,
80 80 pycompat,
81 81 templatefilters,
82 82 templatefuncs,
83 83 templateutil,
84 84 util,
85 85 )
86 86 from .utils import (
87 87 resourceutil,
88 88 stringutil,
89 89 )
90 90
91 91 # template parsing
92 92
# token-type: (binding-strength, primary, prefix, infix, suffix)
elements = {
    # grouping / function application
    b"(": (20, None, (b"group", 1, b")"), (b"func", 1, b")"), None),
    b")": (0, None, None, None, None),
    # operators, strongest binding first
    b".": (18, None, None, (b".", 18), None),
    b"%": (15, None, None, (b"%", 15), None),
    b"|": (15, None, None, (b"|", 15), None),
    b"*": (5, None, None, (b"*", 5), None),
    b"/": (5, None, None, (b"/", 5), None),
    b"+": (4, None, None, (b"+", 4), None),
    b"-": (4, None, (b"negate", 19), (b"-", 4), None),
    b"=": (3, None, None, (b"keyvalue", 3), None),
    b",": (2, None, None, (b"list", 2), None),
    # primary (literal) tokens and stream terminator
    b"integer": (0, b"integer", None, None, None),
    b"symbol": (0, b"symbol", None, None, None),
    b"string": (0, b"string", None, None, None),
    b"template": (0, b"template", None, None, None),
    b"end": (0, None, None, None, None),
}
112 112
113 113
def tokenize(program, start, end, term=None):
    """Parse a template expression into a stream of tokens, which must end
    with term if specified

    Yields (token-type, value, position) tuples; the stream always ends
    with an (b'end', None, pos) token unless a parse error is raised.
    """
    pos = start
    program = pycompat.bytestr(program)
    while pos < end:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c in b"(=,).%|+-*/": # handle simple operators
            yield (c, None, pos)
        elif c in b'"\'': # handle quoted templates
            s = pos + 1
            data, pos = _parsetemplate(program, s, end, c)
            yield (b'template', data, s)
            pos -= 1
        elif c == b'r' and program[pos : pos + 2] in (b"r'", b'r"'):
            # handle quoted strings
            c = program[pos + 1]
            s = pos = pos + 2
            while pos < end: # find closing quote
                d = program[pos]
                if d == b'\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield (b'string', program[s:pos], s)
                    break
                pos += 1
            else:
                raise error.ParseError(_(b"unterminated string"), s)
        elif c.isdigit():
            # integer literal
            s = pos
            while pos < end:
                d = program[pos]
                if not d.isdigit():
                    break
                pos += 1
            yield (b'integer', program[s:pos], s)
            pos -= 1
        elif (
            c == b'\\'
            and program[pos : pos + 2] in (br"\'", br'\"')
            or c == b'r'
            and program[pos : pos + 3] in (br"r\'", br'r\"')
        ):
            # handle escaped quoted strings for compatibility with 2.9.2-3.4,
            # where some of nested templates were preprocessed as strings and
            # then compiled. therefore, \"...\" was allowed. (issue4733)
            #
            # processing flow of _evalifliteral() at 5ab28a2e9962:
            # outer template string -> stringify() -> compiletemplate()
            # ------------------------ ------------ ------------------
            # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
            #             ~~~~~~~~
            #             escaped quoted string
            if c == b'r':
                pos += 1
                token = b'string'
            else:
                token = b'template'
            quote = program[pos : pos + 2]
            s = pos = pos + 2
            while pos < end: # find closing escaped quote
                if program.startswith(b'\\\\\\', pos, end):
                    pos += 4 # skip over double escaped characters
                    continue
                if program.startswith(quote, pos, end):
                    # interpret as if it were a part of an outer string
                    data = parser.unescapestr(program[s:pos])
                    if token == b'template':
                        data = _parsetemplate(data, 0, len(data))[0]
                    yield (token, data, s)
                    pos += 1
                    break
                pos += 1
            else:
                raise error.ParseError(_(b"unterminated string"), s)
        elif c.isalnum() or c in b'_':
            # symbol (keyword/function/variable name)
            s = pos
            pos += 1
            while pos < end: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d == b"_"):
                    break
                pos += 1
            sym = program[s:pos]
            yield (b'symbol', sym, s)
            pos -= 1
        elif c == term:
            yield (b'end', None, pos)
            return
        else:
            raise error.ParseError(_(b"syntax error"), pos)
        pos += 1
    if term:
        # caller demanded an explicit terminator but input ran out
        raise error.ParseError(_(b"unterminated template expansion"), start)
    yield (b'end', None, pos)
212 212
213 213
def _parsetemplate(tmpl, start, stop, quote=b''):
    r"""
    >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"')
    ([('string', 'foo\\')], 6)
    """
    parsed = []
    for typ, val, pos in _scantemplate(tmpl, start, stop, quote):
        if typ == b'end':
            return parsed, pos
        if typ == b'string':
            parsed.append((typ, val))
        elif typ == b'template':
            parsed.append(val)
        else:
            raise error.ProgrammingError(b'unexpected type: %s' % typ)
    raise error.ProgrammingError(b'unterminated scanning of template')
238 238
239 239
def scantemplate(tmpl, raw=False):
    r"""Scan (type, start, end) positions of outermost elements in template

    If raw=True, a backslash is not taken as an escape character just like
    r'' string in Python. Note that this is different from r'' literal in
    template in that no template fragment can appear in r'', e.g. r'{foo}'
    is a literal '{foo}', but ('{foo}', raw=True) is a template expression
    'foo'.

    >>> list(scantemplate(b'foo{bar}"baz'))
    [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)]
    >>> list(scantemplate(b'outer{"inner"}outer'))
    [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)]
    >>> list(scantemplate(b'foo\\{escaped}'))
    [('string', 0, 5), ('string', 5, 13)]
    >>> list(scantemplate(b'foo\\{escaped}', raw=True))
    [('string', 0, 4), ('template', 4, 13)]
    """
    # each element's end position is the start of the next one, so buffer
    # one (type, start) pair and flush it when the following token arrives
    pending = None
    for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw):
        if pending:
            yield pending + (pos,)
        if typ == b'end':
            return
        pending = (typ, pos)
    raise error.ProgrammingError(b'unterminated scanning of template')
267 267
268 268
def _scantemplate(tmpl, start, stop, quote=b'', raw=False):
    """Parse template string into chunks of strings and template expressions

    Yields (b'string', value, pos), (b'template', parsedtree, pos) and a
    final (b'end', None, pos).  If ``quote`` is given, scanning stops when
    an unescaped quote character is found.
    """
    sepchars = b'{' + quote
    # with raw=True, backslashes are kept verbatim (identity); otherwise
    # escape sequences are decoded
    unescape = [parser.unescapestr, pycompat.identity][raw]
    pos = start
    p = parser.parser(elements)
    try:
        while pos < stop:
            # index of the nearest separator ('{' or the quote char);
            # the key sorts -1 (not found) after any real index
            n = min(
                (tmpl.find(c, pos, stop) for c in pycompat.bytestr(sepchars)),
                key=lambda n: (n < 0, n),
            )
            if n < 0:
                # no more separators: the rest is one literal string
                yield (b'string', unescape(tmpl[pos:stop]), pos)
                pos = stop
                break
            c = tmpl[n : n + 1]
            bs = 0  # count leading backslashes
            if not raw:
                bs = (n - pos) - len(tmpl[pos:n].rstrip(b'\\'))
            if bs % 2 == 1:
                # escaped (e.g. '\{', '\\\{', but not '\\{')
                yield (b'string', unescape(tmpl[pos : n - 1]) + c, pos)
                pos = n + 1
                continue
            if n > pos:
                # literal text preceding the separator
                yield (b'string', unescape(tmpl[pos:n]), pos)
            if c == quote:
                yield (b'end', None, n + 1)
                return

            # c is '{': parse the embedded expression up to the matching '}'
            parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, b'}'))
            if not tmpl.startswith(b'}', pos):
                raise error.ParseError(_(b"invalid token"), pos)
            yield (b'template', parseres, n)
            pos += 1

        if quote:
            raise error.ParseError(_(b"unterminated string"), start)
    except error.ParseError as inst:
        # decorate the error with a caret pointing at the failure location
        _addparseerrorhint(inst, tmpl)
        raise
    yield (b'end', None, pos)
312 312
313 313
def _addparseerrorhint(inst, tmpl):
    """Attach a caret-annotated copy of tmpl to a ParseError as its hint."""
    if len(inst.args) <= 1:
        return  # no location
    loc = inst.args[1]
    # One-char newlines are rendered below as the two-char literal r'\n',
    # which shifts the caret right by one column per newline that occurs
    # before the error location.
    newlines = tmpl[:loc].count(b'\n')
    rendered = tmpl.replace(b'\n', br'\n')
    # The hint is printed after an open paren, so indent by "loc + 1"
    # (plus the newline adjustment) to line the caret up with the error.
    caret = b' ' * (loc + 1 + newlines) + b'^ ' + _(b'here')
    inst.hint = rendered + b'\n' + caret
328 328
329 329
330 330 def _unnesttemplatelist(tree):
331 331 """Expand list of templates to node tuple
332 332
333 333 >>> def f(tree):
334 334 ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree))))
335 335 >>> f((b'template', []))
336 336 (string '')
337 337 >>> f((b'template', [(b'string', b'foo')]))
338 338 (string 'foo')
339 339 >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')]))
340 340 (template
341 341 (string 'foo')
342 342 (symbol 'rev'))
343 343 >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str
344 344 (template
345 345 (symbol 'rev'))
346 346 >>> f((b'template', [(b'template', [(b'string', b'foo')])]))
347 347 (string 'foo')
348 348 """
349 349 if not isinstance(tree, tuple):
350 350 return tree
351 351 op = tree[0]
352 352 if op != b'template':
353 353 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
354 354
355 355 assert len(tree) == 2
356 356 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
357 357 if not xs:
358 358 return (b'string', b'') # empty template ""
359 359 elif len(xs) == 1 and xs[0][0] == b'string':
360 360 return xs[0] # fast path for string with no template fragment "x"
361 361 else:
362 362 return (op,) + xs
363 363
364 364
def parse(tmpl):
    """Parse a template string into an (op, ...) node tree."""
    stop = len(tmpl)
    parsed, pos = _parsetemplate(tmpl, 0, stop)
    assert pos == stop, b'unquoted template should be consumed'
    return _unnesttemplatelist((b'template', parsed))
370 370
371 371
def parseexpr(expr):
    """Parse a template expression into tree

    >>> parseexpr(b'"foo"')
    ('string', 'foo')
    >>> parseexpr(b'foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> parseexpr(b'foo(')
    Traceback (most recent call last):
    ...
    ParseError: ('not a prefix: end', 4)
    >>> parseexpr(b'"foo" "bar"')
    Traceback (most recent call last):
    ...
    ParseError: ('invalid token', 7)
    """
    try:
        return _parseexpr(expr)
    except error.ParseError as inst:
        # decorate the error with a caret pointing at the failure location
        _addparseerrorhint(inst, expr)
        raise
393 393
394 394
def _parseexpr(expr):
    """Parse a bare template expression; no parse-error hint is attached."""
    stop = len(expr)
    tree, pos = parser.parser(elements).parse(tokenize(expr, 0, stop))
    if pos != stop:
        raise error.ParseError(_(b'invalid token'), pos)
    return _unnesttemplatelist(tree)
401 401
402 402
def prettyformat(tree):
    """Render a parsed template tree as an indented debug string."""
    leaftypes = (b'integer', b'string', b'symbol')
    return parser.prettyformat(tree, leaftypes)
405 405
406 406
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair"""
    if not exp:
        raise error.ParseError(_(b"missing argument"))
    # dispatch on the node type to the per-type compiler
    return curmethods[exp[0]](exp, context)
413 413
414 414
415 415 # template evaluation
416 416
417 417
def getsymbol(exp):
    """Return the name carried by a 'symbol' node; raise ParseError otherwise."""
    typ = exp[0]
    if typ != b'symbol':
        raise error.ParseError(_(b"expected a symbol, got '%s'") % typ)
    return exp[1]
422 422
423 423
def getlist(x):
    """Flatten a left-nested 'list' node into a Python list of child nodes."""
    if not x:
        return []
    # 'list' nodes nest on the left: (list, (list, a, b), c) == [a, b, c].
    # Walk down the left spine collecting right-hand children, then reverse.
    items = []
    while x and x[0] == b'list':
        items.append(x[2])
        x = x[1]
    if x:
        items.append(x)
    items.reverse()
    return items
430 430
431 431
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    op = exp[0]
    if op in (b'template', b'string'):
        return compileexp(exp, context, methods)
    if op == b'symbol':
        # unlike runsymbol(), here 'symbol' is always taken as template name
        # even if it exists in mapping. this allows us to override mapping
        # by web templates, e.g. 'changelogtag' is redefined in map file.
        return context._load(exp[1])
    raise error.ParseError(_(b"expected template specifier"))
443 443
444 444
def _runrecursivesymbol(context, mapping, key):
    # placeholder stored in engine._cache while 'key' is being compiled;
    # evaluating it means the template (indirectly) refers to itself
    raise error.Abort(_(b"recursive reference '%s' in template") % key)
447 447
448 448
def buildtemplate(exp, context):
    """Compile a 'template' node: each child becomes a (func, data) pair."""
    compiled = []
    for child in exp[1:]:
        compiled.append(compileexp(child, context, methods))
    return (templateutil.runtemplate, compiled)
452 452
453 453
def buildfilter(exp, context):
    """Compile '{expr|name}'; 'name' may be a filter or a unary function."""
    name = getsymbol(exp[2])
    if name in context._filters:
        arg = compileexp(exp[1], context, methods)
        return (templateutil.runfilter, (arg, context._filters[name]))
    if name in context._funcs:
        func = context._funcs[name]
        fargs = _buildfuncargs(exp[1], context, methods, name, func._argspec)
        return (func, fargs)
    raise error.ParseError(_(b"unknown function '%s'") % name)
465 465
466 466
def buildmap(exp, context):
    """Compile '{expr % template}' into a runmap thunk."""
    compiled_data = compileexp(exp[1], context, methods)
    compiled_tmpl = gettemplate(exp[2], context)
    return (templateutil.runmap, (compiled_data, compiled_tmpl))
471 471
472 472
def buildmember(exp, context):
    """Compile '{expr.name}' member access."""
    compiled = compileexp(exp[1], context, methods)
    member = getsymbol(exp[2])
    return (templateutil.runmember, (compiled, member))
477 477
478 478
def buildnegate(exp, context):
    """Compile unary minus applied to an expression."""
    return (templateutil.runnegate, compileexp(exp[1], context, exprmethods))
482 482
483 483
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic node; ``func`` combines the two operands."""
    lhs = compileexp(exp[1], context, exprmethods)
    rhs = compileexp(exp[2], context, exprmethods)
    return (templateutil.runarithmetic, (func, lhs, rhs))
488 488
489 489
def buildfunc(exp, context):
    """Compile 'name(args)'; 'name' may be a function or a one-arg filter."""
    name = getsymbol(exp[1])
    if name in context._funcs:
        func = context._funcs[name]
        fargs = _buildfuncargs(exp[2], context, exprmethods, name, func._argspec)
        return (func, fargs)
    if name in context._filters:
        # a filter called like a function must take exactly one argument
        fargs = _buildfuncargs(exp[2], context, exprmethods, name, argspec=None)
        if len(fargs) != 1:
            raise error.ParseError(_(b"filter %s expects one argument") % name)
        return (templateutil.runfilter, (fargs[0], context._filters[name]))
    raise error.ParseError(_(b"unknown function '%s'") % name)
503 503
504 504
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (templateutil.runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
    ['l', 'k']
    >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
    >>> list(args.keys()), list(args[b'opts'].keys())
    (['opts'], ['opts', 'k'])
    """

    def compiledict(xs):
        # compile each named argument, keeping the caller's ordering
        return util.sortdict(
            (k, compileexp(x, context, curmethods))
            for k, x in pycompat.iteritems(xs)
        )

    def compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(
        getlist(exp),
        funcname,
        argspec,
        keyvaluenode=b'keyvalue',
        keynode=b'symbol',
    )
    compargs = util.sortdict()
    if varkey:
        # '*var': list of remaining positional arguments
        compargs[varkey] = compilelist(treeargs.pop(varkey))
    if optkey:
        # '**opts': dict of remaining keyword arguments
        compargs[optkey] = compiledict(treeargs.pop(optkey))
    compargs.update(compiledict(treeargs))
    return compargs
550 550
551 551
def buildkeyvaluepair(exp, content):
    # 'key=value' nodes are only valid inside function argument lists,
    # where _buildfuncargs() consumes them; reaching here is a misuse
    raise error.ParseError(_(b"can't use a key-value pair in this context"))
554 554
555 555
def buildlist(exp, context):
    # 'list' nodes are only valid as function arguments; a bare
    # comma-separated list in template position is a syntax error
    raise error.ParseError(
        _(b"can't use a list in this context"),
        hint=_(b'check place of comma and parens'),
    )
561 561
562 562
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
# maps node type -> compiler callable returning a (func, data) pair
exprmethods = {
    b"integer": lambda e, c: (templateutil.runinteger, e[1]),
    b"string": lambda e, c: (templateutil.runstring, e[1]),
    b"symbol": lambda e, c: (templateutil.runsymbol, e[1]),
    b"template": buildtemplate,
    b"group": lambda e, c: compileexp(e[1], c, exprmethods),
    b".": buildmember,
    b"|": buildfilter,
    b"%": buildmap,
    b"func": buildfunc,
    b"keyvalue": buildkeyvaluepair,
    b"list": buildlist,
    b"+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    b"-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    b"negate": buildnegate,
    # integer division, to stay well-defined on bytes-level ints
    b"*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    b"/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
}

# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
methods = exprmethods.copy()
methods[b"integer"] = exprmethods[b"symbol"]  # '{1}' as variable
586 586
587 587
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""

    _section = _(b'template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        if tree[0] == b'func' and tree[1][0] == b'symbol':
            return tree[1][1], getlist(tree[2])
        if tree[0] == b'|' and tree[2][0] == b'symbol':
            # '{arg|filter}' is treated as the one-arg call 'filter(arg)'
            return tree[2][1], [tree[1]]
        # implicit None: not a function-shaped node
602 602
603 603
def expandaliases(tree, aliases):
    """Return a new tree with the given alias (name, replacement) pairs
    expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
608 608
609 609
610 610 # template engine
611 611
612 612
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # only strip when the string is long enough to hold a quote pair and
    # both ends carry the same quote character (bytes indexing yields ints)
    if len(s) >= 2 and s[0] in b"'\"" and s[0] == s[-1]:
        return s[1:-1]
    return s
618 618
619 619
class resourcemapper(object):  # pytype: disable=ignored-metaclass
    """Mapper of internal template resources

    Abstract interface; see nullresourcemapper for a trivial
    implementation.  Resources are engine-internal objects (e.g. caches,
    repo handles) that user templates cannot access as symbols.
    """

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def availablekeys(self, mapping):
        """Return a set of available resource keys based on the given mapping"""

    @abc.abstractmethod
    def knownkeys(self):
        """Return a set of supported resource keys"""

    @abc.abstractmethod
    def lookup(self, mapping, key):
        """Return a resource for the key if available; otherwise None"""

    @abc.abstractmethod
    def populatemap(self, context, origmapping, newmapping):
        """Return a dict of additional mapping items which should be paired
        with the given new mapping"""
641 641
642 642
class nullresourcemapper(resourcemapper):
    """resourcemapper that provides no resources at all (the default)"""

    def availablekeys(self, mapping):
        return set()

    def knownkeys(self):
        return set()

    def lookup(self, mapping, key):
        return None

    def populatemap(self, context, origmapping, newmapping):
        return {}
655 655
656 656
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, resources=None):
        """loader is a callable mapping a template name to a parsed tree;
        filters/defaults are dicts; resources is a resourcemapper."""
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        self._funcs = templatefuncs.funcs  # make this a parameter if needed
        if defaults is None:
            defaults = {}
        if resources is None:
            resources = nullresourcemapper()
        self._defaults = defaults
        self._resources = resources
        self._cache = {}  # key: (func, data)
        self._tmplcache = {}  # literal template: (func, data)

    def overlaymap(self, origmapping, newmapping):
        """Create combined mapping from the original mapping and partial
        mapping to override the original"""
        # do not copy symbols which overrides the defaults depending on
        # new resources, so the defaults will be re-evaluated (issue5612)
        knownres = self._resources.knownkeys()
        newres = self._resources.availablekeys(newmapping)
        mapping = {
            k: v
            for k, v in pycompat.iteritems(origmapping)
            if (
                k in knownres  # not a symbol per self.symbol()
                or newres.isdisjoint(self._defaultrequires(k))
            )
        }
        mapping.update(newmapping)
        mapping.update(
            self._resources.populatemap(self, origmapping, newmapping)
        )
        return mapping

    def _defaultrequires(self, key):
        """Resource keys required by the specified default symbol function"""
        v = self._defaults.get(key)
        if v is None or not callable(v):
            return ()
        return getattr(v, '_requires', ())

    def symbol(self, mapping, key):
        """Resolve symbol to value or function; None if nothing found"""
        v = None
        # resource keys shadow symbols of the same name
        if key not in self._resources.knownkeys():
            v = mapping.get(key)
        if v is None:
            v = self._defaults.get(key)
        return v

    def availableresourcekeys(self, mapping):
        """Return a set of available resource keys based on the given mapping"""
        return self._resources.availablekeys(mapping)

    def knownresourcekeys(self):
        """Return a set of supported resource keys"""
        return self._resources.knownkeys()

    def resource(self, mapping, key):
        """Return internal data (e.g. cache) used for keyword/function
        evaluation"""
        v = self._resources.lookup(mapping, key)
        if v is None:
            raise templateutil.ResourceUnavailable(
                _(b'template resource not available: %s') % key
            )
        return v

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            x = self._loader(t)
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                self._cache[t] = compileexp(x, self, methods)
            except:  # re-raises
                # drop the poison entry so a later attempt can retry
                del self._cache[t]
                raise
        return self._cache[t]

    def _parse(self, tmpl):
        """Parse and cache a literal template"""
        if tmpl not in self._tmplcache:
            x = parse(tmpl)
            self._tmplcache[tmpl] = compileexp(x, self, methods)
        return self._tmplcache[tmpl]

    def preload(self, t):
        """Load, parse, and cache the specified template if available"""
        try:
            self._load(t)
            return True
        except templateutil.TemplateNotFound:
            return False

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return self._expand(func, data, mapping)

    def expand(self, tmpl, mapping):
        """Perform expansion over a literal template

        No user aliases will be expanded since this is supposed to be called
        with an internal template string.
        """
        func, data = self._parse(tmpl)
        return self._expand(func, data, mapping)

    def _expand(self, func, data, mapping):
        # populate additional items only if they don't exist in the given
        # mapping. this is slightly different from overlaymap() because the
        # initial 'revcache' may contain pre-computed items.
        extramapping = self._resources.populatemap(self, {}, mapping)
        if extramapping:
            extramapping.update(mapping)
            mapping = extramapping
        return templateutil.flatten(self, mapping, func(self, mapping, data))
800 800
801 801
def stylelist():
    """Return a comma-separated listing of available map-cmdline styles."""
    path = templatedir()
    if not path:
        return _(b'no templates found, try `hg debuginstall` for more info')
    styles = []
    for filename in os.listdir(path):
        parts = filename.split(b".")
        if parts[-1] in (b'orig', b'rej'):
            continue  # skip patch/merge leftovers
        if parts[0] == b"map-cmdline":
            styles.append(parts[1])
    return b", ".join(sorted(styles))
815 815
816 816
def _open_mapfile(mapfile):
    """Open a style map file for reading; abort with the style list if the
    file does not exist."""
    if not os.path.exists(mapfile):
        raise error.Abort(
            _(b"style '%s' not found") % mapfile,
            hint=_(b"available styles: %s") % stylelist(),
        )
    return util.posixfile(mapfile, b'rb')
824 824
825 825
def _readmapfile(fp, mapfile):
    """Load template elements from the given map file

    Returns (cache, tmap, aliases): inline template fragments, a map of
    template name -> file path, and [templatealias] (name, value) pairs.
    """
    base = os.path.dirname(mapfile)
    conf = config.config()

    def include(rel, remap, sections):
        # resolve %include targets: first next to the map file, then as a
        # packaged mercurial.templates resource, then in templatedir()
        subresource = None
        if base:
            abs = os.path.normpath(os.path.join(base, rel))
            if os.path.isfile(abs):
                subresource = util.posixfile(abs, b'rb')
        if not subresource:
            if pycompat.ossep not in rel:
                abs = rel
                subresource = resourceutil.open_resource(
                    b'mercurial.templates', rel
                )
            else:
                dir = templatedir()
                if dir:
                    abs = os.path.normpath(os.path.join(dir, rel))
                    if os.path.isfile(abs):
                        subresource = util.posixfile(abs, b'rb')
        if subresource:
            data = subresource.read()
            conf.parse(
                abs, data, sections=sections, remap=remap, include=include,
            )

    data = fp.read()
    conf.parse(mapfile, data, remap={b'': b'templates'}, include=include)

    cache = {}
    tmap = {}
    aliases = []

    val = conf.get(b'templates', b'__base__')
    if val and val[0] not in b"'\"":
        # treat as a pointer to a base class for this style
        path = os.path.normpath(os.path.join(base, val))

        # fallback check in template paths
        if not os.path.exists(path):
            dir = templatedir()
            if dir is not None:
                p2 = os.path.normpath(os.path.join(dir, val))
                if os.path.isfile(p2):
                    path = p2
                else:
                    # the base may also be a style directory with a 'map' file
                    p3 = os.path.normpath(os.path.join(p2, b"map"))
                    if os.path.isfile(p3):
                        path = p3

        # entries from the base style are overridden by this map file below
        fp = _open_mapfile(path)
        cache, tmap, aliases = _readmapfile(fp, path)

    for key, val in conf[b'templates'].items():
        if not val:
            raise error.ParseError(
                _(b'missing value'), conf.source(b'templates', key)
            )
        if val[0] in b"'\"":
            # quoted value: inline template fragment
            if val[0] != val[-1]:
                raise error.ParseError(
                    _(b'unmatched quotes'), conf.source(b'templates', key)
                )
            cache[key] = unquotestring(val)
        elif key != b'__base__':
            # unquoted value: path of a template file relative to the map
            tmap[key] = os.path.join(base, val)
    aliases.extend(conf[b'templatealias'].items())
    return cache, tmap, aliases
897 897
898 898
class loader(object):
    """Load template fragments optionally from a map file"""

    def __init__(self, cache, aliases):
        # cache: template name -> raw template text (copied so callers'
        # dicts are not mutated); _map: template name -> file path
        if cache is None:
            cache = {}
        self.cache = cache.copy()
        self._map = {}
        self._aliasmap = _aliasrules.buildmap(aliases)

    def __contains__(self, key):
        return key in self.cache or key in self._map

    def load(self, t):
        """Get parsed tree for the given template name. Use a local cache."""
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self._map[t])
            except KeyError as inst:
                raise templateutil.TemplateNotFound(
                    _(b'"%s" not in template map') % inst.args[0]
                )
            except IOError as inst:
                # re-raise with the template file path added to the message
                reason = _(b'template file %s: %s') % (
                    self._map[t],
                    stringutil.forcebytestr(inst.args[1]),
                )
                raise IOError(inst.args[0], encoding.strfromlocal(reason))
        return self._parse(self.cache[t])

    def _parse(self, tmpl):
        """Parse template text, expanding user aliases if configured."""
        x = parse(tmpl)
        if self._aliasmap:
            x = _aliasrules.expand(self._aliasmap, x)
        return x

    def _findsymbolsused(self, tree, syms):
        # syms is a pair of sets: (keywords seen, filters/functions seen)
        if not tree:
            return
        op = tree[0]
        if op == b'symbol':
            s = tree[1]
            if s in syms[0]:
                return  # avoid recursion: s -> cache[s] -> s
            syms[0].add(s)
            if s in self.cache or s in self._map:
                # s may be a reference for named template
                self._findsymbolsused(self.load(s), syms)
            return
        if op in {b'integer', b'string'}:
            return
        # '{arg|func}' == '{func(arg)}'
        if op == b'|':
            syms[1].add(getsymbol(tree[2]))
            self._findsymbolsused(tree[1], syms)
            return
        if op == b'func':
            syms[1].add(getsymbol(tree[1]))
            self._findsymbolsused(tree[2], syms)
            return
        for x in tree[1:]:
            self._findsymbolsused(x, syms)

    def symbolsused(self, t):
        """Look up (keywords, filters/functions) referenced from the name
        template 't'

        This may load additional templates from the map file.
        """
        syms = (set(), set())
        self._findsymbolsused(self.load(t), syms)
        return syms
971 971
972 972
class templater(object):
    """User-facing template facade combining a loader and an engine."""

    def __init__(
        self,
        filters=None,
        defaults=None,
        resources=None,
        cache=None,
        aliases=(),
        minchunk=1024,
        maxchunk=65536,
    ):
        """Create template engine optionally with preloaded template fragments

        - ``filters``: a dict of functions to transform a value into another.
        - ``defaults``: a dict of symbol values/functions; may be overridden
          by a ``mapping`` dict.
        - ``resources``: a resourcemapper object to look up internal data
          (e.g. cache), inaccessible from user template.
        - ``cache``: a dict of preloaded template fragments.
        - ``aliases``: a list of alias (name, replacement) pairs.

        self.cache may be updated later to register additional template
        fragments.
        """
        allfilters = templatefilters.filters.copy()
        if filters:
            allfilters.update(filters)
        self._loader = loader(cache, aliases)
        self._proc = engine(self._loader.load, allfilters, defaults, resources)
        self._minchunk, self._maxchunk = minchunk, maxchunk

    @classmethod
    def frommapfile(
        cls,
        mapfile,
        fp=None,
        filters=None,
        defaults=None,
        resources=None,
        cache=None,
        minchunk=1024,
        maxchunk=65536,
    ):
        """Create templater from the specified map file

        ``fp`` may supply an already-open file object for ``mapfile``.
        """
        t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
        if not fp:
            fp = _open_mapfile(mapfile)
        cache, tmap, aliases = _readmapfile(fp, mapfile)
        t._loader.cache.update(cache)
        t._loader._map = tmap
        t._loader._aliasmap = _aliasrules.buildmap(aliases)
        return t

    def __contains__(self, key):
        return key in self._loader

    @property
    def cache(self):
        return self._loader.cache

    # for highlight extension to insert one-time 'colorize' filter
    @property
    def _filters(self):
        return self._proc._filters

    @property
    def defaults(self):
        return self._proc._defaults

    def load(self, t):
        """Get parsed tree for the given template name. Use a local cache."""
        return self._loader.load(t)

    def symbolsuseddefault(self):
        """Look up (keywords, filters/functions) referenced from the default
        unnamed template

        This may load additional templates from the map file.
        """
        return self.symbolsused(b'')

    def symbolsused(self, t):
        """Look up (keywords, filters/functions) referenced from the name
        template 't'

        This may load additional templates from the map file.
        """
        return self._loader.symbolsused(t)

    def renderdefault(self, mapping):
        """Render the default unnamed template and return result as string"""
        return self.render(b'', mapping)

    def render(self, t, mapping):
        """Render the specified named template and return result as string"""
        return b''.join(self.generate(t, mapping))

    def generate(self, t, mapping):
        """Return a generator that renders the specified named template and
        yields chunks"""
        stream = self._proc.process(t, mapping)
        if self._minchunk:
            # coalesce tiny chunks to keep downstream writes efficient
            stream = util.increasingchunks(
                stream, min=self._minchunk, max=self._maxchunk
            )
        return stream
1079 1079
1080 1080
def templatedir():
    '''return the directory used for template files, or None.'''
    path = os.path.join(resourceutil.datapath, b'templates')
    path = os.path.normpath(path)
    if os.path.isdir(path):
        return path
    return None
1085 1085
1086 1086
def open_template(name, templatepath=None):
    '''returns a file-like object for the given template, and its full path

    If the name is a relative path and we're in a frozen binary, the template
    will be read from the mercurial.templates package instead. The returned path
    will then be the relative path.

    Raises EnvironmentError if the file cannot be opened, or ImportError/
    OSError if the packaged resource cannot be found (see
    try_open_template() for a non-raising wrapper).
    '''
    if templatepath is None:
        templatepath = templatedir()
    if os.path.isabs(name):
        # An absolute name does not need a template directory; handling it
        # here also avoids os.path.join(None, name) blowing up with a
        # TypeError when templatedir() returned None (e.g. frozen binary
        # without an on-disk templates directory). os.path.join() would
        # have discarded templatepath for an absolute name anyway.
        return name, open(name, mode='rb')
    if templatepath is not None:
        f = os.path.join(templatepath, name)
        return f, open(f, mode='rb')
    else:
        # no on-disk template directory: read the template out of the
        # mercurial.templates package resources instead
        name_parts = pycompat.sysstr(name).split('/')
        package_name = '.'.join(['mercurial', 'templates'] + name_parts[:-1])
        return (
            name,
            resourceutil.open_resource(package_name, name_parts[-1]),
        )
1106
1107
def try_open_template(name, templatepath=None):
    # Non-raising variant of open_template(): returns (None, None) when the
    # template cannot be found or read, instead of propagating the error.
    try:
        return open_template(name, templatepath)
    except (EnvironmentError, ImportError):
        return None, None
General Comments 0
You need to be logged in to leave comments. Login now