##// END OF EJS Templates
manifestcache: actually honor --clear...
marmoute -
r42113:b74ef675 default
parent child Browse files
Show More
@@ -1,3426 +1,3427
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 from .revlogutils import (
91 91 deltas as deltautil
92 92 )
93 93
# Short alias for lockmod.release (used throughout this module).
release = lockmod.release

# Command table decorator: each @command(...) below registers a debug
# command into this module's command table.
command = registrar.command()
97 97
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions: an explicit revlog index file plus two revs,
    # or just two revs resolved against the current repo's changelog.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                              index)
        tonode = store.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        store = repo.changelog
        tonode = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    ancestor = store.ancestor(tonode(rev1), tonode(rev2))
    ui.write('%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
116 116
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (possibly a URL), detect its format, apply it.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
123 123
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Only allowed on an empty repository: node ids are assigned assuming
    # revision numbers start at 0.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass is only used for the progress total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # Hold both locks and a single transaction for the entire build.
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                 # rev number of the most recently created node
        atbranch = 'default'    # branch applied to subsequently created nodes
        nodeids = []            # maps rev number -> node id, for backrefs
        id = 0
        progress.update(id)
        # Second parse pass actually creates the commits/tags/branches.
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' event: create one commit; ps lists its parent revs.
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" carries content that merges cleanly line-by-line.
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's dedicated line so every rev changes "mf".
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every rev.
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # A brand-new file per rev; merges also carry over the
                    # second parent's "nf*" files so they survive the merge.
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # Resolve parent backrefs to node ids; negative/absent
                # parents mean the null revision.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' event: record a local tag for rev `id`.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a' event: switch the branch for subsequent nodes.
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Write accumulated tags once the transaction is complete.
    if tags:
        repo.vfs.write("localtags", "".join(tags))
271 271
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup `gen`.

    With `all`, every delta of every section (changelog, manifest, each
    filelog) is listed; otherwise only changelog node ids are shown.
    `indent` prefixes each output line (used when nested under bundle2
    part output). Note: this consumes the changegroup stream, so it can
    only be done once per unbundler.
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print one section header followed by one line per delta.
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # Sections must be consumed in stream order: changelog, manifest,
        # then one section per filelog until the empty sentinel header.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # Terse mode: only the changelog node ids.
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
300 300
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Markers encoded with a format this client does not understand:
        # report the version instead of failing the whole bundle dump.
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Render each marker with the same formatter/columns used by
        # `hg debugobsolete`.
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
323 323
def _debugphaseheads(ui, data, indent=0):
    """Decode a binary phase-heads payload and print one head per line.

    Each line is "<hex node> <phase name>", prefixed by `indent` spaces.
    """
    pad = ' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        name = phases.phasenames[phase]
        for head in decoded[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), name))
332 332
def _quasirepr(thing):
    """Return a deterministic, repr-like byte string for `thing`.

    Mapping types are rendered with sorted keys so output is stable
    regardless of insertion order; everything else falls back to repr().
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return '{%s}' % b', '.join(pairs)
338 338
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only display parts whose type was requested.
    wanted = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        ui.write(('%s -- %s (mandatory: %r)\n'
                  % (part.type, _quasirepr(part.params), part.mandatory)))
        # Decode well-known part payloads for a detailed listing. The
        # changegroup unbundler is constructed even in quiet mode so a bad
        # version still errors out, matching historical behavior.
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
361 361
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec short-circuits: print the bundlespec and stop.
        if spec:
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files get the richer part-aware dump.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
380 380
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    # bundle2 advertises its own nested capability namespace.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for value in values:
                ui.write((' %s\n') % value)
399 399
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Pass 1: every dirstate entry must be consistent with the parent
    # manifests for its state character ('n', 'r', 'a', 'm').
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Pass 2: every file in the first-parent manifest must be tracked.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: this local used to be named 'error', shadowing the imported
        # 'error' module, so the raise below crashed with an AttributeError
        # instead of aborting cleanly. Renamed to avoid the shadowing.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
427 427
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # Dispatch: --style lists configured styles, otherwise list colors.
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
438 438
def _debugdisplaycolor(ui):
    """Print every color/effect name known to the active color mode."""
    # Work on a copy so the caller's style table is left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-configured color/terminfo keys.
        for key, value in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))
    # Sort names containing '_' after the others so '_background' variants
    # group together behind their base colors.
    def sortkey(item):
        return ('_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
456 456
def _debugdisplaystyle(ui):
    """Print each configured style label rendered with its own effects."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # Column width used to align the effect lists after the labels.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write('\n')
470 470
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
488 488
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Explicit index file: emit that revlog's DAG, labeling any
        # revisions listed on the command line as "rN".
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, parents)) for nodes and ('l', (rev, label))
            # for requested labels, in revision order.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No index file: walk the repo's changelog, optionally annotating
        # branch switches ('a' events) and tags ('l' events).
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event on branch change.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    # Serialize the event stream back into compact DAG text.
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
551 551
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional FILE argument is really the revision.
    if any(opts.get(flag) for flag in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    store = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(store.revision(store.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the more permissive set of date formats.
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
583 583
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision: compressed/uncompressed sizes, how its
        # delta base was chosen, its full delta chain and the chain's size.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parent revisions.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta the base is always the previous rev
            # unless the revision is stored as a full snapshot.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # Plain-mode column headers; templated output uses the keyword names.
    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Chains are numbered by their base revision, in order of appearance.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base through this revision.
        lineardist = revstart + comp - basestart
        # Bytes in that span belonging to other chains (seek overhead).
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard the ratios against zero-size revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # Simulate a sparse read of the chain and measure how much
            # data would actually be fetched, and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
735 735
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling; it overrides --dates.
    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Each dirstate entry is a tuple: (state, mode, size, mtime).
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # mtime of -1 means the timestamp is unknown/unset.
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # S_IFLNK bit: the entry records a symlink.
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            # Legacy tree-walking discovery protocol.
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # Reduce the common set to its heads, as the modern protocol
            # reports, for a comparable summary.
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            # Modern set-based discovery, optionally restricted by --rev.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE: 'any' here shadows the builtin within this scope only.
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
822 822
# Buffer size (4 KiB) used by debugdownload for streaming reads/writes.
_chunksize = 4 << 10
824 824
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    try:
        # Without --output, stream the payload to the ui; otherwise to a
        # buffered file opened at our chunk size.
        dest = ui
        if output:
            dest = open(output, "wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            # Only close what we opened; the ui object must stay usable.
            if output:
                dest.close()
    finally:
        # Close the response handle too; previously it was leaked.
        fh.close()
846 846
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # Quiet/verbose modes: name on its own line, details below.
            fm.write('name', '%s\n', extname)
        else:
            # Normal mode: name plus an inline compatibility note.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
892 892
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # successive transformations applied to the parse tree; each stage name
    # is a valid argument to --show-stage
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    # stages whose tree should be printed
    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                # label the output only when multiple stages may be shown
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
959 959
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: the longest variant name, but never narrower
    # than the 'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so all value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # plain output: booleans become yes/no, strings pass through
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default values
        # can be highlighted by the color extension
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1021 1021
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a probe result as 'yes'/'no'
        return 'yes' if flag else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n')
             % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; leave '(unknown)' if the
    # directory is not writable
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # build the getbundle() argument dict from the command line
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None     # the path that matched an ignore rule, if any
            ignoredata = None  # (ignorefile, lineno, line) for that match
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check whether one of
                    # its parent directories is
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (uipathfn(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % uipathfn(f))
1116 1116
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    # --debug shows full 40-char hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # determine the node-id column width from the first revision; falls back
    # to 12 for an empty store
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        # consistently use bytes literals for field names and formats, like
        # the fm.plain() calls above (previously a mix of b'' and '')
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1152 1152
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)
        # one edge per parent; the second parent is omitted when null
        ui.write("\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write("}\n")
1167 1167
@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be loaded/exercised before asking for stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write('%s: %d\n' % (key, value))
1177 1177
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # count of detected problems; also the command's return value
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS/SNI support of the Python build
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # only probe the C extensions when the policy allows using them
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered, actually available, and wire-capable
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused as a "templates OK" flag from here on
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1344 1344
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one '1'/'0' character per queried id
    ui.write("%s\n" % "".join("1" if f else "0" for f in flags))
1358 1358
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: the actual implementation lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1363 1363
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # -L/-W: blindly remove the lock files and exit
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # -s/-S: take the lock(s) non-blocking and hold them until the user
    # confirms release (or the process is interrupted)
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report the state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock ourselves, so it was free
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = ('user %s, process %s, host %s'
                                  % (user or b'None', pid, host))
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1460 1460
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache only exists on revlog-based manifest storage
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _("Current revlog implementation doesn't appear to have a "
                    "manifest fulltext cache\n")
            raise error.Abort(msg)

    if opts.get(r'clear'):
        with repo.lock():
            cache = getcache()
            # also drop the persisted on-disk data; clearing only the
            # in-memory LRU would let the cache be repopulated from disk
            # on the next read, making --clear a no-op
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.lock():
            try:
                m = repo.manifestlog
                manifest = m[m.getstorage(b'').lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read()  # stores revision in cache too
            return

    # no option given: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_('cache empty\n'))
    else:
        ui.write(
            _('cache contains %d manifest entries, in order of most to '
              'least recent:\n') % (len(cache),))
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.get(nodeid)
            size = len(data)
            totalsize += size + 24   # 20 bytes nodeid, 4 bytes size
            ui.write(_('id: %s, size %s\n') % (
                hex(nodeid), util.bytecount(size)))
        ondisk = cache._opener.stat('manifestfulltextcache').st_size
        ui.write(
            _('total cache data size %s, on-disk %s\n') % (
                util.bytecount(totalsize), util.bytecount(ondisk))
        )
1513 1514
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record; fields are NUL-separated
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: NUL-separated key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1612 1613
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # the 'branches' namespace is handled separately below so that only
    # open branches are offered (matching the command's historical output)
    for namespace, ns in repo.names.iteritems():
        if namespace != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # an empty argument list means "complete everything"
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1632 1633
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete: remove the markers at the given indices and exit
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1749 1750
@command('debugp1copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # one "source -> destination" line per copy recorded against p1
    renames = ctx.p1copies()
    for dest, source in renames.items():
        ui.write('%s -> %s\n' % (source, dest))
1760 1761
@command('debugp2copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUG FIX: this function was previously (mis)named ``debugp1copies``,
    # which shadowed the module-level name of the p1 variant defined above.
    # Command registration happens via the decorator, so the registered
    # command name was already correct; only the Python identifier was wrong.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # one "source -> destination" line per copy recorded against p2
    for dst, src in ctx.p2copies().items():
        ui.write('%s -> %s\n' % (src, dst))
1771 1772
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec. ``acceptable`` is a
        # string of dirstate state characters ('n', 'm', 'a', 'r') that a
        # file's state must match to be offered.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # specs outside the repository cannot match anything
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # make the spec relative to the repo root
        spec = spec[len(rootdir):]
        # dirstate always stores '/'-separated paths; translate back and
        # forth only on platforms with a different separator (e.g. Windows)
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            # st[0] is the dirstate state character for this file
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator and offer
                # the containing directory instead of the full path
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter flags;
    # no flags means "accept everything" ('nmar' fallback below)
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1836 1837
@command('debugpathcopies',
         cmdutil.walkopts,
         'hg debugpathcopies REV1 REV2 [FILE]',
         inferrepo=True)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then restrict the copy trace to the requested
    # file patterns (matched against the first revision).
    srcctx = scmutil.revsingle(repo, rev1)
    dstctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(srcctx, pats, opts)
    copymap = copies.pathcopies(srcctx, dstctx, matcher)
    for dest, source in sorted(copymap.items()):
        ui.write('%s -> %s\n' % (source, dest))
1848 1849
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1867 1868
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool is implemented as a temporary ui.forcemerge override
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # swallow _picktool's diagnostic output unless --debug;
                # the buffer must be popped even if _picktool raises
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1946 1947
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for key, value in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(key),
                                   stringutil.escapestr(value)))
        return

    # update mode: conditionally set KEY from OLD to NEW
    key, old, new = keyinfo
    with target.commandexecutor() as executor:
        result = executor.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(result) + '\n')
    return not result
1974 1975
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions

    Prints both vectors, their depths, and the computed relation
    between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # BUG FIX: previously there was no fallback branch, so 'rel'
        # stayed unbound when none of the comparisons held and the final
        # ui.write raised UnboundLocalError.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1995 1996
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # files the manifest knows but the dirstate does not
            missing = inmanifest - indirstate
            # tracked-but-unmanifested files, excluding pending adds
            extra = indirstate - inmanifest
            notadded = {f for f in extra if dirstate[f] != 'a'}
            changedfiles = missing | notadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2033 2034
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # thin wrapper: all of the work happens in repair.rebuildfncache()
    repair.rebuildfncache(ui, repo)
2038 2039
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (oldpath, oldnode) or a false value
        renameinfo = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renameinfo:
            oldpath, oldnode = renameinfo
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, oldpath, hex(oldnode)))
        else:
            ui.write(_("%s not renamed\n") % relpath)
2056 2057
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump: raw per-revision table; bypasses all statistics below
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": treat the rev as its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the running set of head revisions seen so far
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # decode the revlog version/flag word
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold ``size`` into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (or an empty text)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta: extend the base revision's chain info
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot, classified by its depth
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify by what it is computed against
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # NOTE(review): the divisions below assume at least one revision of the
    # relevant category (e.g. numrevs > 0 and numfull > 0); an empty revlog
    # would raise ZeroDivisionError here — confirm against callers.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # templates for right-aligned numbers, with and without percentage
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the templates above
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a chunk-type byte
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2356 2357
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    # only two table layouts are supported: 0 (legacy) and 1 (with flags)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # --debug shows full 40-char hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the first node to size the id columns consistently
        idlen = len(shortfn(r.node(i)))
        break

    # header row; columns depend on format and verbosity
    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2421 2422
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # ordered pipeline of (stage name, transform) applied to the parse tree
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stages to print unconditionally vs. only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping every intermediate tree for --verify-optimized
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # mismatch: emit a unified-diff-style comparison of the two rev lists
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    # normal mode: evaluate the final tree and print the matching revisions
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2524 2525
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive log destinations
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    # hand the process's stdio to the SSH wire protocol server; blocks
    # until the client disconnects
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2561 2562
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # Resolve both revisions up front; rev2 defaults to the null revision.
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        repo.setparents(p1, p2)
    finally:
        wlock.release()
2579 2580
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Map scheme to the port used when the URL does not carry one.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_("only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_("malformed port number in URL"))

    from . import win32

    # Certificate verification is intentionally disabled: we only want the
    # peer's certificate bytes, not a validated session.
    sock = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                           cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        if win32.checkcertificatechain(cert, build=False):
            ui.status(_('full certificate chain is available\n'))
        else:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
    finally:
        sock.close()
2640 2641
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    """dump the substate (path, source, revision) of each subrepo."""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2651 2652
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # The cache dict is shared across successorssets() calls so repeated
    # computation is amortized over all requested revisions.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % bytes(ctx))
        successorsets = obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache)
        for successorset in successorsets:
            # One line per set: a leading space before every node.
            for node in successorset:
                ui.write(' ')
                ui.write(short(node))
            ui.write('\n')
2704 2705
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Parse -D KEY=VALUE definitions into template properties. A missing
    # '=', an empty key, or the reserved name 'ui' are all rejected.
    props = {}
    for spec in opts[r'define']:
        try:
            key, val = (part.strip() for part in spec.split('=', 1))
            if not key or key == 'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % spec)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with no changeset context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2761 2762
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fixed typo: was 'respose'; now matches debuguiprompt's output label.
    ui.write(('response: %s\n') % r)
2769 2770
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2777 2778
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both locks (wlock first, per locking order) while caches rebuild.
    wlock = repo.wlock()
    try:
        lock = repo.lock()
        try:
            repo.updatecaches(full=True)
        finally:
            lock.release()
    finally:
        wlock.release()
2783 2784
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All the real work lives in the upgrade module; propagate its result.
    ret = upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
                              backup=backup)
    return ret
2810 2811
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite path separators when 'ui.slash' is requested and the
    # platform separator differs. (Renamed from 'abs', which shadowed the
    # builtin; replaced the 'and/or' ternary with a conditional expression.)
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = util.normpath
    else:
        f = lambda fn: fn
    # Column widths are sized to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(fn) for fn in items),
        max(len(repo.pathto(fn)) for fn in items))
    for fn in items:
        line = fmt % (fn, f(repo.pathto(fn)),
                      'exact' if m.exact(fn) else '')
        ui.write("%s\n" % line.rstrip())
2831 2832
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get('divergentnodes')
        if divergent:
            # List every divergent node with its phase, trailing space kept.
            parts = ['%s (%s)' % (c.hex(), c.phasestr()) for c in divergent]
            dnodes = ' '.join(parts) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2842 2843
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise argument passing over the wire protocol."""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only the test options remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only the options that were actually set.
    args = pycompat.strkwargs(
        dict((k, v) for k, v in opts.iteritems() if v))
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2866 2867
2867 2868 def _parsewirelangblocks(fh):
2868 2869 activeaction = None
2869 2870 blocklines = []
2870 2871 lastindent = 0
2871 2872
2872 2873 for line in fh:
2873 2874 line = line.rstrip()
2874 2875 if not line:
2875 2876 continue
2876 2877
2877 2878 if line.startswith(b'#'):
2878 2879 continue
2879 2880
2880 2881 if not line.startswith(b' '):
2881 2882 # New block. Flush previous one.
2882 2883 if activeaction:
2883 2884 yield activeaction, blocklines
2884 2885
2885 2886 activeaction = line
2886 2887 blocklines = []
2887 2888 lastindent = 0
2888 2889 continue
2889 2890
2890 2891 # Else we start with an indent.
2891 2892
2892 2893 if not activeaction:
2893 2894 raise error.Abort(_('indented line outside of block'))
2894 2895
2895 2896 indent = len(line) - len(line.lstrip())
2896 2897
2897 2898 # If this line is indented more than the last line, concatenate it.
2898 2899 if indent > lastindent and blocklines:
2899 2900 blocklines[-1] += line.lstrip()
2900 2901 else:
2901 2902 blocklines.append(line)
2902 2903 lastindent = indent
2903 2904
2904 2905 # Flush last block.
2905 2906 if activeaction:
2906 2907 yield activeaction, blocklines
2907 2908
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
        namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        # Fix: the hint previously omitted "http2" even though the check
        # above accepts it.
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

                if not isinstance(peer, httppeer.httpv2peer):
                    raise error.Abort(_('could not instantiate HTTP peer for '
                                        'wire protocol version 2'),
                                      hint=_('the server may not have the '
                                             'feature enabled or is not '
                                             'allowing this client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True, indent=2))
                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True, indent=2))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                # Fix: the message previously lacked the closing '"'.
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fix: open() the path that follows the keyword;
                    # previously the *list* returned by split() was passed
                    # to open(), raising TypeError whenever BODYFILE was
                    # used.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body),
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,144 +1,150
1 1 Source bundle was generated with the following script:
2 2
3 3 # hg init
4 4 # echo a > a
5 5 # ln -s a l
6 6 # hg ci -Ama -d'0 0'
7 7 # mkdir b
8 8 # echo a > b/a
9 9 # chmod +x b/a
10 10 # hg ci -Amb -d'1 0'
11 11
12 12 $ hg init
13 13 $ hg unbundle "$TESTDIR/bundles/test-manifest.hg"
14 14 adding changesets
15 15 adding manifests
16 16 adding file changes
17 17 added 2 changesets with 3 changes to 3 files
18 18 new changesets b73562a03cfe:5bdc995175ba (2 drafts)
19 19 (run 'hg update' to get a working copy)
20 20
21 21 The next call is expected to return nothing:
22 22
23 23 $ hg manifest
24 24
25 25 $ hg co
26 26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 27
28 28 $ hg manifest
29 29 a
30 30 b/a
31 31 l
32 32
33 33 $ hg files -vr .
34 34 2 a
35 35 2 x b/a
36 36 1 l l
37 37 $ hg files -r . -X b
38 38 a
39 39 l
40 40 $ hg files -T '{path} {size} {flags}\n'
41 41 a 2
42 42 b/a 2 x
43 43 l 1 l
44 44 $ hg files -T '{path} {node|shortest}\n' -r.
45 45 a 5bdc
46 46 b/a 5bdc
47 47 l 5bdc
48 48
49 49 $ hg manifest -v
50 50 644 a
51 51 755 * b/a
52 52 644 @ l
53 53 $ hg manifest -T '{path} {rev}\n'
54 54 a 1
55 55 b/a 1
56 56 l 1
57 57
58 58 $ hg manifest --debug
59 59 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
60 60 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
61 61 047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l
62 62
63 63 $ hg manifest -r 0
64 64 a
65 65 l
66 66
67 67 $ hg manifest -r 1
68 68 a
69 69 b/a
70 70 l
71 71
72 72 $ hg manifest -r tip
73 73 a
74 74 b/a
75 75 l
76 76
77 77 $ hg manifest tip
78 78 a
79 79 b/a
80 80 l
81 81
82 82 $ hg manifest --all
83 83 a
84 84 b/a
85 85 l
86 86
87 87 The next two calls are expected to abort:
88 88
89 89 $ hg manifest -r 2
90 90 abort: unknown revision '2'!
91 91 [255]
92 92
93 93 $ hg manifest -r tip tip
94 94 abort: please specify just one revision
95 95 [255]
96 96
97 97 Testing the manifest full text cache utility
98 98 --------------------------------------------
99 99
100 100 Reminder of the manifest log content
101 101
102 102 $ hg log --debug | grep 'manifest:'
103 103 manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7
104 104 manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf
105 105
106 106 Showing the content of the caches after the above operations
107 107
108 108 $ hg debugmanifestfulltextcache
109 109 cache empty
110 110
111 111 Adding a new persistent entry in the cache
112 112
113 113 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
114 114
115 115 $ hg debugmanifestfulltextcache
116 116 cache contains 1 manifest entries, in order of most to least recent:
117 117 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
118 118 total cache data size 157 bytes, on-disk 157 bytes
119 119
120 120 Check we don't duplicate the entry (added from the debug command)
121 121
122 122 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
123 123 $ hg debugmanifestfulltextcache
124 124 cache contains 1 manifest entries, in order of most to least recent:
125 125 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
126 126 total cache data size 157 bytes, on-disk 157 bytes
127 127
128 128 Adding a second entry
129 129
130 130 $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf
131 131 $ hg debugmanifestfulltextcache
132 132 cache contains 2 manifest entries, in order of most to least recent:
133 133 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
134 134 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
135 135 total cache data size 268 bytes, on-disk 268 bytes
136 136
137 137 Accessing the initial entry again refreshes its order
138 138
139 139 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
140 140 $ hg debugmanifestfulltextcache
141 141 cache contains 2 manifest entries, in order of most to least recent:
142 142 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
143 143 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
144 144 total cache data size 268 bytes, on-disk 268 bytes
145
146 Check cache clearing
147
148 $ hg debugmanifestfulltextcache --clear
149 $ hg debugmanifestfulltextcache
150 cache empty
General Comments 0
You need to be logged in to leave comments. Login now