manifestcache: only lock the repository if the debug command touches the cache...
marmoute
r42108:fbee66c9 default
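The commit below narrows the scope of repo.lock() in debugmanifestfulltextcache: instead of holding the store lock for the whole command, the lock is now taken only around the --clear and --add paths that actually modify the manifest fulltext cache, while the read-only display path runs without it. A minimal, self-contained Python sketch of that lock-scoping pattern follows (illustrative only; the names used are stand-ins, not Mercurial APIs):

import threading

# Sketch of the lock-scoping pattern applied by this commit (assumption:
# simplified stand-ins, not Mercurial code). The "store lock" is held only
# while mutating the cache; the read-only report runs unlocked.

_store_lock = threading.Lock()   # stands in for repo.lock()
_cache = {}                      # stands in for the manifest fulltext cache

def debug_cache_command(clear=False, add=None):
    if clear:
        with _store_lock:        # write path: lock held only for the clear
            _cache.clear()
    if add is not None:
        with _store_lock:        # write path: lock held only for the add
            _cache[add] = b'<manifest text>'
    # read-only display path, deliberately unlocked
    print('cache contains %d entries' % len(_cache))

if __name__ == '__main__':
    debug_cache_command(add='node-1')
    debug_cache_command(clear=True)

In the diff itself the same idea appears as a getcache() helper plus two narrow "with repo.lock():" blocks around the clear and add branches, replacing the single lock that previously wrapped the whole function body.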
@@ -1,3420 +1,3425
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 from .revlogutils import (
91 91 deltas as deltautil
92 92 )
93 93
94 94 release = lockmod.release
95 95
96 96 command = registrar.command()
97 97
98 98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 99 def debugancestor(ui, repo, *args):
100 100 """find the ancestor revision of two revisions in a given index"""
101 101 if len(args) == 3:
102 102 index, rev1, rev2 = args
103 103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 104 lookup = r.lookup
105 105 elif len(args) == 2:
106 106 if not repo:
107 107 raise error.Abort(_('there is no Mercurial repository here '
108 108 '(.hg not found)'))
109 109 rev1, rev2 = args
110 110 r = repo.changelog
111 111 lookup = repo.lookup
112 112 else:
113 113 raise error.Abort(_('either two or three arguments required'))
114 114 a = r.ancestor(lookup(rev1), lookup(rev2))
115 115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 116
117 117 @command('debugapplystreamclonebundle', [], 'FILE')
118 118 def debugapplystreamclonebundle(ui, repo, fname):
119 119 """apply a stream clone bundle file"""
120 120 f = hg.openpath(ui, fname)
121 121 gen = exchange.readbundle(ui, f, fname)
122 122 gen.apply(repo)
123 123
124 124 @command('debugbuilddag',
125 125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 127 ('n', 'new-file', None, _('add new file at each rev'))],
128 128 _('[OPTION]... [TEXT]'))
129 129 def debugbuilddag(ui, repo, text=None,
130 130 mergeable_file=False,
131 131 overwritten_file=False,
132 132 new_file=False):
133 133 """builds a repo with a given DAG from scratch in the current empty repo
134 134
135 135 The description of the DAG is read from stdin if not given on the
136 136 command line.
137 137
138 138 Elements:
139 139
140 140 - "+n" is a linear run of n nodes based on the current default parent
141 141 - "." is a single node based on the current default parent
142 142 - "$" resets the default parent to null (implied at the start);
143 143 otherwise the default parent is always the last node created
144 144 - "<p" sets the default parent to the backref p
145 145 - "*p" is a fork at parent p, which is a backref
146 146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 147 - "/p2" is a merge of the preceding node and p2
148 148 - ":tag" defines a local tag for the preceding node
149 149 - "@branch" sets the named branch for subsequent nodes
150 150 - "#...\\n" is a comment up to the end of the line
151 151
152 152 Whitespace between the above elements is ignored.
153 153
154 154 A backref is either
155 155
156 156 - a number n, which references the node curr-n, where curr is the current
157 157 node, or
158 158 - the name of a local tag you placed earlier using ":tag", or
159 159 - empty to denote the default parent.
160 160
161 161 All string-valued elements are either strictly alphanumeric, or must
162 162 be enclosed in double quotes ("..."), with "\\" as escape character.
163 163 """
164 164
165 165 if text is None:
166 166 ui.status(_("reading DAG from stdin\n"))
167 167 text = ui.fin.read()
168 168
169 169 cl = repo.changelog
170 170 if len(cl) > 0:
171 171 raise error.Abort(_('repository is not empty'))
172 172
173 173 # determine number of revs in DAG
174 174 total = 0
175 175 for type, data in dagparser.parsedag(text):
176 176 if type == 'n':
177 177 total += 1
178 178
179 179 if mergeable_file:
180 180 linesperrev = 2
181 181 # make a file with k lines per rev
182 182 initialmergedlines = ['%d' % i
183 183 for i in pycompat.xrange(0, total * linesperrev)]
184 184 initialmergedlines.append("")
185 185
186 186 tags = []
187 187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 188 total=total)
189 189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 190 at = -1
191 191 atbranch = 'default'
192 192 nodeids = []
193 193 id = 0
194 194 progress.update(id)
195 195 for type, data in dagparser.parsedag(text):
196 196 if type == 'n':
197 197 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 198 id, ps = data
199 199
200 200 files = []
201 201 filecontent = {}
202 202
203 203 p2 = None
204 204 if mergeable_file:
205 205 fn = "mf"
206 206 p1 = repo[ps[0]]
207 207 if len(ps) > 1:
208 208 p2 = repo[ps[1]]
209 209 pa = p1.ancestor(p2)
210 210 base, local, other = [x[fn].data() for x in (pa, p1,
211 211 p2)]
212 212 m3 = simplemerge.Merge3Text(base, local, other)
213 213 ml = [l.strip() for l in m3.merge_lines()]
214 214 ml.append("")
215 215 elif at > 0:
216 216 ml = p1[fn].data().split("\n")
217 217 else:
218 218 ml = initialmergedlines
219 219 ml[id * linesperrev] += " r%i" % id
220 220 mergedtext = "\n".join(ml)
221 221 files.append(fn)
222 222 filecontent[fn] = mergedtext
223 223
224 224 if overwritten_file:
225 225 fn = "of"
226 226 files.append(fn)
227 227 filecontent[fn] = "r%i\n" % id
228 228
229 229 if new_file:
230 230 fn = "nf%i" % id
231 231 files.append(fn)
232 232 filecontent[fn] = "r%i\n" % id
233 233 if len(ps) > 1:
234 234 if not p2:
235 235 p2 = repo[ps[1]]
236 236 for fn in p2:
237 237 if fn.startswith("nf"):
238 238 files.append(fn)
239 239 filecontent[fn] = p2[fn].data()
240 240
241 241 def fctxfn(repo, cx, path):
242 242 if path in filecontent:
243 243 return context.memfilectx(repo, cx, path,
244 244 filecontent[path])
245 245 return None
246 246
247 247 if len(ps) == 0 or ps[0] < 0:
248 248 pars = [None, None]
249 249 elif len(ps) == 1:
250 250 pars = [nodeids[ps[0]], None]
251 251 else:
252 252 pars = [nodeids[p] for p in ps]
253 253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 254 date=(id, 0),
255 255 user="debugbuilddag",
256 256 extra={'branch': atbranch})
257 257 nodeid = repo.commitctx(cx)
258 258 nodeids.append(nodeid)
259 259 at = id
260 260 elif type == 'l':
261 261 id, name = data
262 262 ui.note(('tag %s\n' % name))
263 263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 264 elif type == 'a':
265 265 ui.note(('branch %s\n' % data))
266 266 atbranch = data
267 267 progress.update(id)
268 268
269 269 if tags:
270 270 repo.vfs.write("localtags", "".join(tags))
271 271
272 272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 273 indent_string = ' ' * indent
274 274 if all:
275 275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 276 % indent_string)
277 277
278 278 def showchunks(named):
279 279 ui.write("\n%s%s\n" % (indent_string, named))
280 280 for deltadata in gen.deltaiter():
281 281 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 282 ui.write("%s%s %s %s %s %s %d\n" %
283 283 (indent_string, hex(node), hex(p1), hex(p2),
284 284 hex(cs), hex(deltabase), len(delta)))
285 285
286 286 chunkdata = gen.changelogheader()
287 287 showchunks("changelog")
288 288 chunkdata = gen.manifestheader()
289 289 showchunks("manifest")
290 290 for chunkdata in iter(gen.filelogheader, {}):
291 291 fname = chunkdata['filename']
292 292 showchunks(fname)
293 293 else:
294 294 if isinstance(gen, bundle2.unbundle20):
295 295 raise error.Abort(_('use debugbundle2 for this file'))
296 296 chunkdata = gen.changelogheader()
297 297 for deltadata in gen.deltaiter():
298 298 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 299 ui.write("%s%s\n" % (indent_string, hex(node)))
300 300
301 301 def _debugobsmarkers(ui, part, indent=0, **opts):
302 302 """display version and markers contained in 'data'"""
303 303 opts = pycompat.byteskwargs(opts)
304 304 data = part.read()
305 305 indent_string = ' ' * indent
306 306 try:
307 307 version, markers = obsolete._readmarkers(data)
308 308 except error.UnknownVersion as exc:
309 309 msg = "%sunsupported version: %s (%d bytes)\n"
310 310 msg %= indent_string, exc.version, len(data)
311 311 ui.write(msg)
312 312 else:
313 313 msg = "%sversion: %d (%d bytes)\n"
314 314 msg %= indent_string, version, len(data)
315 315 ui.write(msg)
316 316 fm = ui.formatter('debugobsolete', opts)
317 317 for rawmarker in sorted(markers):
318 318 m = obsutil.marker(None, rawmarker)
319 319 fm.startitem()
320 320 fm.plain(indent_string)
321 321 cmdutil.showmarker(fm, m)
322 322 fm.end()
323 323
324 324 def _debugphaseheads(ui, data, indent=0):
325 325 """display version and markers contained in 'data'"""
326 326 indent_string = ' ' * indent
327 327 headsbyphase = phases.binarydecode(data)
328 328 for phase in phases.allphases:
329 329 for head in headsbyphase[phase]:
330 330 ui.write(indent_string)
331 331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332 332
333 333 def _quasirepr(thing):
334 334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 335 return '{%s}' % (
336 336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 337 return pycompat.bytestr(repr(thing))
338 338
339 339 def _debugbundle2(ui, gen, all=None, **opts):
340 340 """lists the contents of a bundle2"""
341 341 if not isinstance(gen, bundle2.unbundle20):
342 342 raise error.Abort(_('not a bundle2 file'))
343 343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 344 parttypes = opts.get(r'part_type', [])
345 345 for part in gen.iterparts():
346 346 if parttypes and part.type not in parttypes:
347 347 continue
348 348 msg = '%s -- %s (mandatory: %r)\n'
349 349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 350 if part.type == 'changegroup':
351 351 version = part.params.get('version', '01')
352 352 cg = changegroup.getunbundler(version, part, 'UN')
353 353 if not ui.quiet:
354 354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 355 if part.type == 'obsmarkers':
356 356 if not ui.quiet:
357 357 _debugobsmarkers(ui, part, indent=4, **opts)
358 358 if part.type == 'phase-heads':
359 359 if not ui.quiet:
360 360 _debugphaseheads(ui, part, indent=4)
361 361
362 362 @command('debugbundle',
363 363 [('a', 'all', None, _('show all details')),
364 364 ('', 'part-type', [], _('show only the named part type')),
365 365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 366 _('FILE'),
367 367 norepo=True)
368 368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 369 """lists the contents of a bundle"""
370 370 with hg.openpath(ui, bundlepath) as f:
371 371 if spec:
372 372 spec = exchange.getbundlespec(ui, f)
373 373 ui.write('%s\n' % spec)
374 374 return
375 375
376 376 gen = exchange.readbundle(ui, f, bundlepath)
377 377 if isinstance(gen, bundle2.unbundle20):
378 378 return _debugbundle2(ui, gen, all=all, **opts)
379 379 _debugchangegroup(ui, gen, all=all, **opts)
380 380
381 381 @command('debugcapabilities',
382 382 [], _('PATH'),
383 383 norepo=True)
384 384 def debugcapabilities(ui, path, **opts):
385 385 """lists the capabilities of a remote peer"""
386 386 opts = pycompat.byteskwargs(opts)
387 387 peer = hg.peer(ui, opts, path)
388 388 caps = peer.capabilities()
389 389 ui.write(('Main capabilities:\n'))
390 390 for c in sorted(caps):
391 391 ui.write((' %s\n') % c)
392 392 b2caps = bundle2.bundle2caps(peer)
393 393 if b2caps:
394 394 ui.write(('Bundle2 capabilities:\n'))
395 395 for key, values in sorted(b2caps.iteritems()):
396 396 ui.write((' %s\n') % key)
397 397 for v in values:
398 398 ui.write((' %s\n') % v)
399 399
400 400 @command('debugcheckstate', [], '')
401 401 def debugcheckstate(ui, repo):
402 402 """validate the correctness of the current dirstate"""
403 403 parent1, parent2 = repo.dirstate.parents()
404 404 m1 = repo[parent1].manifest()
405 405 m2 = repo[parent2].manifest()
406 406 errors = 0
407 407 for f in repo.dirstate:
408 408 state = repo.dirstate[f]
409 409 if state in "nr" and f not in m1:
410 410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 411 errors += 1
412 412 if state in "a" and f in m1:
413 413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 414 errors += 1
415 415 if state in "m" and f not in m1 and f not in m2:
416 416 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 417 (f, state))
418 418 errors += 1
419 419 for f in m1:
420 420 state = repo.dirstate[f]
421 421 if state not in "nrm":
422 422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 423 errors += 1
424 424 if errors:
425 425 error = _(".hg/dirstate inconsistent with current parent's manifest")
426 426 raise error.Abort(error)
427 427
428 428 @command('debugcolor',
429 429 [('', 'style', None, _('show all configured styles'))],
430 430 'hg debugcolor')
431 431 def debugcolor(ui, repo, **opts):
432 432 """show available color, effects or style"""
433 433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 434 if opts.get(r'style'):
435 435 return _debugdisplaystyle(ui)
436 436 else:
437 437 return _debugdisplaycolor(ui)
438 438
439 439 def _debugdisplaycolor(ui):
440 440 ui = ui.copy()
441 441 ui._styles.clear()
442 442 for effect in color._activeeffects(ui).keys():
443 443 ui._styles[effect] = effect
444 444 if ui._terminfoparams:
445 445 for k, v in ui.configitems('color'):
446 446 if k.startswith('color.'):
447 447 ui._styles[k] = k[6:]
448 448 elif k.startswith('terminfo.'):
449 449 ui._styles[k] = k[9:]
450 450 ui.write(_('available colors:\n'))
451 451 # sort label with a '_' after the other to group '_background' entry.
452 452 items = sorted(ui._styles.items(),
453 453 key=lambda i: ('_' in i[0], i[0], i[1]))
454 454 for colorname, label in items:
455 455 ui.write(('%s\n') % colorname, label=label)
456 456
457 457 def _debugdisplaystyle(ui):
458 458 ui.write(_('available style:\n'))
459 459 if not ui._styles:
460 460 return
461 461 width = max(len(s) for s in ui._styles)
462 462 for label, effects in sorted(ui._styles.items()):
463 463 ui.write('%s' % label, label=label)
464 464 if effects:
465 465 # 50
466 466 ui.write(': ')
467 467 ui.write(' ' * (max(0, width - len(label))))
468 468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 469 ui.write('\n')
470 470
471 471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 472 def debugcreatestreamclonebundle(ui, repo, fname):
473 473 """create a stream clone bundle file
474 474
475 475 Stream bundles are special bundles that are essentially archives of
476 476 revlog files. They are commonly used for cloning very quickly.
477 477 """
478 478 # TODO we may want to turn this into an abort when this functionality
479 479 # is moved into `hg bundle`.
480 480 if phases.hassecret(repo):
481 481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 482 'revisions)\n'))
483 483
484 484 requirements, gen = streamclone.generatebundlev1(repo)
485 485 changegroup.writechunks(ui, gen, fname)
486 486
487 487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 488
489 489 @command('debugdag',
490 490 [('t', 'tags', None, _('use tags as labels')),
491 491 ('b', 'branches', None, _('annotate with branch names')),
492 492 ('', 'dots', None, _('use dots for runs')),
493 493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 494 _('[OPTION]... [FILE [REV]...]'),
495 495 optionalrepo=True)
496 496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 497 """format the changelog or an index DAG as a concise textual description
498 498
499 499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 500 revision numbers, they get labeled in the output as rN.
501 501
502 502 Otherwise, the changelog DAG of the current repo is emitted.
503 503 """
504 504 spaces = opts.get(r'spaces')
505 505 dots = opts.get(r'dots')
506 506 if file_:
507 507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 508 file_)
509 509 revs = set((int(r) for r in revs))
510 510 def events():
511 511 for r in rlog:
512 512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 513 if p != -1))
514 514 if r in revs:
515 515 yield 'l', (r, "r%i" % r)
516 516 elif repo:
517 517 cl = repo.changelog
518 518 tags = opts.get(r'tags')
519 519 branches = opts.get(r'branches')
520 520 if tags:
521 521 labels = {}
522 522 for l, n in repo.tags().items():
523 523 labels.setdefault(cl.rev(n), []).append(l)
524 524 def events():
525 525 b = "default"
526 526 for r in cl:
527 527 if branches:
528 528 newb = cl.read(cl.node(r))[5]['branch']
529 529 if newb != b:
530 530 yield 'a', newb
531 531 b = newb
532 532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 533 if p != -1))
534 534 if tags:
535 535 ls = labels.get(r)
536 536 if ls:
537 537 for l in ls:
538 538 yield 'l', (r, l)
539 539 else:
540 540 raise error.Abort(_('need repo for changelog dag'))
541 541
542 542 for line in dagparser.dagtextlines(events(),
543 543 addspaces=spaces,
544 544 wraplabels=True,
545 545 wrapannotations=True,
546 546 wrapnonlinear=dots,
547 547 usedots=dots,
548 548 maxlinewidth=70):
549 549 ui.write(line)
550 550 ui.write("\n")
551 551
552 552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 553 def debugdata(ui, repo, file_, rev=None, **opts):
554 554 """dump the contents of a data file revision"""
555 555 opts = pycompat.byteskwargs(opts)
556 556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 557 if rev is not None:
558 558 raise error.CommandError('debugdata', _('invalid arguments'))
559 559 file_, rev = None, file_
560 560 elif rev is None:
561 561 raise error.CommandError('debugdata', _('invalid arguments'))
562 562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 563 try:
564 564 ui.write(r.revision(r.lookup(rev), raw=True))
565 565 except KeyError:
566 566 raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
568 568 @command('debugdate',
569 569 [('e', 'extended', None, _('try extended date formats'))],
570 570 _('[-e] DATE [RANGE]'),
571 571 norepo=True, optionalrepo=True)
572 572 def debugdate(ui, date, range=None, **opts):
573 573 """parse and display a date"""
574 574 if opts[r"extended"]:
575 575 d = dateutil.parsedate(date, util.extendeddateformats)
576 576 else:
577 577 d = dateutil.parsedate(date)
578 578 ui.write(("internal: %d %d\n") % d)
579 579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 580 if range:
581 581 m = dateutil.matchdate(range)
582 582 ui.write(("match: %s\n") % m(d[0]))
583 583
584 584 @command('debugdeltachain',
585 585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 586 _('-c|-m|FILE'),
587 587 optionalrepo=True)
588 588 def debugdeltachain(ui, repo, file_=None, **opts):
589 589 """dump information about delta chains in a revlog
590 590
591 591 Output can be templatized. Available template keywords are:
592 592
593 593 :``rev``: revision number
594 594 :``chainid``: delta chain identifier (numbered by unique base)
595 595 :``chainlen``: delta chain length to this revision
596 596 :``prevrev``: previous revision in delta chain
597 597 :``deltatype``: role of delta / how it was computed
598 598 :``compsize``: compressed size of revision
599 599 :``uncompsize``: uncompressed size of revision
600 600 :``chainsize``: total size of compressed revisions in chain
601 601 :``chainratio``: total chain size divided by uncompressed revision size
602 602 (new delta chains typically start at ratio 2.00)
603 603 :``lindist``: linear distance from base revision in delta chain to end
604 604 of this revision
605 605 :``extradist``: total size of revisions not part of this delta chain from
606 606 base of delta chain to end of this revision; a measurement
607 607 of how much extra data we need to read/seek across to read
608 608 the delta chain for this revision
609 609 :``extraratio``: extradist divided by chainsize; another representation of
610 610 how much unrelated data is needed to load this delta chain
611 611
612 612 If the repository is configured to use the sparse read, additional keywords
613 613 are available:
614 614
615 615 :``readsize``: total size of data read from the disk for a revision
616 616 (sum of the sizes of all the blocks)
617 617 :``largestblock``: size of the largest block of data read from the disk
618 618 :``readdensity``: density of useful bytes in the data read from the disk
619 619 :``srchunks``: in how many data hunks the whole revision would be read
620 620
621 621 The sparse read can be enabled with experimental.sparse-read = True
622 622 """
623 623 opts = pycompat.byteskwargs(opts)
624 624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 625 index = r.index
626 626 start = r.start
627 627 length = r.length
628 628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 629 withsparseread = getattr(r, '_withsparseread', False)
630 630
631 631 def revinfo(rev):
632 632 e = index[rev]
633 633 compsize = e[1]
634 634 uncompsize = e[2]
635 635 chainsize = 0
636 636
637 637 if generaldelta:
638 638 if e[3] == e[5]:
639 639 deltatype = 'p1'
640 640 elif e[3] == e[6]:
641 641 deltatype = 'p2'
642 642 elif e[3] == rev - 1:
643 643 deltatype = 'prev'
644 644 elif e[3] == rev:
645 645 deltatype = 'base'
646 646 else:
647 647 deltatype = 'other'
648 648 else:
649 649 if e[3] == rev:
650 650 deltatype = 'base'
651 651 else:
652 652 deltatype = 'prev'
653 653
654 654 chain = r._deltachain(rev)[0]
655 655 for iterrev in chain:
656 656 e = index[iterrev]
657 657 chainsize += e[1]
658 658
659 659 return compsize, uncompsize, deltatype, chain, chainsize
660 660
661 661 fm = ui.formatter('debugdeltachain', opts)
662 662
663 663 fm.plain(' rev chain# chainlen prev delta '
664 664 'size rawsize chainsize ratio lindist extradist '
665 665 'extraratio')
666 666 if withsparseread:
667 667 fm.plain(' readsize largestblk rddensity srchunks')
668 668 fm.plain('\n')
669 669
670 670 chainbases = {}
671 671 for rev in r:
672 672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 673 chainbase = chain[0]
674 674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 675 basestart = start(chainbase)
676 676 revstart = start(rev)
677 677 lineardist = revstart + comp - basestart
678 678 extradist = lineardist - chainsize
679 679 try:
680 680 prevrev = chain[-2]
681 681 except IndexError:
682 682 prevrev = -1
683 683
684 684 if uncomp != 0:
685 685 chainratio = float(chainsize) / float(uncomp)
686 686 else:
687 687 chainratio = chainsize
688 688
689 689 if chainsize != 0:
690 690 extraratio = float(extradist) / float(chainsize)
691 691 else:
692 692 extraratio = extradist
693 693
694 694 fm.startitem()
695 695 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 696 'uncompsize chainsize chainratio lindist extradist '
697 697 'extraratio',
698 698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 699 rev, chainid, len(chain), prevrev, deltatype, comp,
700 700 uncomp, chainsize, chainratio, lineardist, extradist,
701 701 extraratio,
702 702 rev=rev, chainid=chainid, chainlen=len(chain),
703 703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 704 uncompsize=uncomp, chainsize=chainsize,
705 705 chainratio=chainratio, lindist=lineardist,
706 706 extradist=extradist, extraratio=extraratio)
707 707 if withsparseread:
708 708 readsize = 0
709 709 largestblock = 0
710 710 srchunks = 0
711 711
712 712 for revschunk in deltautil.slicechunk(r, chain):
713 713 srchunks += 1
714 714 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 715 blksize = blkend - start(revschunk[0])
716 716
717 717 readsize += blksize
718 718 if largestblock < blksize:
719 719 largestblock = blksize
720 720
721 721 if readsize:
722 722 readdensity = float(chainsize) / float(readsize)
723 723 else:
724 724 readdensity = 1
725 725
726 726 fm.write('readsize largestblock readdensity srchunks',
727 727 ' %10d %10d %9.5f %8d',
728 728 readsize, largestblock, readdensity, srchunks,
729 729 readsize=readsize, largestblock=largestblock,
730 730 readdensity=readdensity, srchunks=srchunks)
731 731
732 732 fm.plain('\n')
733 733
734 734 fm.end()
735 735
736 736 @command('debugdirstate|debugstate',
737 737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 738 ('', 'dates', True, _('display the saved mtime')),
739 739 ('', 'datesort', None, _('sort by saved mtime'))],
740 740 _('[OPTION]...'))
741 741 def debugstate(ui, repo, **opts):
742 742 """show the contents of the current dirstate"""
743 743
744 744 nodates = not opts[r'dates']
745 745 if opts.get(r'nodates') is not None:
746 746 nodates = True
747 747 datesort = opts.get(r'datesort')
748 748
749 749 if datesort:
750 750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 751 else:
752 752 keyfunc = None # sort by filename
753 753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 754 if ent[3] == -1:
755 755 timestr = 'unset '
756 756 elif nodates:
757 757 timestr = 'set '
758 758 else:
759 759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 760 time.localtime(ent[3]))
761 761 timestr = encoding.strtolocal(timestr)
762 762 if ent[1] & 0o20000:
763 763 mode = 'lnk'
764 764 else:
765 765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 767 for f in repo.dirstate.copies():
768 768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
770 770 @command('debugdiscovery',
771 771 [('', 'old', None, _('use old-style discovery')),
772 772 ('', 'nonheads', None,
773 773 _('use old-style discovery with non-heads included')),
774 774 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 775 ] + cmdutil.remoteopts,
776 776 _('[--rev REV] [OTHER]'))
777 777 def debugdiscovery(ui, repo, remoteurl="default", **opts):
778 778 """runs the changeset discovery protocol in isolation"""
779 779 opts = pycompat.byteskwargs(opts)
780 780 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
781 781 remote = hg.peer(repo, opts, remoteurl)
782 782 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
783 783
784 784 # make sure tests are repeatable
785 785 random.seed(12323)
786 786
787 787 def doit(pushedrevs, remoteheads, remote=remote):
788 788 if opts.get('old'):
789 789 if not util.safehasattr(remote, 'branches'):
790 790 # enable in-client legacy support
791 791 remote = localrepo.locallegacypeer(remote.local())
792 792 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
793 793 force=True)
794 794 common = set(common)
795 795 if not opts.get('nonheads'):
796 796 ui.write(("unpruned common: %s\n") %
797 797 " ".join(sorted(short(n) for n in common)))
798 798
799 799 clnode = repo.changelog.node
800 800 common = repo.revs('heads(::%ln)', common)
801 801 common = {clnode(r) for r in common}
802 802 else:
803 803 nodes = None
804 804 if pushedrevs:
805 805 revs = scmutil.revrange(repo, pushedrevs)
806 806 nodes = [repo[r].node() for r in revs]
807 807 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
808 808 ancestorsof=nodes)
809 809 common = set(common)
810 810 rheads = set(hds)
811 811 lheads = set(repo.heads())
812 812 ui.write(("common heads: %s\n") %
813 813 " ".join(sorted(short(n) for n in common)))
814 814 if lheads <= common:
815 815 ui.write(("local is subset\n"))
816 816 elif rheads <= common:
817 817 ui.write(("remote is subset\n"))
818 818
819 819 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
820 820 localrevs = opts['rev']
821 821 doit(localrevs, remoterevs)
822 822
823 823 _chunksize = 4 << 10
824 824
825 825 @command('debugdownload',
826 826 [
827 827 ('o', 'output', '', _('path')),
828 828 ],
829 829 optionalrepo=True)
830 830 def debugdownload(ui, repo, url, output=None, **opts):
831 831 """download a resource using Mercurial logic and config
832 832 """
833 833 fh = urlmod.open(ui, url, output)
834 834
835 835 dest = ui
836 836 if output:
837 837 dest = open(output, "wb", _chunksize)
838 838 try:
839 839 data = fh.read(_chunksize)
840 840 while data:
841 841 dest.write(data)
842 842 data = fh.read(_chunksize)
843 843 finally:
844 844 if output:
845 845 dest.close()
846 846
847 847 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
848 848 def debugextensions(ui, repo, **opts):
849 849 '''show information about active extensions'''
850 850 opts = pycompat.byteskwargs(opts)
851 851 exts = extensions.extensions(ui)
852 852 hgver = util.version()
853 853 fm = ui.formatter('debugextensions', opts)
854 854 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
855 855 isinternal = extensions.ismoduleinternal(extmod)
856 856 extsource = pycompat.fsencode(extmod.__file__)
857 857 if isinternal:
858 858 exttestedwith = [] # never expose magic string to users
859 859 else:
860 860 exttestedwith = getattr(extmod, 'testedwith', '').split()
861 861 extbuglink = getattr(extmod, 'buglink', None)
862 862
863 863 fm.startitem()
864 864
865 865 if ui.quiet or ui.verbose:
866 866 fm.write('name', '%s\n', extname)
867 867 else:
868 868 fm.write('name', '%s', extname)
869 869 if isinternal or hgver in exttestedwith:
870 870 fm.plain('\n')
871 871 elif not exttestedwith:
872 872 fm.plain(_(' (untested!)\n'))
873 873 else:
874 874 lasttestedversion = exttestedwith[-1]
875 875 fm.plain(' (%s!)\n' % lasttestedversion)
876 876
877 877 fm.condwrite(ui.verbose and extsource, 'source',
878 878 _(' location: %s\n'), extsource or "")
879 879
880 880 if ui.verbose:
881 881 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
882 882 fm.data(bundled=isinternal)
883 883
884 884 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
885 885 _(' tested with: %s\n'),
886 886 fm.formatlist(exttestedwith, name='ver'))
887 887
888 888 fm.condwrite(ui.verbose and extbuglink, 'buglink',
889 889 _(' bug reporting: %s\n'), extbuglink or "")
890 890
891 891 fm.end()
892 892
893 893 @command('debugfileset',
894 894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
895 895 ('', 'all-files', False,
896 896 _('test files from all revisions and working directory')),
897 897 ('s', 'show-matcher', None,
898 898 _('print internal representation of matcher')),
899 899 ('p', 'show-stage', [],
900 900 _('print parsed tree at the given stage'), _('NAME'))],
901 901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
902 902 def debugfileset(ui, repo, expr, **opts):
903 903 '''parse and apply a fileset specification'''
904 904 from . import fileset
905 905 fileset.symbols # force import of fileset so we have predicates to optimize
906 906 opts = pycompat.byteskwargs(opts)
907 907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908 908
909 909 stages = [
910 910 ('parsed', pycompat.identity),
911 911 ('analyzed', filesetlang.analyze),
912 912 ('optimized', filesetlang.optimize),
913 913 ]
914 914 stagenames = set(n for n, f in stages)
915 915
916 916 showalways = set()
917 917 if ui.verbose and not opts['show_stage']:
918 918 # show parsed tree by --verbose (deprecated)
919 919 showalways.add('parsed')
920 920 if opts['show_stage'] == ['all']:
921 921 showalways.update(stagenames)
922 922 else:
923 923 for n in opts['show_stage']:
924 924 if n not in stagenames:
925 925 raise error.Abort(_('invalid stage name: %s') % n)
926 926 showalways.update(opts['show_stage'])
927 927
928 928 tree = filesetlang.parse(expr)
929 929 for n, f in stages:
930 930 tree = f(tree)
931 931 if n in showalways:
932 932 if opts['show_stage'] or n != 'parsed':
933 933 ui.write(("* %s:\n") % n)
934 934 ui.write(filesetlang.prettyformat(tree), "\n")
935 935
936 936 files = set()
937 937 if opts['all_files']:
938 938 for r in repo:
939 939 c = repo[r]
940 940 files.update(c.files())
941 941 files.update(c.substate)
942 942 if opts['all_files'] or ctx.rev() is None:
943 943 wctx = repo[None]
944 944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 945 subrepos=list(wctx.substate),
946 946 unknown=True, ignored=True))
947 947 files.update(wctx.substate)
948 948 else:
949 949 files.update(ctx.files())
950 950 files.update(ctx.substate)
951 951
952 952 m = ctx.matchfileset(expr)
953 953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 955 for f in sorted(files):
956 956 if not m(f):
957 957 continue
958 958 ui.write("%s\n" % f)
959 959
960 960 @command('debugformat',
961 961 [] + cmdutil.formatteropts)
962 962 def debugformat(ui, repo, **opts):
963 963 """display format information about the current repository
964 964
965 965 Use --verbose to get extra information about current config value and
966 966 Mercurial default."""
967 967 opts = pycompat.byteskwargs(opts)
968 968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970 970
971 971 def makeformatname(name):
972 972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973 973
974 974 fm = ui.formatter('debugformat', opts)
975 975 if fm.isplain():
976 976 def formatvalue(value):
977 977 if util.safehasattr(value, 'startswith'):
978 978 return value
979 979 if value:
980 980 return 'yes'
981 981 else:
982 982 return 'no'
983 983 else:
984 984 formatvalue = pycompat.identity
985 985
986 986 fm.plain('format-variant')
987 987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 988 fm.plain(' repo')
989 989 if ui.verbose:
990 990 fm.plain(' config default')
991 991 fm.plain('\n')
992 992 for fv in upgrade.allformatvariant:
993 993 fm.startitem()
994 994 repovalue = fv.fromrepo(repo)
995 995 configvalue = fv.fromconfig(repo)
996 996
997 997 if repovalue != configvalue:
998 998 namelabel = 'formatvariant.name.mismatchconfig'
999 999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 1000 elif repovalue != fv.default:
1001 1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 1003 else:
1004 1004 namelabel = 'formatvariant.name.uptodate'
1005 1005 repolabel = 'formatvariant.repo.uptodate'
1006 1006
1007 1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 1008 label=namelabel)
1009 1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 1010 label=repolabel)
1011 1011 if fv.default != configvalue:
1012 1012 configlabel = 'formatvariant.config.special'
1013 1013 else:
1014 1014 configlabel = 'formatvariant.config.default'
1015 1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 1016 label=configlabel)
1017 1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 1018 label='formatvariant.default')
1019 1019 fm.plain('\n')
1020 1020 fm.end()
1021 1021
1022 1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 1023 def debugfsinfo(ui, path="."):
1024 1024 """show information detected about current filesystem"""
1025 1025 ui.write(('path: %s\n') % path)
1026 1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 1031 casesensitive = '(unknown)'
1032 1032 try:
1033 1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 1035 except OSError:
1036 1036 pass
1037 1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
1039 1039 @command('debuggetbundle',
1040 1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 1043 _('REPO FILE [-H|-C ID]...'),
1044 1044 norepo=True)
1045 1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 1046 """retrieves a bundle from a repo
1047 1047
1048 1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 1049 given file.
1050 1050 """
1051 1051 opts = pycompat.byteskwargs(opts)
1052 1052 repo = hg.peer(ui, opts, repopath)
1053 1053 if not repo.capable('getbundle'):
1054 1054 raise error.Abort("getbundle() not supported by target repository")
1055 1055 args = {}
1056 1056 if common:
1057 1057 args[r'common'] = [bin(s) for s in common]
1058 1058 if head:
1059 1059 args[r'heads'] = [bin(s) for s in head]
1060 1060 # TODO: get desired bundlecaps from command line.
1061 1061 args[r'bundlecaps'] = None
1062 1062 bundle = repo.getbundle('debug', **args)
1063 1063
1064 1064 bundletype = opts.get('type', 'bzip2').lower()
1065 1065 btypes = {'none': 'HG10UN',
1066 1066 'bzip2': 'HG10BZ',
1067 1067 'gzip': 'HG10GZ',
1068 1068 'bundle2': 'HG20'}
1069 1069 bundletype = btypes.get(bundletype)
1070 1070 if bundletype not in bundle2.bundletypes:
1071 1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
1074 1074 @command('debugignore', [], '[FILE]')
1075 1075 def debugignore(ui, repo, *files, **opts):
1076 1076 """display the combined ignore pattern and information about ignored files
1077 1077
1078 1078 With no argument display the combined ignore pattern.
1079 1079
1080 1080 Given space separated file names, shows if the given file is ignored and
1081 1081 if so, show the ignore rule (file and line number) that matched it.
1082 1082 """
1083 1083 ignore = repo.dirstate._ignore
1084 1084 if not files:
1085 1085 # Show all the patterns
1086 1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 1087 else:
1088 1088 m = scmutil.match(repo[None], pats=files)
1089 1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1090 1090 for f in m.files():
1091 1091 nf = util.normpath(f)
1092 1092 ignored = None
1093 1093 ignoredata = None
1094 1094 if nf != '.':
1095 1095 if ignore(nf):
1096 1096 ignored = nf
1097 1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1098 1098 else:
1099 1099 for p in util.finddirs(nf):
1100 1100 if ignore(p):
1101 1101 ignored = p
1102 1102 ignoredata = repo.dirstate._ignorefileandline(p)
1103 1103 break
1104 1104 if ignored:
1105 1105 if ignored == nf:
1106 1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1107 1107 else:
1108 1108 ui.write(_("%s is ignored because of "
1109 1109 "containing folder %s\n")
1110 1110 % (uipathfn(f), ignored))
1111 1111 ignorefile, lineno, line = ignoredata
1112 1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1113 1113 % (ignorefile, lineno, line))
1114 1114 else:
1115 1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1116 1116
1117 1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1118 1118 _('-c|-m|FILE'))
1119 1119 def debugindex(ui, repo, file_=None, **opts):
1120 1120 """dump index data for a storage primitive"""
1121 1121 opts = pycompat.byteskwargs(opts)
1122 1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1123 1123
1124 1124 if ui.debugflag:
1125 1125 shortfn = hex
1126 1126 else:
1127 1127 shortfn = short
1128 1128
1129 1129 idlen = 12
1130 1130 for i in store:
1131 1131 idlen = len(shortfn(store.node(i)))
1132 1132 break
1133 1133
1134 1134 fm = ui.formatter('debugindex', opts)
1135 1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1136 1136 b'nodeid'.ljust(idlen),
1137 1137 b'p1'.ljust(idlen)))
1138 1138
1139 1139 for rev in store:
1140 1140 node = store.node(rev)
1141 1141 parents = store.parents(node)
1142 1142
1143 1143 fm.startitem()
1144 1144 fm.write(b'rev', b'%6d ', rev)
1145 1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1146 1146 fm.write(b'node', '%s ', shortfn(node))
1147 1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1148 1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1149 1149 fm.plain(b'\n')
1150 1150
1151 1151 fm.end()
1152 1152
1153 1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1154 1154 _('-c|-m|FILE'), optionalrepo=True)
1155 1155 def debugindexdot(ui, repo, file_=None, **opts):
1156 1156 """dump an index DAG as a graphviz dot file"""
1157 1157 opts = pycompat.byteskwargs(opts)
1158 1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1159 1159 ui.write(("digraph G {\n"))
1160 1160 for i in r:
1161 1161 node = r.node(i)
1162 1162 pp = r.parents(node)
1163 1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1164 1164 if pp[1] != nullid:
1165 1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1166 1166 ui.write("}\n")
1167 1167
1168 1168 @command('debugindexstats', [])
1169 1169 def debugindexstats(ui, repo):
1170 1170 """show stats related to the changelog index"""
1171 1171 repo.changelog.shortest(nullid, 1)
1172 1172 index = repo.changelog.index
1173 1173 if not util.safehasattr(index, 'stats'):
1174 1174 raise error.Abort(_('debugindexstats only works with native code'))
1175 1175 for k, v in sorted(index.stats().items()):
1176 1176 ui.write('%s: %d\n' % (k, v))
1177 1177
1178 1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1179 1179 def debuginstall(ui, **opts):
1180 1180 '''test Mercurial installation
1181 1181
1182 1182 Returns 0 on success.
1183 1183 '''
1184 1184 opts = pycompat.byteskwargs(opts)
1185 1185
1186 1186 problems = 0
1187 1187
1188 1188 fm = ui.formatter('debuginstall', opts)
1189 1189 fm.startitem()
1190 1190
1191 1191 # encoding
1192 1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1193 1193 err = None
1194 1194 try:
1195 1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1196 1196 except LookupError as inst:
1197 1197 err = stringutil.forcebytestr(inst)
1198 1198 problems += 1
1199 1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1200 1200 " (check that your locale is properly set)\n"), err)
1201 1201
1202 1202 # Python
1203 1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1204 1204 pycompat.sysexecutable)
1205 1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1206 1206 ("%d.%d.%d" % sys.version_info[:3]))
1207 1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1208 1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1209 1209
1210 1210 security = set(sslutil.supportedprotocols)
1211 1211 if sslutil.hassni:
1212 1212 security.add('sni')
1213 1213
1214 1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1215 1215 fm.formatlist(sorted(security), name='protocol',
1216 1216 fmt='%s', sep=','))
1217 1217
1218 1218 # These are warnings, not errors. So don't increment problem count. This
1219 1219 # may change in the future.
1220 1220 if 'tls1.2' not in security:
1221 1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1222 1222 'network connections lack modern security\n'))
1223 1223 if 'sni' not in security:
1224 1224 fm.plain(_(' SNI not supported by Python install; may have '
1225 1225 'connectivity issues with some servers\n'))
1226 1226
1227 1227 # TODO print CA cert info
1228 1228
1229 1229 # hg version
1230 1230 hgver = util.version()
1231 1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1232 1232 hgver.split('+')[0])
1233 1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1234 1234 '+'.join(hgver.split('+')[1:]))
1235 1235
1236 1236 # compiled modules
1237 1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1238 1238 policy.policy)
1239 1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1240 1240 os.path.dirname(pycompat.fsencode(__file__)))
1241 1241
1242 1242 if policy.policy in ('c', 'allow'):
1243 1243 err = None
1244 1244 try:
1245 1245 from .cext import (
1246 1246 base85,
1247 1247 bdiff,
1248 1248 mpatch,
1249 1249 osutil,
1250 1250 )
1251 1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1252 1252 except Exception as inst:
1253 1253 err = stringutil.forcebytestr(inst)
1254 1254 problems += 1
1255 1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1256 1256
1257 1257 compengines = util.compengines._engines.values()
1258 1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1259 1259 fm.formatlist(sorted(e.name() for e in compengines),
1260 1260 name='compengine', fmt='%s', sep=', '))
1261 1261 fm.write('compenginesavail', _('checking available compression engines '
1262 1262 '(%s)\n'),
1263 1263 fm.formatlist(sorted(e.name() for e in compengines
1264 1264 if e.available()),
1265 1265 name='compengine', fmt='%s', sep=', '))
1266 1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1267 1267 fm.write('compenginesserver', _('checking available compression engines '
1268 1268 'for wire protocol (%s)\n'),
1269 1269 fm.formatlist([e.name() for e in wirecompengines
1270 1270 if e.wireprotosupport()],
1271 1271 name='compengine', fmt='%s', sep=', '))
1272 1272 re2 = 'missing'
1273 1273 if util._re2:
1274 1274 re2 = 'available'
1275 1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1276 1276 fm.data(re2=bool(util._re2))
1277 1277
1278 1278 # templates
1279 1279 p = templater.templatepaths()
1280 1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1281 1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1282 1282 if p:
1283 1283 m = templater.templatepath("map-cmdline.default")
1284 1284 if m:
1285 1285 # template found, check if it is working
1286 1286 err = None
1287 1287 try:
1288 1288 templater.templater.frommapfile(m)
1289 1289 except Exception as inst:
1290 1290 err = stringutil.forcebytestr(inst)
1291 1291 p = None
1292 1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1293 1293 else:
1294 1294 p = None
1295 1295 fm.condwrite(p, 'defaulttemplate',
1296 1296 _("checking default template (%s)\n"), m)
1297 1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1298 1298 _(" template '%s' not found\n"), "default")
1299 1299 if not p:
1300 1300 problems += 1
1301 1301 fm.condwrite(not p, '',
1302 1302 _(" (templates seem to have been installed incorrectly)\n"))
1303 1303
1304 1304 # editor
1305 1305 editor = ui.geteditor()
1306 1306 editor = util.expandpath(editor)
1307 1307 editorbin = procutil.shellsplit(editor)[0]
1308 1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1309 1309 cmdpath = procutil.findexe(editorbin)
1310 1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1311 1311 _(" No commit editor set and can't find %s in PATH\n"
1312 1312 " (specify a commit editor in your configuration"
1313 1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1314 1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1315 1315 _(" Can't find editor '%s' in PATH\n"
1316 1316 " (specify a commit editor in your configuration"
1317 1317 " file)\n"), not cmdpath and editorbin)
1318 1318 if not cmdpath and editor != 'vi':
1319 1319 problems += 1
1320 1320
1321 1321 # check username
1322 1322 username = None
1323 1323 err = None
1324 1324 try:
1325 1325 username = ui.username()
1326 1326 except error.Abort as e:
1327 1327 err = stringutil.forcebytestr(e)
1328 1328 problems += 1
1329 1329
1330 1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1331 1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1332 1332 " (specify a username in your configuration file)\n"), err)
1333 1333
1334 1334 fm.condwrite(not problems, '',
1335 1335 _("no problems detected\n"))
1336 1336 if not problems:
1337 1337 fm.data(problems=problems)
1338 1338 fm.condwrite(problems, 'problems',
1339 1339 _("%d problems detected,"
1340 1340 " please check your install!\n"), problems)
1341 1341 fm.end()
1342 1342
1343 1343 return problems
1344 1344
1345 1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1346 1346 def debugknown(ui, repopath, *ids, **opts):
1347 1347 """test whether node ids are known to a repo
1348 1348
1349 1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1350 1350 and 1s indicating unknown/known.
1351 1351 """
1352 1352 opts = pycompat.byteskwargs(opts)
1353 1353 repo = hg.peer(ui, opts, repopath)
1354 1354 if not repo.capable('known'):
1355 1355 raise error.Abort("known() not supported by target repository")
1356 1356 flags = repo.known([bin(s) for s in ids])
1357 1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1358 1358
1359 1359 @command('debuglabelcomplete', [], _('LABEL...'))
1360 1360 def debuglabelcomplete(ui, repo, *args):
1361 1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1362 1362 debugnamecomplete(ui, repo, *args)
1363 1363
1364 1364 @command('debuglocks',
1365 1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1366 1366 ('W', 'force-wlock', None,
1367 1367 _('free the working state lock (DANGEROUS)')),
1368 1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1369 1369 ('S', 'set-wlock', None,
1370 1370 _('set the working state lock until stopped'))],
1371 1371 _('[OPTION]...'))
1372 1372 def debuglocks(ui, repo, **opts):
1373 1373 """show or modify state of locks
1374 1374
1375 1375 By default, this command will show which locks are held. This
1376 1376 includes the user and process holding the lock, the amount of time
1377 1377 the lock has been held, and the machine name where the process is
1378 1378 running if it's not local.
1379 1379
1380 1380 Locks protect the integrity of Mercurial's data, so should be
1381 1381 treated with care. System crashes or other interruptions may cause
1382 1382 locks to not be properly released, though Mercurial will usually
1383 1383 detect and remove such stale locks automatically.
1384 1384
1385 1385 However, detecting stale locks may not always be possible (for
1386 1386 instance, on a shared filesystem). Removing locks may also be
1387 1387 blocked by filesystem permissions.
1388 1388
1389 1389 Setting a lock will prevent other commands from changing the data.
1390 1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1391 1391 The set locks are removed when the command exits.
1392 1392
1393 1393 Returns 0 if no locks are held.
1394 1394
1395 1395 """
1396 1396
1397 1397 if opts.get(r'force_lock'):
1398 1398 repo.svfs.unlink('lock')
1399 1399 if opts.get(r'force_wlock'):
1400 1400 repo.vfs.unlink('wlock')
1401 1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1402 1402 return 0
1403 1403
1404 1404 locks = []
1405 1405 try:
1406 1406 if opts.get(r'set_wlock'):
1407 1407 try:
1408 1408 locks.append(repo.wlock(False))
1409 1409 except error.LockHeld:
1410 1410 raise error.Abort(_('wlock is already held'))
1411 1411 if opts.get(r'set_lock'):
1412 1412 try:
1413 1413 locks.append(repo.lock(False))
1414 1414 except error.LockHeld:
1415 1415 raise error.Abort(_('lock is already held'))
1416 1416 if len(locks):
1417 1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1418 1418 return 0
1419 1419 finally:
1420 1420 release(*locks)
1421 1421
1422 1422 now = time.time()
1423 1423 held = 0
1424 1424
1425 1425 def report(vfs, name, method):
1426 1426 # this causes stale locks to get reaped for more accurate reporting
1427 1427 try:
1428 1428 l = method(False)
1429 1429 except error.LockHeld:
1430 1430 l = None
1431 1431
1432 1432 if l:
1433 1433 l.release()
1434 1434 else:
1435 1435 try:
1436 1436 st = vfs.lstat(name)
1437 1437 age = now - st[stat.ST_MTIME]
1438 1438 user = util.username(st.st_uid)
1439 1439 locker = vfs.readlock(name)
1440 1440 if ":" in locker:
1441 1441 host, pid = locker.split(':')
1442 1442 if host == socket.gethostname():
1443 1443 locker = 'user %s, process %s' % (user or b'None', pid)
1444 1444 else:
1445 1445 locker = ('user %s, process %s, host %s'
1446 1446 % (user or b'None', pid, host))
1447 1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1448 1448 return 1
1449 1449 except OSError as e:
1450 1450 if e.errno != errno.ENOENT:
1451 1451 raise
1452 1452
1453 1453 ui.write(("%-6s free\n") % (name + ":"))
1454 1454 return 0
1455 1455
1456 1456 held += report(repo.svfs, "lock", repo.lock)
1457 1457 held += report(repo.vfs, "wlock", repo.wlock)
1458 1458
1459 1459 return held
1460 1460
1461 1461 @command('debugmanifestfulltextcache', [
1462 1462 ('', 'clear', False, _('clear the cache')),
1463 1463 ('a', 'add', '', _('add the given manifest node to the cache'),
1464 1464 _('NODE'))
1465 1465 ], '')
1466 1466 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1467 1467 """show, clear or amend the contents of the manifest fulltext cache"""
1468 with repo.lock():
1468
1469 def getcache():
1469 1470 r = repo.manifestlog.getstorage(b'')
1470 1471 try:
1471 cache = r._fulltextcache
1472 return r._fulltextcache
1472 1473 except AttributeError:
1473 ui.warn(_(
1474 "Current revlog implementation doesn't appear to have a "
1475 'manifest fulltext cache\n'))
1476 return
1477
1478 if opts.get(r'clear'):
1474 msg = _("Current revlog implementation doesn't appear to have a "
1475 "manifest fulltext cache\n")
1476 raise error.Abort(msg)
1477
1478 if opts.get(r'clear'):
1479 with repo.lock():
1480 cache = getcache()
1479 1481 cache.clear()
1480 1482
1481 if add:
1483 if add:
1484 with repo.lock():
1482 1485 try:
1483 manifest = repo.manifestlog[r.lookup(add)]
1486 m = repo.manifestlog
1487 manifest = m[m.getstorage(b'').lookup(add)]
1484 1488 except error.LookupError as e:
1485 1489 raise error.Abort(e, hint="Check your manifest node id")
1486 1490 manifest.read() # stores revision in cache too
1487 1491
1488 if not len(cache):
1489 ui.write(_('cache empty\n'))
1490 else:
1491 ui.write(
1492 _('cache contains %d manifest entries, in order of most to '
1493 'least recent:\n') % (len(cache),))
1494 totalsize = 0
1495 for nodeid in cache:
1496 # Use cache.get to not update the LRU order
1497 data = cache.get(nodeid)
1498 size = len(data)
1499 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1500 ui.write(_('id: %s, size %s\n') % (
1501 hex(nodeid), util.bytecount(size)))
1502 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1503 ui.write(
1504 _('total cache data size %s, on-disk %s\n') % (
1505 util.bytecount(totalsize), util.bytecount(ondisk))
1506 )
1492 cache = getcache()
1493 if not len(cache):
1494 ui.write(_('cache empty\n'))
1495 else:
1496 ui.write(
1497 _('cache contains %d manifest entries, in order of most to '
1498 'least recent:\n') % (len(cache),))
1499 totalsize = 0
1500 for nodeid in cache:
1501 # Use cache.get to not update the LRU order
1502 data = cache.get(nodeid)
1503 size = len(data)
1504 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1505 ui.write(_('id: %s, size %s\n') % (
1506 hex(nodeid), util.bytecount(size)))
1507 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1508 ui.write(
1509 _('total cache data size %s, on-disk %s\n') % (
1510 util.bytecount(totalsize), util.bytecount(ondisk))
1511 )
1507 1512
1508 1513 @command('debugmergestate', [], '')
1509 1514 def debugmergestate(ui, repo, *args):
1510 1515 """print merge state
1511 1516
1512 1517 Use --verbose to print out information about whether v1 or v2 merge state
1513 1518 was chosen."""
1514 1519 def _hashornull(h):
1515 1520 if h == nullhex:
1516 1521 return 'null'
1517 1522 else:
1518 1523 return h
1519 1524
1520 1525 def printrecords(version):
1521 1526 ui.write(('* version %d records\n') % version)
1522 1527 if version == 1:
1523 1528 records = v1records
1524 1529 else:
1525 1530 records = v2records
1526 1531
1527 1532 for rtype, record in records:
1528 1533 # pretty print some record types
1529 1534 if rtype == 'L':
1530 1535 ui.write(('local: %s\n') % record)
1531 1536 elif rtype == 'O':
1532 1537 ui.write(('other: %s\n') % record)
1533 1538 elif rtype == 'm':
1534 1539 driver, mdstate = record.split('\0', 1)
1535 1540 ui.write(('merge driver: %s (state "%s")\n')
1536 1541 % (driver, mdstate))
1537 1542 elif rtype in 'FDC':
1538 1543 r = record.split('\0')
1539 1544 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1540 1545 if version == 1:
1541 1546 onode = 'not stored in v1 format'
1542 1547 flags = r[7]
1543 1548 else:
1544 1549 onode, flags = r[7:9]
1545 1550 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1546 1551 % (f, rtype, state, _hashornull(hash)))
1547 1552 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1548 1553 ui.write((' ancestor path: %s (node %s)\n')
1549 1554 % (afile, _hashornull(anode)))
1550 1555 ui.write((' other path: %s (node %s)\n')
1551 1556 % (ofile, _hashornull(onode)))
1552 1557 elif rtype == 'f':
1553 1558 filename, rawextras = record.split('\0', 1)
1554 1559 extras = rawextras.split('\0')
1555 1560 i = 0
1556 1561 extrastrings = []
1557 1562 while i < len(extras):
1558 1563 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1559 1564 i += 2
1560 1565
1561 1566 ui.write(('file extras: %s (%s)\n')
1562 1567 % (filename, ', '.join(extrastrings)))
1563 1568 elif rtype == 'l':
1564 1569 labels = record.split('\0', 2)
1565 1570 labels = [l for l in labels if len(l) > 0]
1566 1571 ui.write(('labels:\n'))
1567 1572 ui.write((' local: %s\n' % labels[0]))
1568 1573 ui.write((' other: %s\n' % labels[1]))
1569 1574 if len(labels) > 2:
1570 1575 ui.write((' base: %s\n' % labels[2]))
1571 1576 else:
1572 1577 ui.write(('unrecognized entry: %s\t%s\n')
1573 1578 % (rtype, record.replace('\0', '\t')))
1574 1579
1575 1580 # Avoid mergestate.read() since it may raise an exception for unsupported
1576 1581 # merge state records. We shouldn't be doing this, but this is OK since this
1577 1582 # command is pretty low-level.
1578 1583 ms = mergemod.mergestate(repo)
1579 1584
1580 1585 # sort so that reasonable information is on top
1581 1586 v1records = ms._readrecordsv1()
1582 1587 v2records = ms._readrecordsv2()
1583 1588 order = 'LOml'
1584 1589 def key(r):
1585 1590 idx = order.find(r[0])
1586 1591 if idx == -1:
1587 1592 return (1, r[1])
1588 1593 else:
1589 1594 return (0, idx)
1590 1595 v1records.sort(key=key)
1591 1596 v2records.sort(key=key)
1592 1597
1593 1598 if not v1records and not v2records:
1594 1599 ui.write(('no merge state found\n'))
1595 1600 elif not v2records:
1596 1601 ui.note(('no version 2 merge state\n'))
1597 1602 printrecords(1)
1598 1603 elif ms._v1v2match(v1records, v2records):
1599 1604 ui.note(('v1 and v2 states match: using v2\n'))
1600 1605 printrecords(2)
1601 1606 else:
1602 1607 ui.note(('v1 and v2 states mismatch: using v1\n'))
1603 1608 printrecords(1)
1604 1609 if ui.verbose:
1605 1610 printrecords(2)
1606 1611
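# A minimal sketch of the 'LOml' ordering used by the sort key in
# debugmergestate above (hypothetical records, illustration only): known
# record types sort first, in that order, everything else afterwards by
# payload.
#
#     >>> order = 'LOml'
#     >>> def key(r):
#     ...     idx = order.find(r[0])
#     ...     return (1, r[1]) if idx == -1 else (0, idx)
#     >>> [t for t, _ in sorted([('f', 'x'), ('m', 'd'), ('O', 'o'), ('L', 'l')],
#     ...                       key=key)]
#     ['L', 'O', 'm', 'f']
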
1607 1612 @command('debugnamecomplete', [], _('NAME...'))
1608 1613 def debugnamecomplete(ui, repo, *args):
1609 1614 '''complete "names" - tags, open branch names, bookmark names'''
1610 1615
1611 1616 names = set()
1612 1617 # since we previously only listed open branches, we will handle that
1613 1618 # specially (after this for loop)
1614 1619 for name, ns in repo.names.iteritems():
1615 1620 if name != 'branches':
1616 1621 names.update(ns.listnames(repo))
1617 1622 names.update(tag for (tag, heads, tip, closed)
1618 1623 in repo.branchmap().iterbranches() if not closed)
1619 1624 completions = set()
1620 1625 if not args:
1621 1626 args = ['']
1622 1627 for a in args:
1623 1628 completions.update(n for n in names if n.startswith(a))
1624 1629 ui.write('\n'.join(sorted(completions)))
1625 1630 ui.write('\n')
1626 1631
1627 1632 @command('debugobsolete',
1628 1633 [('', 'flags', 0, _('markers flag')),
1629 1634 ('', 'record-parents', False,
1630 1635 _('record parent information for the precursor')),
1631 1636 ('r', 'rev', [], _('display markers relevant to REV')),
1632 1637 ('', 'exclusive', False, _('restrict display to markers only '
1633 1638 'relevant to REV')),
1634 1639 ('', 'index', False, _('display index of the marker')),
1635 1640 ('', 'delete', [], _('delete markers specified by indices')),
1636 1641 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1637 1642 _('[OBSOLETED [REPLACEMENT ...]]'))
1638 1643 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1639 1644 """create arbitrary obsolete marker
1640 1645
1641 1646 With no arguments, displays the list of obsolescence markers."""
1642 1647
1643 1648 opts = pycompat.byteskwargs(opts)
1644 1649
1645 1650 def parsenodeid(s):
1646 1651 try:
1647 1652 # We do not use revsingle/revrange functions here to accept
1648 1653 # arbitrary node identifiers, possibly not present in the
1649 1654 # local repository.
1650 1655 n = bin(s)
1651 1656 if len(n) != len(nullid):
1652 1657 raise TypeError()
1653 1658 return n
1654 1659 except TypeError:
1655 1660 raise error.Abort('changeset references must be full hexadecimal '
1656 1661 'node identifiers')
1657 1662
1658 1663 if opts.get('delete'):
1659 1664 indices = []
1660 1665 for v in opts.get('delete'):
1661 1666 try:
1662 1667 indices.append(int(v))
1663 1668 except ValueError:
1664 1669 raise error.Abort(_('invalid index value: %r') % v,
1665 1670 hint=_('use integers for indices'))
1666 1671
1667 1672 if repo.currenttransaction():
1668 1673 raise error.Abort(_('cannot delete obsmarkers in the middle '
1669 1674 'of a transaction.'))
1670 1675
1671 1676 with repo.lock():
1672 1677 n = repair.deleteobsmarkers(repo.obsstore, indices)
1673 1678 ui.write(_('deleted %i obsolescence markers\n') % n)
1674 1679
1675 1680 return
1676 1681
1677 1682 if precursor is not None:
1678 1683 if opts['rev']:
1679 1684 raise error.Abort('cannot select revision when creating marker')
1680 1685 metadata = {}
1681 1686 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1682 1687 succs = tuple(parsenodeid(succ) for succ in successors)
1683 1688 l = repo.lock()
1684 1689 try:
1685 1690 tr = repo.transaction('debugobsolete')
1686 1691 try:
1687 1692 date = opts.get('date')
1688 1693 if date:
1689 1694 date = dateutil.parsedate(date)
1690 1695 else:
1691 1696 date = None
1692 1697 prec = parsenodeid(precursor)
1693 1698 parents = None
1694 1699 if opts['record_parents']:
1695 1700 if prec not in repo.unfiltered():
1696 1701 raise error.Abort('cannot use --record-parents on '
1697 1702 'unknown changesets')
1698 1703 parents = repo.unfiltered()[prec].parents()
1699 1704 parents = tuple(p.node() for p in parents)
1700 1705 repo.obsstore.create(tr, prec, succs, opts['flags'],
1701 1706 parents=parents, date=date,
1702 1707 metadata=metadata, ui=ui)
1703 1708 tr.close()
1704 1709 except ValueError as exc:
1705 1710 raise error.Abort(_('bad obsmarker input: %s') %
1706 1711 pycompat.bytestr(exc))
1707 1712 finally:
1708 1713 tr.release()
1709 1714 finally:
1710 1715 l.release()
1711 1716 else:
1712 1717 if opts['rev']:
1713 1718 revs = scmutil.revrange(repo, opts['rev'])
1714 1719 nodes = [repo[r].node() for r in revs]
1715 1720 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1716 1721 exclusive=opts['exclusive']))
1717 1722 markers.sort(key=lambda x: x._data)
1718 1723 else:
1719 1724 markers = obsutil.getmarkers(repo)
1720 1725
1721 1726 markerstoiter = markers
1722 1727 isrelevant = lambda m: True
1723 1728 if opts.get('rev') and opts.get('index'):
1724 1729 markerstoiter = obsutil.getmarkers(repo)
1725 1730 markerset = set(markers)
1726 1731 isrelevant = lambda m: m in markerset
1727 1732
1728 1733 fm = ui.formatter('debugobsolete', opts)
1729 1734 for i, m in enumerate(markerstoiter):
1730 1735 if not isrelevant(m):
1731 1736 # marker can be irrelevant when we're iterating over a set
1732 1737 # of markers (markerstoiter) which is bigger than the set
1733 1738 # of markers we want to display (markers)
1734 1739 # this can happen if both --index and --rev options are
1735 1740 # provided and thus we need to iterate over all of the markers
1736 1741 # to get the correct indices, but only display the ones that
1737 1742 # are relevant to --rev value
1738 1743 continue
1739 1744 fm.startitem()
1740 1745 ind = i if opts.get('index') else None
1741 1746 cmdutil.showmarker(fm, m, index=ind)
1742 1747 fm.end()
1743 1748
1744 1749 @command('debugp1copies',
1745 1750 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1746 1751 _('[-r REV]'))
1747 1752 def debugp1copies(ui, repo, **opts):
1748 1753 """dump copy information compared to p1"""
1749 1754
1750 1755 opts = pycompat.byteskwargs(opts)
1751 1756 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1752 1757 for dst, src in ctx.p1copies().items():
1753 1758 ui.write('%s -> %s\n' % (src, dst))
1754 1759
1755 1760 @command('debugp2copies',
1756 1761 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1757 1762 _('[-r REV]'))
1758 1763 def debugp1copies(ui, repo, **opts):
1759 1764 """dump copy information compared to p2"""
1760 1765
1761 1766 opts = pycompat.byteskwargs(opts)
1762 1767 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1763 1768 for dst, src in ctx.p2copies().items():
1764 1769 ui.write('%s -> %s\n' % (src, dst))
1765 1770
1766 1771 @command('debugpathcomplete',
1767 1772 [('f', 'full', None, _('complete an entire path')),
1768 1773 ('n', 'normal', None, _('show only normal files')),
1769 1774 ('a', 'added', None, _('show only added files')),
1770 1775 ('r', 'removed', None, _('show only removed files'))],
1771 1776 _('FILESPEC...'))
1772 1777 def debugpathcomplete(ui, repo, *specs, **opts):
1773 1778 '''complete part or all of a tracked path
1774 1779
1775 1780 This command supports shells that offer path name completion. It
1776 1781 currently completes only files already known to the dirstate.
1777 1782
1778 1783 Completion extends only to the next path segment unless
1779 1784 --full is specified, in which case entire paths are used.'''
1780 1785
1781 1786 def complete(path, acceptable):
1782 1787 dirstate = repo.dirstate
1783 1788 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1784 1789 rootdir = repo.root + pycompat.ossep
1785 1790 if spec != repo.root and not spec.startswith(rootdir):
1786 1791 return [], []
1787 1792 if os.path.isdir(spec):
1788 1793 spec += '/'
1789 1794 spec = spec[len(rootdir):]
1790 1795 fixpaths = pycompat.ossep != '/'
1791 1796 if fixpaths:
1792 1797 spec = spec.replace(pycompat.ossep, '/')
1793 1798 speclen = len(spec)
1794 1799 fullpaths = opts[r'full']
1795 1800 files, dirs = set(), set()
1796 1801 adddir, addfile = dirs.add, files.add
1797 1802 for f, st in dirstate.iteritems():
1798 1803 if f.startswith(spec) and st[0] in acceptable:
1799 1804 if fixpaths:
1800 1805 f = f.replace('/', pycompat.ossep)
1801 1806 if fullpaths:
1802 1807 addfile(f)
1803 1808 continue
1804 1809 s = f.find(pycompat.ossep, speclen)
1805 1810 if s >= 0:
1806 1811 adddir(f[:s])
1807 1812 else:
1808 1813 addfile(f)
1809 1814 return files, dirs
1810 1815
1811 1816 acceptable = ''
1812 1817 if opts[r'normal']:
1813 1818 acceptable += 'nm'
1814 1819 if opts[r'added']:
1815 1820 acceptable += 'a'
1816 1821 if opts[r'removed']:
1817 1822 acceptable += 'r'
1818 1823 cwd = repo.getcwd()
1819 1824 if not specs:
1820 1825 specs = ['.']
1821 1826
1822 1827 files, dirs = set(), set()
1823 1828 for spec in specs:
1824 1829 f, d = complete(spec, acceptable or 'nmar')
1825 1830 files.update(f)
1826 1831 dirs.update(d)
1827 1832 files.update(dirs)
1828 1833 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1829 1834 ui.write('\n')
1830 1835
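# A minimal sketch of the per-segment completion done by complete() in
# debugpathcomplete above (hypothetical dirstate contents, illustration only):
# without --full, paths below the next separator collapse into a directory
# entry while plain files are kept as-is.
#
#     tracked: b'foo/bar', b'foo/baz', b'fish'   (all in state 'n')
#     spec:    b'f'
#     result:  dirs  == {b'foo'}
#              files == {b'fish'}
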
1831 1836 @command('debugpathcopies',
1832 1837 cmdutil.walkopts,
1833 1838 'hg debugpathcopies REV1 REV2 [FILE]',
1834 1839 inferrepo=True)
1835 1840 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1836 1841 """show copies between two revisions"""
1837 1842 ctx1 = scmutil.revsingle(repo, rev1)
1838 1843 ctx2 = scmutil.revsingle(repo, rev2)
1839 1844 m = scmutil.match(ctx1, pats, opts)
1840 1845 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1841 1846 ui.write('%s -> %s\n' % (src, dst))
1842 1847
1843 1848 @command('debugpeer', [], _('PATH'), norepo=True)
1844 1849 def debugpeer(ui, path):
1845 1850 """establish a connection to a peer repository"""
1846 1851 # Always enable peer request logging. Requires --debug to display
1847 1852 # though.
1848 1853 overrides = {
1849 1854 ('devel', 'debug.peer-request'): True,
1850 1855 }
1851 1856
1852 1857 with ui.configoverride(overrides):
1853 1858 peer = hg.peer(ui, {}, path)
1854 1859
1855 1860 local = peer.local() is not None
1856 1861 canpush = peer.canpush()
1857 1862
1858 1863 ui.write(_('url: %s\n') % peer.url())
1859 1864 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1860 1865 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1861 1866
1862 1867 @command('debugpickmergetool',
1863 1868 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1864 1869 ('', 'changedelete', None, _('emulate merging change and delete')),
1865 1870 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1866 1871 _('[PATTERN]...'),
1867 1872 inferrepo=True)
1868 1873 def debugpickmergetool(ui, repo, *pats, **opts):
1869 1874 """examine which merge tool is chosen for specified file
1870 1875
1871 1876 As described in :hg:`help merge-tools`, Mercurial examines
1872 1877 configurations below in this order to decide which merge tool is
1873 1878 chosen for the specified file.
1874 1879
1875 1880 1. ``--tool`` option
1876 1881 2. ``HGMERGE`` environment variable
1877 1882 3. configurations in ``merge-patterns`` section
1878 1883 4. configuration of ``ui.merge``
1879 1884 5. configurations in ``merge-tools`` section
1880 1885 6. ``hgmerge`` tool (for historical reasons only)
1881 1886 7. default tool for fallback (``:merge`` or ``:prompt``)
1882 1887
1883 1888 This command writes out examination result in the style below::
1884 1889
1885 1890 FILE = MERGETOOL
1886 1891
1887 1892 By default, all files known in the first parent context of the
1888 1893 working directory are examined. Use file patterns and/or -I/-X
1889 1894 options to limit target files. -r/--rev is also useful to examine
1890 1895 files in another context without actually updating to it.
1891 1896
1892 1897 With --debug, this command shows warning messages while matching
1893 1898 against ``merge-patterns`` and so on, too. It is recommended to
1894 1899 use this option with explicit file patterns and/or -I/-X options,
1895 1900 because this option increases the amount of output per file according
1896 1901 to configurations in hgrc.
1897 1902
1898 1903 With -v/--verbose, this command shows configurations below at
1899 1904 first (only if specified).
1900 1905
1901 1906 - ``--tool`` option
1902 1907 - ``HGMERGE`` environment variable
1903 1908 - configuration of ``ui.merge``
1904 1909
1905 1910 If a merge tool is chosen before matching against
1906 1911 ``merge-patterns``, this command can't show any helpful
1907 1912 information, even with --debug. In such a case, the information above
1908 1913 is useful for understanding why a merge tool was chosen.
1909 1914 """
1910 1915 opts = pycompat.byteskwargs(opts)
1911 1916 overrides = {}
1912 1917 if opts['tool']:
1913 1918 overrides[('ui', 'forcemerge')] = opts['tool']
1914 1919 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1915 1920
1916 1921 with ui.configoverride(overrides, 'debugmergepatterns'):
1917 1922 hgmerge = encoding.environ.get("HGMERGE")
1918 1923 if hgmerge is not None:
1919 1924 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1920 1925 uimerge = ui.config("ui", "merge")
1921 1926 if uimerge:
1922 1927 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1923 1928
1924 1929 ctx = scmutil.revsingle(repo, opts.get('rev'))
1925 1930 m = scmutil.match(ctx, pats, opts)
1926 1931 changedelete = opts['changedelete']
1927 1932 for path in ctx.walk(m):
1928 1933 fctx = ctx[path]
1929 1934 try:
1930 1935 if not ui.debugflag:
1931 1936 ui.pushbuffer(error=True)
1932 1937 tool, toolpath = filemerge._picktool(repo, ui, path,
1933 1938 fctx.isbinary(),
1934 1939 'l' in fctx.flags(),
1935 1940 changedelete)
1936 1941 finally:
1937 1942 if not ui.debugflag:
1938 1943 ui.popbuffer()
1939 1944 ui.write(('%s = %s\n') % (path, tool))
1940 1945
1941 1946 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1942 1947 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1943 1948 '''access the pushkey key/value protocol
1944 1949
1945 1950 With two args, list the keys in the given namespace.
1946 1951
1947 1952 With five args, set a key to new if it currently is set to old.
1948 1953 Reports success or failure.
1949 1954 '''
1950 1955
1951 1956 target = hg.peer(ui, {}, repopath)
1952 1957 if keyinfo:
1953 1958 key, old, new = keyinfo
1954 1959 with target.commandexecutor() as e:
1955 1960 r = e.callcommand('pushkey', {
1956 1961 'namespace': namespace,
1957 1962 'key': key,
1958 1963 'old': old,
1959 1964 'new': new,
1960 1965 }).result()
1961 1966
1962 1967 ui.status(pycompat.bytestr(r) + '\n')
1963 1968 return not r
1964 1969 else:
1965 1970 for k, v in sorted(target.listkeys(namespace).iteritems()):
1966 1971 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1967 1972 stringutil.escapestr(v)))
1968 1973
1969 1974 @command('debugpvec', [], _('A B'))
1970 1975 def debugpvec(ui, repo, a, b=None):
1971 1976 ca = scmutil.revsingle(repo, a)
1972 1977 cb = scmutil.revsingle(repo, b)
1973 1978 pa = pvec.ctxpvec(ca)
1974 1979 pb = pvec.ctxpvec(cb)
1975 1980 if pa == pb:
1976 1981 rel = "="
1977 1982 elif pa > pb:
1978 1983 rel = ">"
1979 1984 elif pa < pb:
1980 1985 rel = "<"
1981 1986 elif pa | pb:
1982 1987 rel = "|"
1983 1988 ui.write(_("a: %s\n") % pa)
1984 1989 ui.write(_("b: %s\n") % pb)
1985 1990 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1986 1991 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1987 1992 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1988 1993 pa.distance(pb), rel))
1989 1994
1990 1995 @command('debugrebuilddirstate|debugrebuildstate',
1991 1996 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1992 1997 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1993 1998 'the working copy parent')),
1994 1999 ],
1995 2000 _('[-r REV]'))
1996 2001 def debugrebuilddirstate(ui, repo, rev, **opts):
1997 2002 """rebuild the dirstate as it would look like for the given revision
1998 2003
1999 2004 If no revision is specified the first current parent will be used.
2000 2005
2001 2006 The dirstate will be set to the files of the given revision.
2002 2007 The actual working directory content or existing dirstate
2003 2008 information such as adds or removes is not considered.
2004 2009
2005 2010 ``minimal`` will only rebuild the dirstate status for files that claim to be
2006 2011 tracked but are not in the parent manifest, or that exist in the parent
2007 2012 manifest but are not in the dirstate. It will not change adds, removes, or
2008 2013 modified files that are in the working copy parent.
2009 2014
2010 2015 One use of this command is to make the next :hg:`status` invocation
2011 2016 check the actual file content.
2012 2017 """
2013 2018 ctx = scmutil.revsingle(repo, rev)
2014 2019 with repo.wlock():
2015 2020 dirstate = repo.dirstate
2016 2021 changedfiles = None
2017 2022 # See command doc for what minimal does.
2018 2023 if opts.get(r'minimal'):
2019 2024 manifestfiles = set(ctx.manifest().keys())
2020 2025 dirstatefiles = set(dirstate)
2021 2026 manifestonly = manifestfiles - dirstatefiles
2022 2027 dsonly = dirstatefiles - manifestfiles
2023 2028 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2024 2029 changedfiles = manifestonly | dsnotadded
2025 2030
2026 2031 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2027 2032
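# A minimal sketch of the --minimal computation in debugrebuilddirstate above
# (hypothetical file sets, illustration only): only files present in exactly
# one of the manifest and the dirstate are rebuilt, and dirstate-only files
# in the 'a' (added) state are left untouched.
#
#     manifestfiles = {b'a', b'b'}
#     dirstatefiles = {b'b', b'c', b'd'}    # b'd' is marked 'a' (added)
#     manifestonly  = {b'a'}
#     dsnotadded    = {b'c'}
#     changedfiles  = {b'a', b'c'}
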
2028 2033 @command('debugrebuildfncache', [], '')
2029 2034 def debugrebuildfncache(ui, repo):
2030 2035 """rebuild the fncache file"""
2031 2036 repair.rebuildfncache(ui, repo)
2032 2037
2033 2038 @command('debugrename',
2034 2039 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2035 2040 _('[-r REV] [FILE]...'))
2036 2041 def debugrename(ui, repo, *pats, **opts):
2037 2042 """dump rename information"""
2038 2043
2039 2044 opts = pycompat.byteskwargs(opts)
2040 2045 ctx = scmutil.revsingle(repo, opts.get('rev'))
2041 2046 m = scmutil.match(ctx, pats, opts)
2042 2047 for abs in ctx.walk(m):
2043 2048 fctx = ctx[abs]
2044 2049 o = fctx.filelog().renamed(fctx.filenode())
2045 2050 rel = repo.pathto(abs)
2046 2051 if o:
2047 2052 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2048 2053 else:
2049 2054 ui.write(_("%s not renamed\n") % rel)
2050 2055
2051 2056 @command('debugrevlog', cmdutil.debugrevlogopts +
2052 2057 [('d', 'dump', False, _('dump index data'))],
2053 2058 _('-c|-m|FILE'),
2054 2059 optionalrepo=True)
2055 2060 def debugrevlog(ui, repo, file_=None, **opts):
2056 2061 """show data and statistics about a revlog"""
2057 2062 opts = pycompat.byteskwargs(opts)
2058 2063 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2059 2064
2060 2065 if opts.get("dump"):
2061 2066 numrevs = len(r)
2062 2067 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2063 2068 " rawsize totalsize compression heads chainlen\n"))
2064 2069 ts = 0
2065 2070 heads = set()
2066 2071
2067 2072 for rev in pycompat.xrange(numrevs):
2068 2073 dbase = r.deltaparent(rev)
2069 2074 if dbase == -1:
2070 2075 dbase = rev
2071 2076 cbase = r.chainbase(rev)
2072 2077 clen = r.chainlen(rev)
2073 2078 p1, p2 = r.parentrevs(rev)
2074 2079 rs = r.rawsize(rev)
2075 2080 ts = ts + rs
2076 2081 heads -= set(r.parentrevs(rev))
2077 2082 heads.add(rev)
2078 2083 try:
2079 2084 compression = ts / r.end(rev)
2080 2085 except ZeroDivisionError:
2081 2086 compression = 0
2082 2087 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2083 2088 "%11d %5d %8d\n" %
2084 2089 (rev, p1, p2, r.start(rev), r.end(rev),
2085 2090 r.start(dbase), r.start(cbase),
2086 2091 r.start(p1), r.start(p2),
2087 2092 rs, ts, compression, len(heads), clen))
2088 2093 return 0
2089 2094
2090 2095 v = r.version
2091 2096 format = v & 0xFFFF
2092 2097 flags = []
2093 2098 gdelta = False
2094 2099 if v & revlog.FLAG_INLINE_DATA:
2095 2100 flags.append('inline')
2096 2101 if v & revlog.FLAG_GENERALDELTA:
2097 2102 gdelta = True
2098 2103 flags.append('generaldelta')
2099 2104 if not flags:
2100 2105 flags = ['(none)']
2101 2106
2102 2107 ### tracks merge vs single parent
2103 2108 nummerges = 0
2104 2109
2105 2110 ### tracks the ways deltas are built
2106 2111 # nodelta
2107 2112 numempty = 0
2108 2113 numemptytext = 0
2109 2114 numemptydelta = 0
2110 2115 # full file content
2111 2116 numfull = 0
2112 2117 # intermediate snapshot against a prior snapshot
2113 2118 numsemi = 0
2114 2119 # snapshot count per depth
2115 2120 numsnapdepth = collections.defaultdict(lambda: 0)
2116 2121 # delta against previous revision
2117 2122 numprev = 0
2118 2123 # delta against first or second parent (not prev)
2119 2124 nump1 = 0
2120 2125 nump2 = 0
2121 2126 # delta against neither prev nor parents
2122 2127 numother = 0
2123 2128 # delta against prev that are also first or second parent
2124 2129 # (details of `numprev`)
2125 2130 nump1prev = 0
2126 2131 nump2prev = 0
2127 2132
2128 2133 # data about delta chain of each revs
2129 2134 chainlengths = []
2130 2135 chainbases = []
2131 2136 chainspans = []
2132 2137
2133 2138 # data about each revision
2134 2139 datasize = [None, 0, 0]
2135 2140 fullsize = [None, 0, 0]
2136 2141 semisize = [None, 0, 0]
2137 2142 # snapshot count per depth
2138 2143 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2139 2144 deltasize = [None, 0, 0]
2140 2145 chunktypecounts = {}
2141 2146 chunktypesizes = {}
2142 2147
2143 2148 def addsize(size, l):
2144 2149 if l[0] is None or size < l[0]:
2145 2150 l[0] = size
2146 2151 if size > l[1]:
2147 2152 l[1] = size
2148 2153 l[2] += size
2149 2154
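    # A minimal sketch of the addsize() accumulator above (hypothetical
    # sizes, illustration only): it maintains a running [min, max, total].
    #
    #     l = [None, 0, 0]
    #     addsize(5, l)   # -> [5, 5, 5]
    #     addsize(3, l)   # -> [3, 5, 8]
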
2150 2155 numrevs = len(r)
2151 2156 for rev in pycompat.xrange(numrevs):
2152 2157 p1, p2 = r.parentrevs(rev)
2153 2158 delta = r.deltaparent(rev)
2154 2159 if format > 0:
2155 2160 addsize(r.rawsize(rev), datasize)
2156 2161 if p2 != nullrev:
2157 2162 nummerges += 1
2158 2163 size = r.length(rev)
2159 2164 if delta == nullrev:
2160 2165 chainlengths.append(0)
2161 2166 chainbases.append(r.start(rev))
2162 2167 chainspans.append(size)
2163 2168 if size == 0:
2164 2169 numempty += 1
2165 2170 numemptytext += 1
2166 2171 else:
2167 2172 numfull += 1
2168 2173 numsnapdepth[0] += 1
2169 2174 addsize(size, fullsize)
2170 2175 addsize(size, snapsizedepth[0])
2171 2176 else:
2172 2177 chainlengths.append(chainlengths[delta] + 1)
2173 2178 baseaddr = chainbases[delta]
2174 2179 revaddr = r.start(rev)
2175 2180 chainbases.append(baseaddr)
2176 2181 chainspans.append((revaddr - baseaddr) + size)
2177 2182 if size == 0:
2178 2183 numempty += 1
2179 2184 numemptydelta += 1
2180 2185 elif r.issnapshot(rev):
2181 2186 addsize(size, semisize)
2182 2187 numsemi += 1
2183 2188 depth = r.snapshotdepth(rev)
2184 2189 numsnapdepth[depth] += 1
2185 2190 addsize(size, snapsizedepth[depth])
2186 2191 else:
2187 2192 addsize(size, deltasize)
2188 2193 if delta == rev - 1:
2189 2194 numprev += 1
2190 2195 if delta == p1:
2191 2196 nump1prev += 1
2192 2197 elif delta == p2:
2193 2198 nump2prev += 1
2194 2199 elif delta == p1:
2195 2200 nump1 += 1
2196 2201 elif delta == p2:
2197 2202 nump2 += 1
2198 2203 elif delta != nullrev:
2199 2204 numother += 1
2200 2205
2201 2206 # Obtain data on the raw chunks in the revlog.
2202 2207 if util.safehasattr(r, '_getsegmentforrevs'):
2203 2208 segment = r._getsegmentforrevs(rev, rev)[1]
2204 2209 else:
2205 2210 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2206 2211 if segment:
2207 2212 chunktype = bytes(segment[0:1])
2208 2213 else:
2209 2214 chunktype = 'empty'
2210 2215
2211 2216 if chunktype not in chunktypecounts:
2212 2217 chunktypecounts[chunktype] = 0
2213 2218 chunktypesizes[chunktype] = 0
2214 2219
2215 2220 chunktypecounts[chunktype] += 1
2216 2221 chunktypesizes[chunktype] += size
2217 2222
2218 2223 # Adjust size min value for empty cases
2219 2224 for size in (datasize, fullsize, semisize, deltasize):
2220 2225 if size[0] is None:
2221 2226 size[0] = 0
2222 2227
2223 2228 numdeltas = numrevs - numfull - numempty - numsemi
2224 2229 numoprev = numprev - nump1prev - nump2prev
2225 2230 totalrawsize = datasize[2]
2226 2231 datasize[2] /= numrevs
2227 2232 fulltotal = fullsize[2]
2228 2233 fullsize[2] /= numfull
2229 2234 semitotal = semisize[2]
2230 2235 snaptotal = {}
2231 2236 if numsemi > 0:
2232 2237 semisize[2] /= numsemi
2233 2238 for depth in snapsizedepth:
2234 2239 snaptotal[depth] = snapsizedepth[depth][2]
2235 2240 snapsizedepth[depth][2] /= numsnapdepth[depth]
2236 2241
2237 2242 deltatotal = deltasize[2]
2238 2243 if numdeltas > 0:
2239 2244 deltasize[2] /= numdeltas
2240 2245 totalsize = fulltotal + semitotal + deltatotal
2241 2246 avgchainlen = sum(chainlengths) / numrevs
2242 2247 maxchainlen = max(chainlengths)
2243 2248 maxchainspan = max(chainspans)
2244 2249 compratio = 1
2245 2250 if totalsize:
2246 2251 compratio = totalrawsize / totalsize
2247 2252
2248 2253 basedfmtstr = '%%%dd\n'
2249 2254 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2250 2255
2251 2256 def dfmtstr(max):
2252 2257 return basedfmtstr % len(str(max))
2253 2258 def pcfmtstr(max, padding=0):
2254 2259 return basepcfmtstr % (len(str(max)), ' ' * padding)
2255 2260
2256 2261 def pcfmt(value, total):
2257 2262 if total:
2258 2263 return (value, 100 * float(value) / total)
2259 2264 else:
2260 2265 return value, 100.0
2261 2266
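    # A minimal sketch of pcfmt() above (hypothetical numbers, illustration
    # only): it pairs a value with its percentage of the total for the
    # "value (percent%)" columns written below, and treats an empty total
    # as 100%.
    #
    #     pcfmt(50, 200)  # -> (50, 25.0)
    #     pcfmt(7, 0)     # -> (7, 100.0)
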
2262 2267 ui.write(('format : %d\n') % format)
2263 2268 ui.write(('flags : %s\n') % ', '.join(flags))
2264 2269
2265 2270 ui.write('\n')
2266 2271 fmt = pcfmtstr(totalsize)
2267 2272 fmt2 = dfmtstr(totalsize)
2268 2273 ui.write(('revisions : ') + fmt2 % numrevs)
2269 2274 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2270 2275 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2271 2276 ui.write(('revisions : ') + fmt2 % numrevs)
2272 2277 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2273 2278 ui.write((' text : ')
2274 2279 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2275 2280 ui.write((' delta : ')
2276 2281 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2277 2282 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2278 2283 for depth in sorted(numsnapdepth):
2279 2284 ui.write((' lvl-%-3d : ' % depth)
2280 2285 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2281 2286 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2282 2287 ui.write(('revision size : ') + fmt2 % totalsize)
2283 2288 ui.write((' snapshot : ')
2284 2289 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2285 2290 for depth in sorted(numsnapdepth):
2286 2291 ui.write((' lvl-%-3d : ' % depth)
2287 2292 + fmt % pcfmt(snaptotal[depth], totalsize))
2288 2293 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2289 2294
2290 2295 def fmtchunktype(chunktype):
2291 2296 if chunktype == 'empty':
2292 2297 return ' %s : ' % chunktype
2293 2298 elif chunktype in pycompat.bytestr(string.ascii_letters):
2294 2299 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2295 2300 else:
2296 2301 return ' 0x%s : ' % hex(chunktype)
2297 2302
2298 2303 ui.write('\n')
2299 2304 ui.write(('chunks : ') + fmt2 % numrevs)
2300 2305 for chunktype in sorted(chunktypecounts):
2301 2306 ui.write(fmtchunktype(chunktype))
2302 2307 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2303 2308 ui.write(('chunks size : ') + fmt2 % totalsize)
2304 2309 for chunktype in sorted(chunktypecounts):
2305 2310 ui.write(fmtchunktype(chunktype))
2306 2311 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2307 2312
2308 2313 ui.write('\n')
2309 2314 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2310 2315 ui.write(('avg chain length : ') + fmt % avgchainlen)
2311 2316 ui.write(('max chain length : ') + fmt % maxchainlen)
2312 2317 ui.write(('max chain reach : ') + fmt % maxchainspan)
2313 2318 ui.write(('compression ratio : ') + fmt % compratio)
2314 2319
2315 2320 if format > 0:
2316 2321 ui.write('\n')
2317 2322 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2318 2323 % tuple(datasize))
2319 2324 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2320 2325 % tuple(fullsize))
2321 2326 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2322 2327 % tuple(semisize))
2323 2328 for depth in sorted(snapsizedepth):
2324 2329 if depth == 0:
2325 2330 continue
2326 2331 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2327 2332 % ((depth,) + tuple(snapsizedepth[depth])))
2328 2333 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2329 2334 % tuple(deltasize))
2330 2335
2331 2336 if numdeltas > 0:
2332 2337 ui.write('\n')
2333 2338 fmt = pcfmtstr(numdeltas)
2334 2339 fmt2 = pcfmtstr(numdeltas, 4)
2335 2340 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2336 2341 if numprev > 0:
2337 2342 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2338 2343 numprev))
2339 2344 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2340 2345 numprev))
2341 2346 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2342 2347 numprev))
2343 2348 if gdelta:
2344 2349 ui.write(('deltas against p1 : ')
2345 2350 + fmt % pcfmt(nump1, numdeltas))
2346 2351 ui.write(('deltas against p2 : ')
2347 2352 + fmt % pcfmt(nump2, numdeltas))
2348 2353 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2349 2354 numdeltas))
2350 2355
2351 2356 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2352 2357 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2353 2358 _('[-f FORMAT] -c|-m|FILE'),
2354 2359 optionalrepo=True)
2355 2360 def debugrevlogindex(ui, repo, file_=None, **opts):
2356 2361 """dump the contents of a revlog index"""
2357 2362 opts = pycompat.byteskwargs(opts)
2358 2363 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2359 2364 format = opts.get('format', 0)
2360 2365 if format not in (0, 1):
2361 2366 raise error.Abort(_("unknown format %d") % format)
2362 2367
2363 2368 if ui.debugflag:
2364 2369 shortfn = hex
2365 2370 else:
2366 2371 shortfn = short
2367 2372
2368 2373 # There might not be anything in r, so have a sane default
2369 2374 idlen = 12
2370 2375 for i in r:
2371 2376 idlen = len(shortfn(r.node(i)))
2372 2377 break
2373 2378
2374 2379 if format == 0:
2375 2380 if ui.verbose:
2376 2381 ui.write((" rev offset length linkrev"
2377 2382 " %s %s p2\n") % ("nodeid".ljust(idlen),
2378 2383 "p1".ljust(idlen)))
2379 2384 else:
2380 2385 ui.write((" rev linkrev %s %s p2\n") % (
2381 2386 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2382 2387 elif format == 1:
2383 2388 if ui.verbose:
2384 2389 ui.write((" rev flag offset length size link p1"
2385 2390 " p2 %s\n") % "nodeid".rjust(idlen))
2386 2391 else:
2387 2392 ui.write((" rev flag size link p1 p2 %s\n") %
2388 2393 "nodeid".rjust(idlen))
2389 2394
2390 2395 for i in r:
2391 2396 node = r.node(i)
2392 2397 if format == 0:
2393 2398 try:
2394 2399 pp = r.parents(node)
2395 2400 except Exception:
2396 2401 pp = [nullid, nullid]
2397 2402 if ui.verbose:
2398 2403 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2399 2404 i, r.start(i), r.length(i), r.linkrev(i),
2400 2405 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2401 2406 else:
2402 2407 ui.write("% 6d % 7d %s %s %s\n" % (
2403 2408 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2404 2409 shortfn(pp[1])))
2405 2410 elif format == 1:
2406 2411 pr = r.parentrevs(i)
2407 2412 if ui.verbose:
2408 2413 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2409 2414 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2410 2415 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2411 2416 else:
2412 2417 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2413 2418 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2414 2419 shortfn(node)))
2415 2420
2416 2421 @command('debugrevspec',
2417 2422 [('', 'optimize', None,
2418 2423 _('print parsed tree after optimizing (DEPRECATED)')),
2419 2424 ('', 'show-revs', True, _('print list of result revisions (default)')),
2420 2425 ('s', 'show-set', None, _('print internal representation of result set')),
2421 2426 ('p', 'show-stage', [],
2422 2427 _('print parsed tree at the given stage'), _('NAME')),
2423 2428 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2424 2429 ('', 'verify-optimized', False, _('verify optimized result')),
2425 2430 ],
2426 2431 ('REVSPEC'))
2427 2432 def debugrevspec(ui, repo, expr, **opts):
2428 2433 """parse and apply a revision specification
2429 2434
2430 2435 Use -p/--show-stage option to print the parsed tree at the given stages.
2431 2436 Use -p all to print tree at every stage.
2432 2437
2433 2438 Use --no-show-revs option with -s or -p to print only the set
2434 2439 representation or the parsed tree respectively.
2435 2440
2436 2441 Use --verify-optimized to compare the optimized result with the unoptimized
2437 2442 one. Returns 1 if the optimized result differs.
2438 2443 """
2439 2444 opts = pycompat.byteskwargs(opts)
2440 2445 aliases = ui.configitems('revsetalias')
2441 2446 stages = [
2442 2447 ('parsed', lambda tree: tree),
2443 2448 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2444 2449 ui.warn)),
2445 2450 ('concatenated', revsetlang.foldconcat),
2446 2451 ('analyzed', revsetlang.analyze),
2447 2452 ('optimized', revsetlang.optimize),
2448 2453 ]
2449 2454 if opts['no_optimized']:
2450 2455 stages = stages[:-1]
2451 2456 if opts['verify_optimized'] and opts['no_optimized']:
2452 2457 raise error.Abort(_('cannot use --verify-optimized with '
2453 2458 '--no-optimized'))
2454 2459 stagenames = set(n for n, f in stages)
2455 2460
2456 2461 showalways = set()
2457 2462 showchanged = set()
2458 2463 if ui.verbose and not opts['show_stage']:
2459 2464 # show parsed tree by --verbose (deprecated)
2460 2465 showalways.add('parsed')
2461 2466 showchanged.update(['expanded', 'concatenated'])
2462 2467 if opts['optimize']:
2463 2468 showalways.add('optimized')
2464 2469 if opts['show_stage'] and opts['optimize']:
2465 2470 raise error.Abort(_('cannot use --optimize with --show-stage'))
2466 2471 if opts['show_stage'] == ['all']:
2467 2472 showalways.update(stagenames)
2468 2473 else:
2469 2474 for n in opts['show_stage']:
2470 2475 if n not in stagenames:
2471 2476 raise error.Abort(_('invalid stage name: %s') % n)
2472 2477 showalways.update(opts['show_stage'])
2473 2478
2474 2479 treebystage = {}
2475 2480 printedtree = None
2476 2481 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2477 2482 for n, f in stages:
2478 2483 treebystage[n] = tree = f(tree)
2479 2484 if n in showalways or (n in showchanged and tree != printedtree):
2480 2485 if opts['show_stage'] or n != 'parsed':
2481 2486 ui.write(("* %s:\n") % n)
2482 2487 ui.write(revsetlang.prettyformat(tree), "\n")
2483 2488 printedtree = tree
2484 2489
2485 2490 if opts['verify_optimized']:
2486 2491 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2487 2492 brevs = revset.makematcher(treebystage['optimized'])(repo)
2488 2493 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2489 2494 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2490 2495 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2491 2496 arevs = list(arevs)
2492 2497 brevs = list(brevs)
2493 2498 if arevs == brevs:
2494 2499 return 0
2495 2500 ui.write(('--- analyzed\n'), label='diff.file_a')
2496 2501 ui.write(('+++ optimized\n'), label='diff.file_b')
2497 2502 sm = difflib.SequenceMatcher(None, arevs, brevs)
2498 2503 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2499 2504 if tag in (r'delete', r'replace'):
2500 2505 for c in arevs[alo:ahi]:
2501 2506 ui.write('-%d\n' % c, label='diff.deleted')
2502 2507 if tag in (r'insert', r'replace'):
2503 2508 for c in brevs[blo:bhi]:
2504 2509 ui.write('+%d\n' % c, label='diff.inserted')
2505 2510 if tag == r'equal':
2506 2511 for c in arevs[alo:ahi]:
2507 2512 ui.write(' %d\n' % c)
2508 2513 return 1
2509 2514
2510 2515 func = revset.makematcher(tree)
2511 2516 revs = func(repo)
2512 2517 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2513 2518 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2514 2519 if not opts['show_revs']:
2515 2520 return
2516 2521 for c in revs:
2517 2522 ui.write("%d\n" % c)
2518 2523
2519 2524 @command('debugserve', [
2520 2525 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2521 2526 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2522 2527 ('', 'logiofile', '', _('file to log server I/O to')),
2523 2528 ], '')
2524 2529 def debugserve(ui, repo, **opts):
2525 2530 """run a server with advanced settings
2526 2531
2527 2532 This command is similar to :hg:`serve`. It exists partially as a
2528 2533 workaround to the fact that ``hg serve --stdio`` must have specific
2529 2534 arguments for security reasons.
2530 2535 """
2531 2536 opts = pycompat.byteskwargs(opts)
2532 2537
2533 2538 if not opts['sshstdio']:
2534 2539 raise error.Abort(_('only --sshstdio is currently supported'))
2535 2540
2536 2541 logfh = None
2537 2542
2538 2543 if opts['logiofd'] and opts['logiofile']:
2539 2544 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2540 2545
2541 2546 if opts['logiofd']:
2542 2547 # Line buffered because output is line based.
2543 2548 try:
2544 2549 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2545 2550 except OSError as e:
2546 2551 if e.errno != errno.ESPIPE:
2547 2552 raise
2548 2553 # can't seek a pipe, so `ab` mode fails on py3
2549 2554 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2550 2555 elif opts['logiofile']:
2551 2556 logfh = open(opts['logiofile'], 'ab', 1)
2552 2557
2553 2558 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2554 2559 s.serve_forever()
2555 2560
2556 2561 @command('debugsetparents', [], _('REV1 [REV2]'))
2557 2562 def debugsetparents(ui, repo, rev1, rev2=None):
2558 2563 """manually set the parents of the current working directory
2559 2564
2560 2565 This is useful for writing repository conversion tools, but should
2561 2566 be used with care. For example, neither the working directory nor the
2562 2567 dirstate is updated, so file status may be incorrect after running this
2563 2568 command.
2564 2569
2565 2570 Returns 0 on success.
2566 2571 """
2567 2572
2568 2573 node1 = scmutil.revsingle(repo, rev1).node()
2569 2574 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2570 2575
2571 2576 with repo.wlock():
2572 2577 repo.setparents(node1, node2)
2573 2578
2574 2579 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2575 2580 def debugssl(ui, repo, source=None, **opts):
2576 2581 '''test a secure connection to a server
2577 2582
2578 2583 This builds the certificate chain for the server on Windows, installing the
2579 2584 missing intermediates and trusted root via Windows Update if necessary. It
2580 2585 does nothing on other platforms.
2581 2586
2582 2587 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2583 2588 that server is used. See :hg:`help urls` for more information.
2584 2589
2585 2590 If the update succeeds, retry the original operation. Otherwise, the cause
2586 2591 of the SSL error is likely another issue.
2587 2592 '''
2588 2593 if not pycompat.iswindows:
2589 2594 raise error.Abort(_('certificate chain building is only possible on '
2590 2595 'Windows'))
2591 2596
2592 2597 if not source:
2593 2598 if not repo:
2594 2599 raise error.Abort(_("there is no Mercurial repository here, and no "
2595 2600 "server specified"))
2596 2601 source = "default"
2597 2602
2598 2603 source, branches = hg.parseurl(ui.expandpath(source))
2599 2604 url = util.url(source)
2600 2605
2601 2606 defaultport = {'https': 443, 'ssh': 22}
2602 2607 if url.scheme in defaultport:
2603 2608 try:
2604 2609 addr = (url.host, int(url.port or defaultport[url.scheme]))
2605 2610 except ValueError:
2606 2611 raise error.Abort(_("malformed port number in URL"))
2607 2612 else:
2608 2613 raise error.Abort(_("only https and ssh connections are supported"))
2609 2614
2610 2615 from . import win32
2611 2616
2612 2617 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2613 2618 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2614 2619
2615 2620 try:
2616 2621 s.connect(addr)
2617 2622 cert = s.getpeercert(True)
2618 2623
2619 2624 ui.status(_('checking the certificate chain for %s\n') % url.host)
2620 2625
2621 2626 complete = win32.checkcertificatechain(cert, build=False)
2622 2627
2623 2628 if not complete:
2624 2629 ui.status(_('certificate chain is incomplete, updating... '))
2625 2630
2626 2631 if not win32.checkcertificatechain(cert):
2627 2632 ui.status(_('failed.\n'))
2628 2633 else:
2629 2634 ui.status(_('done.\n'))
2630 2635 else:
2631 2636 ui.status(_('full certificate chain is available\n'))
2632 2637 finally:
2633 2638 s.close()
2634 2639
2635 2640 @command('debugsub',
2636 2641 [('r', 'rev', '',
2637 2642 _('revision to check'), _('REV'))],
2638 2643 _('[-r REV] [REV]'))
2639 2644 def debugsub(ui, repo, rev=None):
2640 2645 ctx = scmutil.revsingle(repo, rev, None)
2641 2646 for k, v in sorted(ctx.substate.items()):
2642 2647 ui.write(('path %s\n') % k)
2643 2648 ui.write((' source %s\n') % v[0])
2644 2649 ui.write((' revision %s\n') % v[1])
2645 2650
2646 2651 @command('debugsuccessorssets',
2647 2652 [('', 'closest', False, _('return closest successors sets only'))],
2648 2653 _('[REV]'))
2649 2654 def debugsuccessorssets(ui, repo, *revs, **opts):
2650 2655 """show set of successors for revision
2651 2656
2652 2657 A successors set of changeset A is a consistent group of revisions that
2653 2658 succeed A. It contains non-obsolete changesets only unless the closest
2654 2659 successors set is requested.
2655 2660
2656 2661 In most cases a changeset A has a single successors set containing a single
2657 2662 successor (changeset A replaced by A').
2658 2663
2659 2664 A changeset that is made obsolete with no successors is called "pruned".
2660 2665 Such changesets have no successors sets at all.
2661 2666
2662 2667 A changeset that has been "split" will have a successors set containing
2663 2668 more than one successor.
2664 2669
2665 2670 A changeset that has been rewritten in multiple different ways is called
2666 2671 "divergent". Such changesets have multiple successor sets (each of which
2667 2672 may also be split, i.e. have multiple successors).
2668 2673
2669 2674 Results are displayed as follows::
2670 2675
2671 2676 <rev1>
2672 2677 <successors-1A>
2673 2678 <rev2>
2674 2679 <successors-2A>
2675 2680 <successors-2B1> <successors-2B2> <successors-2B3>
2676 2681
2677 2682 Here rev2 has two possible (i.e. divergent) successors sets. The first
2678 2683 holds one element, whereas the second holds three (i.e. the changeset has
2679 2684 been split).
2680 2685 """
2681 2686 # passed to successorssets caching computation from one call to another
2682 2687 cache = {}
2683 2688 ctx2str = bytes
2684 2689 node2str = short
2685 2690 for rev in scmutil.revrange(repo, revs):
2686 2691 ctx = repo[rev]
2687 2692 ui.write('%s\n'% ctx2str(ctx))
2688 2693 for succsset in obsutil.successorssets(repo, ctx.node(),
2689 2694 closest=opts[r'closest'],
2690 2695 cache=cache):
2691 2696 if succsset:
2692 2697 ui.write(' ')
2693 2698 ui.write(node2str(succsset[0]))
2694 2699 for node in succsset[1:]:
2695 2700 ui.write(' ')
2696 2701 ui.write(node2str(node))
2697 2702 ui.write('\n')
2698 2703
2699 2704 @command('debugtemplate',
2700 2705 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2701 2706 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2702 2707 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2703 2708 optionalrepo=True)
2704 2709 def debugtemplate(ui, repo, tmpl, **opts):
2705 2710 """parse and apply a template
2706 2711
2707 2712 If -r/--rev is given, the template is processed as a log template and
2708 2713 applied to the given changesets. Otherwise, it is processed as a generic
2709 2714 template.
2710 2715
2711 2716 Use --verbose to print the parsed tree.
2712 2717 """
2713 2718 revs = None
2714 2719 if opts[r'rev']:
2715 2720 if repo is None:
2716 2721 raise error.RepoError(_('there is no Mercurial repository here '
2717 2722 '(.hg not found)'))
2718 2723 revs = scmutil.revrange(repo, opts[r'rev'])
2719 2724
2720 2725 props = {}
2721 2726 for d in opts[r'define']:
2722 2727 try:
2723 2728 k, v = (e.strip() for e in d.split('=', 1))
2724 2729 if not k or k == 'ui':
2725 2730 raise ValueError
2726 2731 props[k] = v
2727 2732 except ValueError:
2728 2733 raise error.Abort(_('malformed keyword definition: %s') % d)
2729 2734
2730 2735 if ui.verbose:
2731 2736 aliases = ui.configitems('templatealias')
2732 2737 tree = templater.parse(tmpl)
2733 2738 ui.note(templater.prettyformat(tree), '\n')
2734 2739 newtree = templater.expandaliases(tree, aliases)
2735 2740 if newtree != tree:
2736 2741 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2737 2742
2738 2743 if revs is None:
2739 2744 tres = formatter.templateresources(ui, repo)
2740 2745 t = formatter.maketemplater(ui, tmpl, resources=tres)
2741 2746 if ui.verbose:
2742 2747 kwds, funcs = t.symbolsuseddefault()
2743 2748 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2744 2749 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2745 2750 ui.write(t.renderdefault(props))
2746 2751 else:
2747 2752 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2748 2753 if ui.verbose:
2749 2754 kwds, funcs = displayer.t.symbolsuseddefault()
2750 2755 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2751 2756 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2752 2757 for r in revs:
2753 2758 displayer.show(repo[r], **pycompat.strkwargs(props))
2754 2759 displayer.close()
2755 2760
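# A minimal sketch of the -D/--define parsing in debugtemplate above
# (hypothetical values, illustration only): each KEY=VALUE pair is split on
# the first '=' and stripped; an empty key or the reserved name 'ui' is
# rejected as malformed.
#
#     >>> d = b'greeting = hello'
#     >>> k, v = (e.strip() for e in d.split(b'=', 1))
#     >>> (k, v)
#     (b'greeting', b'hello')
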
2756 2761 @command('debuguigetpass', [
2757 2762 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2758 2763 ], _('[-p TEXT]'), norepo=True)
2759 2764 def debuguigetpass(ui, prompt=''):
2760 2765 """show prompt to type password"""
2761 2766 r = ui.getpass(prompt)
2762 2767 ui.write(('response: %s\n') % r)
2763 2768
2764 2769 @command('debuguiprompt', [
2765 2770 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2766 2771 ], _('[-p TEXT]'), norepo=True)
2767 2772 def debuguiprompt(ui, prompt=''):
2768 2773 """show plain prompt"""
2769 2774 r = ui.prompt(prompt)
2770 2775 ui.write(('response: %s\n') % r)
2771 2776
2772 2777 @command('debugupdatecaches', [])
2773 2778 def debugupdatecaches(ui, repo, *pats, **opts):
2774 2779 """warm all known caches in the repository"""
2775 2780 with repo.wlock(), repo.lock():
2776 2781 repo.updatecaches(full=True)
2777 2782
2778 2783 @command('debugupgraderepo', [
2779 2784 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2780 2785 ('', 'run', False, _('performs an upgrade')),
2781 2786 ('', 'backup', True, _('keep the old repository content around')),
2782 2787 ])
2783 2788 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2784 2789 """upgrade a repository to use different features
2785 2790
2786 2791 If no arguments are specified, the repository is evaluated for upgrade
2787 2792 and a list of problems and potential optimizations is printed.
2788 2793
2789 2794 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2790 2795 can be influenced via additional arguments. More details will be provided
2791 2796 by the command output when run without ``--run``.
2792 2797
2793 2798 During the upgrade, the repository will be locked and no writes will be
2794 2799 allowed.
2795 2800
2796 2801 At the end of the upgrade, the repository may not be readable while new
2797 2802 repository data is swapped in. This window will be as long as it takes to
2798 2803 rename some directories inside the ``.hg`` directory. On most machines, this
2799 2804 should complete almost instantaneously and the chances of a consumer being
2800 2805 unable to access the repository should be low.
2801 2806 """
2802 2807 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2803 2808 backup=backup)
2804 2809
2805 2810 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2806 2811 inferrepo=True)
2807 2812 def debugwalk(ui, repo, *pats, **opts):
2808 2813 """show how files match on given patterns"""
2809 2814 opts = pycompat.byteskwargs(opts)
2810 2815 m = scmutil.match(repo[None], pats, opts)
2811 2816 if ui.verbose:
2812 2817 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2813 2818 items = list(repo[None].walk(m))
2814 2819 if not items:
2815 2820 return
2816 2821 f = lambda fn: fn
2817 2822 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2818 2823 f = lambda fn: util.normpath(fn)
2819 2824 fmt = 'f %%-%ds %%-%ds %%s' % (
2820 2825 max([len(abs) for abs in items]),
2821 2826 max([len(repo.pathto(abs)) for abs in items]))
2822 2827 for abs in items:
2823 2828 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2824 2829 ui.write("%s\n" % line.rstrip())
2825 2830
2826 2831 @command('debugwhyunstable', [], _('REV'))
2827 2832 def debugwhyunstable(ui, repo, rev):
2828 2833 """explain instabilities of a changeset"""
2829 2834 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2830 2835 dnodes = ''
2831 2836 if entry.get('divergentnodes'):
2832 2837 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2833 2838 for ctx in entry['divergentnodes']) + ' '
2834 2839 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2835 2840 entry['reason'], entry['node']))
2836 2841
2837 2842 @command('debugwireargs',
2838 2843 [('', 'three', '', 'three'),
2839 2844 ('', 'four', '', 'four'),
2840 2845 ('', 'five', '', 'five'),
2841 2846 ] + cmdutil.remoteopts,
2842 2847 _('REPO [OPTIONS]... [ONE [TWO]]'),
2843 2848 norepo=True)
2844 2849 def debugwireargs(ui, repopath, *vals, **opts):
2845 2850 opts = pycompat.byteskwargs(opts)
2846 2851 repo = hg.peer(ui, opts, repopath)
2847 2852 for opt in cmdutil.remoteopts:
2848 2853 del opts[opt[1]]
2849 2854 args = {}
2850 2855 for k, v in opts.iteritems():
2851 2856 if v:
2852 2857 args[k] = v
2853 2858 args = pycompat.strkwargs(args)
2854 2859 # run twice to check that we don't mess up the stream for the next command
2855 2860 res1 = repo.debugwireargs(*vals, **args)
2856 2861 res2 = repo.debugwireargs(*vals, **args)
2857 2862 ui.write("%s\n" % res1)
2858 2863 if res1 != res2:
2859 2864 ui.warn("%s\n" % res2)
2860 2865
2861 2866 def _parsewirelangblocks(fh):
2862 2867 activeaction = None
2863 2868 blocklines = []
2864 2869 lastindent = 0
2865 2870
2866 2871 for line in fh:
2867 2872 line = line.rstrip()
2868 2873 if not line:
2869 2874 continue
2870 2875
2871 2876 if line.startswith(b'#'):
2872 2877 continue
2873 2878
2874 2879 if not line.startswith(b' '):
2875 2880 # New block. Flush previous one.
2876 2881 if activeaction:
2877 2882 yield activeaction, blocklines
2878 2883
2879 2884 activeaction = line
2880 2885 blocklines = []
2881 2886 lastindent = 0
2882 2887 continue
2883 2888
2884 2889 # Else we start with an indent.
2885 2890
2886 2891 if not activeaction:
2887 2892 raise error.Abort(_('indented line outside of block'))
2888 2893
2889 2894 indent = len(line) - len(line.lstrip())
2890 2895
2891 2896 # If this line is indented more than the last line, concatenate it.
2892 2897 if indent > lastindent and blocklines:
2893 2898 blocklines[-1] += line.lstrip()
2894 2899 else:
2895 2900 blocklines.append(line)
2896 2901 lastindent = indent
2897 2902
2898 2903 # Flush last block.
2899 2904 if activeaction:
2900 2905 yield activeaction, blocklines
2901 2906
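# A minimal sketch of _parsewirelangblocks() above (hypothetical input,
# illustration only): an unindented action line is grouped with the indented
# argument lines that follow it.
#
#     >>> script = [b'command listkeys\n', b'    namespace bookmarks\n']
#     >>> list(_parsewirelangblocks(iter(script)))
#     [(b'command listkeys', [b'    namespace bookmarks'])]
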
2902 2907 @command('debugwireproto',
2903 2908 [
2904 2909 ('', 'localssh', False, _('start an SSH server for this repo')),
2905 2910 ('', 'peer', '', _('construct a specific version of the peer')),
2906 2911 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2907 2912 ('', 'nologhandshake', False,
2908 2913 _('do not log I/O related to the peer handshake')),
2909 2914 ] + cmdutil.remoteopts,
2910 2915 _('[PATH]'),
2911 2916 optionalrepo=True)
2912 2917 def debugwireproto(ui, repo, path=None, **opts):
2913 2918 """send wire protocol commands to a server
2914 2919
2915 2920 This command can be used to issue wire protocol commands to remote
2916 2921 peers and to debug the raw data being exchanged.
2917 2922
2918 2923 ``--localssh`` will start an SSH server against the current repository
2919 2924 and connect to that. By default, the connection will perform a handshake
2920 2925 and establish an appropriate peer instance.
2921 2926
2922 2927 ``--peer`` can be used to bypass the handshake protocol and construct a
2923 2928 peer instance using the specified class type. Valid values are ``raw``,
2924 2929 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2925 2930 raw data payloads and don't support higher-level command actions.
2926 2931
2927 2932 ``--noreadstderr`` can be used to disable automatic reading from stderr
2928 2933 of the peer (for SSH connections only). Disabling automatic reading of
2929 2934 stderr is useful for making output more deterministic.
2930 2935
2931 2936 Commands are issued via a mini language which is specified via stdin.
2932 2937 The language consists of individual actions to perform. An action is
2933 2938 defined by a block. A block is defined as a line with no leading
2934 2939 space followed by 0 or more lines with leading space. Blocks are
2935 2940 effectively a high-level command with additional metadata.
2936 2941
2937 2942 Lines beginning with ``#`` are ignored.
2938 2943
2939 2944 The following sections denote available actions.
2940 2945
2941 2946 raw
2942 2947 ---
2943 2948
2944 2949 Send raw data to the server.
2945 2950
2946 2951 The block payload contains the raw data to send as one atomic send
2947 2952 operation. The data may not actually be delivered in a single system
2948 2953 call: it depends on the abilities of the transport being used.
2949 2954
2950 2955 Each line in the block is de-indented and concatenated. Then, that
2951 2956 value is evaluated as a Python b'' literal. This allows the use of
2952 2957 backslash escaping, etc.
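
    For instance, a block sending the SSH protocol ``hello`` command as raw
    data could look like::

      raw
          hello\n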
2953 2958
2954 2959 raw+
2955 2960 ----
2956 2961
2957 2962 Behaves like ``raw`` except flushes output afterwards.
2958 2963
2959 2964 command <X>
2960 2965 -----------
2961 2966
2962 2967 Send a request to run a named command, whose name follows the ``command``
2963 2968 string.
2964 2969
2965 2970 Arguments to the command are defined as lines in this block. The format of
2966 2971 each line is ``<key> <value>``. e.g.::
2967 2972
2968 2973 command listkeys
2969 2974 namespace bookmarks
2970 2975
2971 2976 If the value begins with ``eval:``, it will be interpreted as a Python
2972 2977 literal expression. Otherwise values are interpreted as Python b'' literals.
2973 2978 This allows sending complex types and encoding special byte sequences via
2974 2979 backslash escaping.
2975 2980
2976 2981 The following arguments have special meaning:
2977 2982
2978 2983 ``PUSHFILE``
2979 2984 When defined, the *push* mechanism of the peer will be used instead
2980 2985 of the static request-response mechanism and the content of the
2981 2986 file specified in the value of this argument will be sent as the
2982 2987 command payload.
2983 2988
2984 2989 This can be used to submit a local bundle file to the remote.
2985 2990
2986 2991 batchbegin
2987 2992 ----------
2988 2993
2989 2994 Instruct the peer to begin a batched send.
2990 2995
2991 2996 All ``command`` blocks are queued for execution until the next
2992 2997 ``batchsubmit`` block.
2993 2998
2994 2999 batchsubmit
2995 3000 -----------
2996 3001
2997 3002 Submit previously queued ``command`` blocks as a batch request.
2998 3003
2999 3004 This action MUST be paired with a ``batchbegin`` action.
3000 3005
3001 3006 httprequest <method> <path>
3002 3007 ---------------------------
3003 3008
3004 3009 (HTTP peer only)
3005 3010
3006 3011 Send an HTTP request to the peer.
3007 3012
3008 3013 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3009 3014
3010 3015 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3011 3016 headers to add to the request. e.g. ``Accept: foo``.
3012 3017
3013 3018 The following arguments are special:
3014 3019
3015 3020 ``BODYFILE``
3016 3021 The content of the file defined as the value to this argument will be
3017 3022 transferred verbatim as the HTTP request body.
3018 3023
3019 3024 ``frame <type> <flags> <payload>``
3020 3025 Send a unified protocol frame as part of the request body.
3021 3026
3022 3027 All frames will be collected and sent as the body to the HTTP
3023 3028 request.
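
    For instance, a simple GET request with one custom header could be
    issued as::

      httprequest GET api/
          user-agent: test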
3024 3029
3025 3030 close
3026 3031 -----
3027 3032
3028 3033 Close the connection to the server.
3029 3034
3030 3035 flush
3031 3036 -----
3032 3037
3033 3038 Flush data written to the server.
3034 3039
3035 3040 readavailable
3036 3041 -------------
3037 3042
3038 3043 Close the write end of the connection and read all available data from
3039 3044 the server.
3040 3045
3041 3046 If the connection to the server encompasses multiple pipes, we poll both
3042 3047 pipes and read available data.
3043 3048
3044 3049 readline
3045 3050 --------
3046 3051
3047 3052 Read a line of output from the server. If there are multiple output
3048 3053 pipes, reads only the main pipe.
3049 3054
3050 3055 ereadline
3051 3056 ---------
3052 3057
3053 3058 Like ``readline``, but read from the stderr pipe, if available.
3054 3059
3055 3060 read <X>
3056 3061 --------
3057 3062
3058 3063     ``read()`` ``<X>`` bytes from the server's main output pipe.
3059 3064
3060 3065 eread <X>
3061 3066 ---------
3062 3067
3063 3068     ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
3064 3069
3065 3070 Specifying Unified Frame-Based Protocol Frames
3066 3071 ----------------------------------------------
3067 3072
3068 3073 It is possible to emit a *Unified Frame-Based Protocol* by using special
3069 3074 syntax.
3070 3075
3071 3076     A frame is composed of a type, flags, and a payload. These can be parsed
3072 3077 from a string of the form:
3073 3078
3074 3079 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3075 3080
3076 3081 ``request-id`` and ``stream-id`` are integers defining the request and
3077 3082 stream identifiers.
3078 3083
3079 3084 ``type`` can be an integer value for the frame type or the string name
3080 3085 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3081 3086 ``command-name``.
3082 3087
3083 3088 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3084 3089 components. Each component (and there can be just one) can be an integer
3085 3090 or a flag name for stream flags or frame flags, respectively. Values are
3086 3091 resolved to integers and then bitwise OR'd together.
3087 3092
3088 3093 ``payload`` represents the raw frame payload. If it begins with
3089 3094 ``cbor:``, the following string is evaluated as Python code and the
3090 3095 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3091 3096 as a Python byte string literal.
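
    For example, a frame carrying a request for the ``heads`` command on
    stream 1 of request 1 might be written as::

      1 1 stream-begin command-request new cbor:{b'name': b'heads'}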
3092 3097 """
3093 3098 opts = pycompat.byteskwargs(opts)
3094 3099
3095 3100 if opts['localssh'] and not repo:
3096 3101 raise error.Abort(_('--localssh requires a repository'))
3097 3102
3098 3103 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3099 3104 raise error.Abort(_('invalid value for --peer'),
3100 3105                           hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3101 3106
3102 3107 if path and opts['localssh']:
3103 3108 raise error.Abort(_('cannot specify --localssh with an explicit '
3104 3109 'path'))
3105 3110
3106 3111 if ui.interactive():
3107 3112 ui.write(_('(waiting for commands on stdin)\n'))
3108 3113
3109 3114 blocks = list(_parsewirelangblocks(ui.fin))
3110 3115
3111 3116 proc = None
3112 3117 stdin = None
3113 3118 stdout = None
3114 3119 stderr = None
3115 3120 opener = None
3116 3121
3117 3122 if opts['localssh']:
3118 3123 # We start the SSH server in its own process so there is process
3119 3124 # separation. This prevents a whole class of potential bugs around
3120 3125 # shared state from interfering with server operation.
3121 3126 args = procutil.hgcmd() + [
3122 3127 '-R', repo.root,
3123 3128 'debugserve', '--sshstdio',
3124 3129 ]
3125 3130 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3126 3131 stdin=subprocess.PIPE,
3127 3132 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3128 3133 bufsize=0)
3129 3134
3130 3135 stdin = proc.stdin
3131 3136 stdout = proc.stdout
3132 3137 stderr = proc.stderr
3133 3138
3134 3139 # We turn the pipes into observers so we can log I/O.
3135 3140 if ui.verbose or opts['peer'] == 'raw':
3136 3141 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3137 3142 logdata=True)
3138 3143 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3139 3144 logdata=True)
3140 3145 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3141 3146 logdata=True)
3142 3147
3143 3148 # --localssh also implies the peer connection settings.
3144 3149
3145 3150 url = 'ssh://localserver'
3146 3151 autoreadstderr = not opts['noreadstderr']
3147 3152
3148 3153 if opts['peer'] == 'ssh1':
3149 3154 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3150 3155 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3151 3156 None, autoreadstderr=autoreadstderr)
3152 3157 elif opts['peer'] == 'ssh2':
3153 3158 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3154 3159 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3155 3160 None, autoreadstderr=autoreadstderr)
3156 3161 elif opts['peer'] == 'raw':
3157 3162 ui.write(_('using raw connection to peer\n'))
3158 3163 peer = None
3159 3164 else:
3160 3165 ui.write(_('creating ssh peer from handshake results\n'))
3161 3166 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3162 3167 autoreadstderr=autoreadstderr)
3163 3168
3164 3169 elif path:
3165 3170 # We bypass hg.peer() so we can proxy the sockets.
3166 3171 # TODO consider not doing this because we skip
3167 3172 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3168 3173 u = util.url(path)
3169 3174 if u.scheme != 'http':
3170 3175 raise error.Abort(_('only http:// paths are currently supported'))
3171 3176
3172 3177 url, authinfo = u.authinfo()
3173 3178 openerargs = {
3174 3179 r'useragent': b'Mercurial debugwireproto',
3175 3180 }
3176 3181
3177 3182 # Turn pipes/sockets into observers so we can log I/O.
3178 3183 if ui.verbose:
3179 3184 openerargs.update({
3180 3185 r'loggingfh': ui,
3181 3186 r'loggingname': b's',
3182 3187 r'loggingopts': {
3183 3188 r'logdata': True,
3184 3189 r'logdataapis': False,
3185 3190 },
3186 3191 })
3187 3192
3188 3193 if ui.debugflag:
3189 3194 openerargs[r'loggingopts'][r'logdataapis'] = True
3190 3195
3191 3196 # Don't send default headers when in raw mode. This allows us to
3192 3197 # bypass most of the behavior of our URL handling code so we can
3193 3198 # have near complete control over what's sent on the wire.
3194 3199 if opts['peer'] == 'raw':
3195 3200 openerargs[r'sendaccept'] = False
3196 3201
3197 3202 opener = urlmod.opener(ui, authinfo, **openerargs)
3198 3203
3199 3204 if opts['peer'] == 'http2':
3200 3205 ui.write(_('creating http peer for wire protocol version 2\n'))
3201 3206 # We go through makepeer() because we need an API descriptor for
3202 3207 # the peer instance to be useful.
3203 3208 with ui.configoverride({
3204 3209 ('experimental', 'httppeer.advertise-v2'): True}):
3205 3210 if opts['nologhandshake']:
3206 3211 ui.pushbuffer()
3207 3212
3208 3213 peer = httppeer.makepeer(ui, path, opener=opener)
3209 3214
3210 3215 if opts['nologhandshake']:
3211 3216 ui.popbuffer()
3212 3217
3213 3218 if not isinstance(peer, httppeer.httpv2peer):
3214 3219 raise error.Abort(_('could not instantiate HTTP peer for '
3215 3220 'wire protocol version 2'),
3216 3221 hint=_('the server may not have the feature '
3217 3222 'enabled or is not allowing this '
3218 3223 'client version'))
3219 3224
3220 3225 elif opts['peer'] == 'raw':
3221 3226 ui.write(_('using raw connection to peer\n'))
3222 3227 peer = None
3223 3228 elif opts['peer']:
3224 3229 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3225 3230 opts['peer'])
3226 3231 else:
3227 3232 peer = httppeer.makepeer(ui, path, opener=opener)
3228 3233
3229 3234 # We /could/ populate stdin/stdout with sock.makefile()...
3230 3235 else:
3231 3236 raise error.Abort(_('unsupported connection configuration'))
3232 3237
3233 3238 batchedcommands = None
3234 3239
3235 3240 # Now perform actions based on the parsed wire language instructions.
3236 3241 for action, lines in blocks:
3237 3242 if action in ('raw', 'raw+'):
3238 3243 if not stdin:
3239 3244 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3240 3245
3241 3246 # Concatenate the data together.
3242 3247 data = ''.join(l.lstrip() for l in lines)
3243 3248 data = stringutil.unescapestr(data)
3244 3249 stdin.write(data)
3245 3250
3246 3251 if action == 'raw+':
3247 3252 stdin.flush()
3248 3253 elif action == 'flush':
3249 3254 if not stdin:
3250 3255 raise error.Abort(_('cannot call flush on this peer'))
3251 3256 stdin.flush()
3252 3257 elif action.startswith('command'):
3253 3258 if not peer:
3254 3259 raise error.Abort(_('cannot send commands unless peer instance '
3255 3260 'is available'))
3256 3261
3257 3262 command = action.split(' ', 1)[1]
3258 3263
3259 3264 args = {}
3260 3265 for line in lines:
3261 3266 # We need to allow empty values.
3262 3267 fields = line.lstrip().split(' ', 1)
3263 3268 if len(fields) == 1:
3264 3269 key = fields[0]
3265 3270 value = ''
3266 3271 else:
3267 3272 key, value = fields
3268 3273
3269 3274 if value.startswith('eval:'):
3270 3275 value = stringutil.evalpythonliteral(value[5:])
3271 3276 else:
3272 3277 value = stringutil.unescapestr(value)
3273 3278
3274 3279 args[key] = value
3275 3280
3276 3281 if batchedcommands is not None:
3277 3282 batchedcommands.append((command, args))
3278 3283 continue
3279 3284
3280 3285 ui.status(_('sending %s command\n') % command)
3281 3286
3282 3287 if 'PUSHFILE' in args:
3283 3288 with open(args['PUSHFILE'], r'rb') as fh:
3284 3289 del args['PUSHFILE']
3285 3290 res, output = peer._callpush(command, fh,
3286 3291 **pycompat.strkwargs(args))
3287 3292 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3288 3293 ui.status(_('remote output: %s\n') %
3289 3294 stringutil.escapestr(output))
3290 3295 else:
3291 3296 with peer.commandexecutor() as e:
3292 3297 res = e.callcommand(command, args).result()
3293 3298
3294 3299 if isinstance(res, wireprotov2peer.commandresponse):
3295 3300 val = res.objects()
3296 3301 ui.status(_('response: %s\n') %
3297 3302 stringutil.pprint(val, bprefix=True, indent=2))
3298 3303 else:
3299 3304 ui.status(_('response: %s\n') %
3300 3305 stringutil.pprint(res, bprefix=True, indent=2))
3301 3306
3302 3307 elif action == 'batchbegin':
3303 3308 if batchedcommands is not None:
3304 3309 raise error.Abort(_('nested batchbegin not allowed'))
3305 3310
3306 3311 batchedcommands = []
3307 3312 elif action == 'batchsubmit':
3308 3313 # There is a batching API we could go through. But it would be
3309 3314 # difficult to normalize requests into function calls. It is easier
3310 3315 # to bypass this layer and normalize to commands + args.
3311 3316 ui.status(_('sending batch with %d sub-commands\n') %
3312 3317 len(batchedcommands))
3313 3318 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3314 3319 ui.status(_('response #%d: %s\n') %
3315 3320 (i, stringutil.escapestr(chunk)))
3316 3321
3317 3322 batchedcommands = None
3318 3323
3319 3324 elif action.startswith('httprequest '):
3320 3325 if not opener:
3321 3326 raise error.Abort(_('cannot use httprequest without an HTTP '
3322 3327 'peer'))
3323 3328
3324 3329 request = action.split(' ', 2)
3325 3330 if len(request) != 3:
3326 3331 raise error.Abort(_('invalid httprequest: expected format is '
3327 3332                                     '"httprequest <method> <path>"'))
3328 3333
3329 3334 method, httppath = request[1:]
3330 3335 headers = {}
3331 3336 body = None
3332 3337 frames = []
3333 3338 for line in lines:
3334 3339 line = line.lstrip()
3335 3340 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3336 3341 if m:
3337 3342 # Headers need to use native strings.
3338 3343 key = pycompat.strurl(m.group(1))
3339 3344 value = pycompat.strurl(m.group(2))
3340 3345 headers[key] = value
3341 3346 continue
3342 3347
3343 3348 if line.startswith(b'BODYFILE '):
3344 3349                     with open(line.split(b' ', 1)[1], 'rb') as fh:
3345 3350 body = fh.read()
3346 3351 elif line.startswith(b'frame '):
3347 3352 frame = wireprotoframing.makeframefromhumanstring(
3348 3353 line[len(b'frame '):])
3349 3354
3350 3355 frames.append(frame)
3351 3356 else:
3352 3357 raise error.Abort(_('unknown argument to httprequest: %s') %
3353 3358 line)
3354 3359
3355 3360 url = path + httppath
3356 3361
3357 3362 if frames:
3358 3363 body = b''.join(bytes(f) for f in frames)
3359 3364
3360 3365 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3361 3366
3362 3367 # urllib.Request insists on using has_data() as a proxy for
3363 3368 # determining the request method. Override that to use our
3364 3369 # explicitly requested method.
3365 3370 req.get_method = lambda: pycompat.sysstr(method)
3366 3371
3367 3372 try:
3368 3373 res = opener.open(req)
3369 3374 body = res.read()
3370 3375 except util.urlerr.urlerror as e:
3371 3376 # read() method must be called, but only exists in Python 2
3372 3377 getattr(e, 'read', lambda: None)()
3373 3378 continue
3374 3379
3375 3380 ct = res.headers.get(r'Content-Type')
3376 3381 if ct == r'application/mercurial-cbor':
3377 3382 ui.write(_('cbor> %s\n') %
3378 3383 stringutil.pprint(cborutil.decodeall(body),
3379 3384 bprefix=True,
3380 3385 indent=2))
3381 3386
3382 3387 elif action == 'close':
3383 3388 peer.close()
3384 3389 elif action == 'readavailable':
3385 3390 if not stdout or not stderr:
3386 3391 raise error.Abort(_('readavailable not available on this peer'))
3387 3392
3388 3393 stdin.close()
3389 3394 stdout.read()
3390 3395 stderr.read()
3391 3396
3392 3397 elif action == 'readline':
3393 3398 if not stdout:
3394 3399 raise error.Abort(_('readline not available on this peer'))
3395 3400 stdout.readline()
3396 3401 elif action == 'ereadline':
3397 3402 if not stderr:
3398 3403 raise error.Abort(_('ereadline not available on this peer'))
3399 3404 stderr.readline()
3400 3405 elif action.startswith('read '):
3401 3406 count = int(action.split(' ', 1)[1])
3402 3407 if not stdout:
3403 3408 raise error.Abort(_('read not available on this peer'))
3404 3409 stdout.read(count)
3405 3410 elif action.startswith('eread '):
3406 3411 count = int(action.split(' ', 1)[1])
3407 3412 if not stderr:
3408 3413 raise error.Abort(_('eread not available on this peer'))
3409 3414 stderr.read(count)
3410 3415 else:
3411 3416 raise error.Abort(_('unknown action: %s') % action)
3412 3417
3413 3418 if batchedcommands is not None:
3414 3419 raise error.Abort(_('unclosed "batchbegin" request'))
3415 3420
3416 3421 if peer:
3417 3422 peer.close()
3418 3423
3419 3424 if proc:
3420 3425 proc.kill()
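
As a sketch of how the actions documented above combine (illustrative only;
the exact responses depend on the server), a batched request against a local
SSH server could be driven like this:

  $ hg debugwireproto --localssh << EOF
  > batchbegin
  > command heads
  > command listkeys
  >     namespace bookmarks
  > batchsubmit
  > close
  > EOF
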
@@ -1,109 +1,121
1 1 Source bundle was generated with the following script:
2 2
3 3 # hg init
4 4 # echo a > a
5 5 # ln -s a l
6 6 # hg ci -Ama -d'0 0'
7 7 # mkdir b
8 8 # echo a > b/a
9 9 # chmod +x b/a
10 10 # hg ci -Amb -d'1 0'
11 11
12 12 $ hg init
13 13 $ hg unbundle "$TESTDIR/bundles/test-manifest.hg"
14 14 adding changesets
15 15 adding manifests
16 16 adding file changes
17 17 added 2 changesets with 3 changes to 3 files
18 18 new changesets b73562a03cfe:5bdc995175ba (2 drafts)
19 19 (run 'hg update' to get a working copy)
20 20
21 21 The next call is expected to return nothing:
22 22
23 23 $ hg manifest
24 24
25 25 $ hg co
26 26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 27
28 28 $ hg manifest
29 29 a
30 30 b/a
31 31 l
32 32
33 33 $ hg files -vr .
34 34 2 a
35 35 2 x b/a
36 36 1 l l
37 37 $ hg files -r . -X b
38 38 a
39 39 l
40 40 $ hg files -T '{path} {size} {flags}\n'
41 41 a 2
42 42 b/a 2 x
43 43 l 1 l
44 44 $ hg files -T '{path} {node|shortest}\n' -r.
45 45 a 5bdc
46 46 b/a 5bdc
47 47 l 5bdc
48 48
49 49 $ hg manifest -v
50 50 644 a
51 51 755 * b/a
52 52 644 @ l
53 53 $ hg manifest -T '{path} {rev}\n'
54 54 a 1
55 55 b/a 1
56 56 l 1
57 57
58 58 $ hg manifest --debug
59 59 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
60 60 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
61 61 047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l
62 62
63 63 $ hg manifest -r 0
64 64 a
65 65 l
66 66
67 67 $ hg manifest -r 1
68 68 a
69 69 b/a
70 70 l
71 71
72 72 $ hg manifest -r tip
73 73 a
74 74 b/a
75 75 l
76 76
77 77 $ hg manifest tip
78 78 a
79 79 b/a
80 80 l
81 81
82 82 $ hg manifest --all
83 83 a
84 84 b/a
85 85 l
86 86
87 87 The next two calls are expected to abort:
88 88
89 89 $ hg manifest -r 2
90 90 abort: unknown revision '2'!
91 91 [255]
92 92
93 93 $ hg manifest -r tip tip
94 94 abort: please specify just one revision
95 95 [255]
96 96
97 97 Testing the manifest full text cache utility
98 98 --------------------------------------------
99 99
100 100 Reminder of the manifest log content
101 101
102 102 $ hg log --debug | grep 'manifest:'
103 103 manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7
104 104 manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf
105 105
106 106 Showing the content of the caches after the above operations
107 107
108 108 $ hg debugmanifestfulltextcache
109 109 cache empty
110
111 Adding a new persistent entry in the cache
112
113 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
114 cache contains 1 manifest entries, in order of most to least recent:
115 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
116 total cache data size 157 bytes, on-disk 157 bytes
117
118 $ hg debugmanifestfulltextcache
119 cache contains 1 manifest entries, in order of most to least recent:
120 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
121 total cache data size 157 bytes, on-disk 157 bytes
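
The totals above are consistent with each cache record carrying a 20-byte
binary node and a 4-byte length prefix in front of the manifest text:
20 + 4 + 133 = 157 bytes. A minimal sketch of that accounting (the record
layout is an assumption for illustration, not code taken from Mercurial):

  import struct

  def cachereport(entries):
      """entries: (hexnode, manifesttext) pairs, most recent first."""
      lines = ['cache contains %d manifest entries, in order of most to '
               'least recent:' % len(entries)]
      total = 0
      for hexnode, text in entries:
          lines.append('id: %s, size %d bytes' % (hexnode, len(text)))
          # assumed on-disk record: 20-byte node + 4-byte length + payload
          total += 20 + len(struct.pack('>L', len(text))) + len(text)
      lines.append('total cache data size %d bytes, on-disk %d bytes'
                   % (total, total))
      return '\n'.join(lines)

  # A single 133-byte manifest text yields a 157-byte total, matching the
  # numbers shown above.
  print(cachereport([('1e01206b1d2f72bd55f2a33fa8ccad74144825b7',
                      'x' * 133)]))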