# Provenance: Mercurial changeset r39195:5b32b3c6 (branch: default)
# Author: Gregory Szorc
# Subject: setdiscovery: don't use dagutil for rev -> node conversions
# Diff hunk: @@ -1,3325 +1,3327 @@ (web-viewer navigation chrome removed)
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 filesetlang,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
# Convenience alias: let callers release locks without importing lockmod.
release = lockmod.release

# Decorator used below to register each debug* command in this module's
# command table.
command = registrar.command()
95 95
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions: an explicit revlog index file plus two
    # revisions (three args), or two revisions against the current repo's
    # changelog (two args).
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    # Resolve both revisions to nodes and report their common ancestor.
    ancestornode = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(ancestornode), hex(ancestornode)))
114 114
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # BUG FIX: the original never closed the handle returned by
    # hg.openpath, leaking it. Use a context manager, matching how
    # debugbundle opens its bundle file.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
121 121
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a repo that already has history.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # First pass over the DAG text: count node events for the progress bar.
    total = 0
    for kind, data in dagparser.parsedag(text):
        if kind == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # Seed content for "mf": k lines per future revision.
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                # rev number of the last node committed
        atbranch = 'default'   # branch applied to subsequent nodes
        nodeids = []           # rev number -> node id
        rev = 0
        progress.update(rev)
        # Second pass: actually materialize nodes, tags and branch switches.
        for kind, data in dagparser.parsedag(text):
            if kind == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                rev, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the "mf" contents.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's line so every rev changes "mf".
                    ml[rev * linesperrev] += " r%i" % rev
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % rev

                if new_file:
                    fn = "nf%i" % rev
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % rev
                    if len(ps) > 1:
                        # On merges, carry over the other parent's nf* files.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % rev, files, fctxfn,
                                    date=(rev, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = rev
            elif kind == 'l':
                rev, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(rev)), name))
            elif kind == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(rev)

    if tags:
        repo.vfs.write("localtags", "".join(tags))
269 269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the deltas of a changegroup, optionally with full detail."""
    pad = ' ' * indent
    if all:
        # Verbose mode: one fully annotated line per delta, grouped by
        # changelog, manifest, then each filelog.
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % pad)

        def showchunks(named):
            ui.write("\n%s%s\n" % (pad, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (pad, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelogheader() returns {} after the last filelog.
        for chunkdata in iter(gen.filelogheader, {}):
            showchunks(chunkdata['filename'])
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # Terse mode: just the changelog node hashes.
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (pad, hex(node)))
298 298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report it rather than crash.
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
    else:
        ui.write("%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, m)
        fm.end()
321 321
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            # One line per head: indented "<hex node> <phase name>".
            ui.write('%s%s %s\n' % (pad, hex(head), phases.phasenames[phase]))
330 330
def _quasirepr(thing):
    """Return a deterministic repr-like rendering of *thing*.

    Dict-like objects are rendered with their keys in sorted order so the
    output is stable across runs; everything else falls back to repr().
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return '{%s}' % b', '.join(pairs)
336 336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # Honor --part-type filtering when any types were requested.
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # Known part payloads get a detailed, indented dump unless --quiet.
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
359 359
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundlespec and stop.
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # Dispatch on bundle format: bundle2 vs legacy changegroup.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
378 378
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    # bundle2 capabilities are nested: key, then each of its values.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)
397 397
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Pass 1: every tracked dirstate entry must be consistent with the
    # parent manifests for its state ('n'ormal/'r'emoved/'a'dded/'m'erged).
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Pass 2: every file in manifest1 must be tracked with a sane state.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # BUG FIX: the original bound this message to a local named 'error',
        # shadowing the 'error' module, so 'error.Abort' then raised
        # AttributeError instead of the intended Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
425 425
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
436 436
def _debugdisplaycolor(ui):
    """Print every available color label, each rendered in its own color."""
    # Work on a copy so the caller's style table is untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode exposes user-configured color.*/terminfo.* entries.
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(ui._styles.items(),
                     key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in entries:
        ui.write(('%s\n') % colorname, label=label)
454 454
def _debugdisplaystyle(ui):
    """Print each configured style and the effects it expands to."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # Pad so the effect lists line up in one column.
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
468 468
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(reqs)))
486 486
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Stand-alone revlog index: emit its DAG, labeling requested revs.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set(int(r) for r in revs)

        def events():
            for r in rlog:
                yield 'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # Changelog of the current repo, optionally annotated with tags
        # and branch switches.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
549 549
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # With -c/-m/--dir the single positional argument is the revision.
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
565 565
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        # Optional second argument: test the date against a range spec.
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(parsed[0]))
581 581
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how this revision's delta was stored and measure its
        # delta chain.
        entry = index[rev]
        compsize = entry[1]
        uncompsize = entry[2]
        chainsize = 0

        if generaldelta:
            # entry[3] is the delta base; entries 5/6 are the parent revs.
            if entry[3] == entry[5]:
                deltatype = 'p1'
            elif entry[3] == entry[6]:
                deltatype = 'p2'
            elif entry[3] == rev - 1:
                deltatype = 'prev'
            elif entry[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            deltatype = 'base' if entry[3] == rev else 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            chainsize += index[iterrev][1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # Column header; widths line up with the row format string below.
    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains consecutively by their (unique) base revision.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # Simulate the sparse read to report how much data would
            # actually be read from disk for this chain.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
733 733
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent is (state, mode, size, mtime); -1 mtime means "unset".
        # The literals are padded to the 20-char strftime width below so
        # columns stay aligned.
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765 765
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            # Legacy (pre-setdiscovery) protocol path.
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            dag = dagutil.revlogdag(repo.changelog)
            all = dag.ancestorset(dag.internalizeall(common))
            # Convert head revs back to nodes without going through dagutil.
            common = {clnode(r) for r in dag.headsetofconnecteds(all)}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
817 819
818 820 _chunksize = 4 << 10
819 821
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    # Without --output, stream straight to the ui.
    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # BUG FIX: also close the source handle; the original only closed
        # the destination and leaked the handle opened by urlmod.open.
        fh.close()
        if output:
            dest.close()
841 843
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # Default verbosity: annotate the name with test status.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
887 889
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Runs ``expr`` through the fileset compilation stages (parsed,
    analyzed, optimized), optionally printing the tree after each
    requested --show-stage, then builds a matcher from the expression
    and prints every candidate file name the matcher accepts.
    '''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # Compilation pipeline: each stage transforms the previous stage's tree.
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                # the stage label is omitted for the bare --verbose fallback
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # Build the universe of file names the matcher will be tested against.
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
954 956
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: widest of the header label and all variant names.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' followed by padding so all value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through; booleans render as yes/no in plain mode
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # structured formatters (json, template) get the raw value
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels highlighting disagreement between repo, config, default
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1016 1018
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # keep the historical and/or-ternary rendering
        return flag and 'yes' or 'no'

    def orunknown(value):
        return value or '(unknown)'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % orunknown(util.getfsmountpoint(path)))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % orunknown(util.getfstype(path)))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # Probe case-sensitivity by creating a scratch file in the target
    # directory; if that fails we can only report '(unknown)'.
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1033 1035
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # Map the user-facing compression name to the on-disk bundle type.
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1068 1070
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                # test the file itself first, then each containing directory
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1110 1112
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # --debug shows full 40-char hashes, otherwise short ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of the first node id determines the column width
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be tolerant of broken index entries; fall back to null parents
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                        i, r.start(i), r.length(i), r.linkrev(i),
                        shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                        i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                        r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1175 1177
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        # one edge per parent; the second parent is omitted when null
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
1190 1192
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # NOTE(review): writetemp appears unused within this function body;
    # possibly kept for wrapping extensions — confirm before removing.
    def writetemp(contents):
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # running count of detected problems; also the command's exit code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # verify the C extensions actually import under this interpreter
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        # either no template dirs, no default map file, or a broken one
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1364 1366
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    digits = []
    for known in peer.known(nodes):
        # one character per queried id, in order
        digits.append(known and "1" or "0")
    ui.write("%s\n" % "".join(digits))
1378 1380
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: all real work happens in debugnamecomplete()
    debugnamecomplete(ui, repo, *args)
1383 1385
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: just remove the lock files and exit
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire and hold until the user answers the
    # prompt (or the process is interrupted); locks released in finally
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report on each lock; return value is the held count
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it; release immediately
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # lock file vanished between the failed acquire and lstat

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1480 1482
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog._revlog
        try:
            cache = r._fulltextcache
        except AttributeError:
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read()  # stores revision in cache too

        # finally, report the (possibly modified) cache contents
        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24   # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1527 1529
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump the raw v1 or v2 record list (closes over v1records/v2records)
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # file records: NUL-separated fields
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # per-file extras: alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # records in 'order' come first (by position); the rest sort by payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1626 1628
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect every known name except branch names, which are handled
    # separately below so that only open branches are offered.
    candidates = set()
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # an empty argument list means "complete everything"
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1646 1648
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and stop
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1763 1765
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return a (files, dirs) pair of completions for 'path',
        # restricted to dirstate entries whose state character is in
        # 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # A spec outside the repository can't complete to anything.
        # NOTE(review): this early-out returns lists while the normal
        # path returns sets; callers only .update() the results, so
        # both work.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # The dirstate stores '/'-separated paths; on platforms with a
        # different separator, translate the spec going in and each
        # match coming back out.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator after
                # the spec and offer the prefix as a directory.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Accumulate the acceptable dirstate state characters from the
    # -n/-a/-r flags; an empty selection falls back to 'nmar' (all).
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1828 1830
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; the log lines only become visible
    # when the user also passes --debug.
    requestlogging = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(requestlogging):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            return _('yes') if flag else _('no')

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % yesno(islocal))
        ui.write(_('pushable: %s\n') % yesno(pushable))
1847 1849
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool takes precedence over everything else; surface it to
        # filemerge._picktool via the ui.forcemerge override.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # Report the other early-decision inputs (see the docstring)
        # when running verbosely.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is in effect, capture (and discard) the
                # output _picktool emits while examining configurations.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1926 1928
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for k, v in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
        return

    # Update mode: conditionally move KEY from OLD to NEW on the peer.
    key, old, new = keyinfo
    args = {
        'namespace': namespace,
        'key': key,
        'old': old,
        'new': new,
    }
    with peer.commandexecutor() as e:
        r = e.callcommand('pushkey', args).result()

    ui.status(pycompat.bytestr(r) + '\n')
    return not r
1954 1956
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs (parent vectors) of two revisions

    Prints both vectors, their depths, and the delta/hamming
    distance together with the relation between them: '=' equal,
    '>' / '<' ancestor ordering, '|' incomparable.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation. Fall back to '?' so 'rel' is always bound
    # even if none of the pvec comparisons matches (previously this
    # would have raised UnboundLocalError at the write below).
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1975 1977
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this down
        # to the files that are actually inconsistent (see the command
        # documentation above).
        changedfiles = None
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            manifestonly = inmanifest - indirstate
            notadded = {f for f in indirstate - inmanifest
                        if ds[f] != 'a'}
            changedfiles = manifestonly | notadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2013 2015
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin CLI entry point; all the actual work lives in the repair
    # module.
    repair.rebuildfncache(ui, repo)
2018 2020
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(path)
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
            continue
        ui.write(_("%s renamed from %s:%s\n")
                 % (relpath, copysource[0], hex(copysource[1])))
2036 2038
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one line of raw index data per revision.
    Otherwise print aggregate statistics: revision and delta counts,
    delta-chain metrics, chunk compression types and size
    distributions.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # Raw per-revision dump of the index.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Track DAG heads incrementally: a rev's parents stop being
            # heads once the rev itself is seen.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold 'size' into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as full text (or empty): starts a new chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Stored as a delta: extends the base revision's chain.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Classify what the delta is computed against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Guard the averages against empty denominators: an empty revlog has
    # numrevs == 0, and a revlog containing only empty texts and deltas
    # has numfull == 0. Either case previously raised ZeroDivisionError.
    if numrevs:
        datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    if numrevs:
        avgchainlen = sum(chainlengths) / numrevs
        maxchainlen = max(chainlengths)
        maxchainspan = max(chainspans)
    else:
        # Empty revlog: no chains to aggregate over.
        avgchainlen = maxchainlen = maxchainspan = 0
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Render 'value' with its percentage of 'total'.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write(('    merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write(('    empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write(('                   text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write(('                  delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write(('    snapshot  : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write(('    deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    snapshot  : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write(('    deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg)    : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write(('    level-%-3d (min/max/avg)          : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                              numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                              numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                              numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                             numdeltas))
2336 2338
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # The revset compilation pipeline, in order; each stage transforms
    # the tree produced by the previous one.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # 'showalways' stages are printed unconditionally; 'showchanged'
    # stages only when their tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping every intermediate tree so
    # --verify-optimized can re-evaluate the 'analyzed' stage below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff
        # the resulting revision sequences.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2439 2441
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        fd = int(opts['logiofd'])
        # Line buffered (bufsize=1) because the log output is line based.
        try:
            logfh = os.fdopen(fd, r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # A pipe cannot seek, so append mode fails on Python 3;
            # retry in plain write mode.
            logfh = os.fdopen(fd, r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
2476 2478
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    p1 = scmutil.revsingle(repo, rev1).node()
    # A missing REV2 defaults to the null revision.
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(p1, p2)
2494 2496
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        # Without an explicit SOURCE, a repository is required so the
        # 'default' path can be resolved from its configuration.
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Only schemes with a well-known default port are supported.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled (CERT_NONE): the
    # goal here is only to fetch the peer's certificate, not validate it.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass: check the chain only, without building it.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second pass omits build=False, letting the win32 helper
            # attempt to complete the chain.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2556 2558
@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state recorded in the given (or working)
    # revision, one entry per subrepo path, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2567 2569
@command('debugsuccessorssets',
         [('', 'closest', False, _('return closest successors sets only'))],
         _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # successorssets() can share work across invocations via this cache,
    # which is threaded through every call below.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            # One output line per successors set; every node is preceded
            # by a single space (empty sets print a bare newline).
            for node in succsset:
                ui.write(' ')
                ui.write(short(node))
            ui.write('\n')
2620 2622
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # Revisions were requested, so a repository is mandatory even though
        # the command itself is optionalrepo.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect user-supplied template keywords from -D KEY=VALUE options.
    props = {}
    for d in opts[r'define']:
        try:
            # If there is no '=', split() yields one element and the 2-way
            # unpack raises ValueError, which is reported as malformed below.
            k, v = (e.strip() for e in d.split('=', 1))
            # An empty name and the reserved name 'ui' are both rejected.
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, and the tree again after template-alias
        # expansion if that changed anything.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with only the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2677 2679
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix: the label previously read 'respose'; spell it 'response' to
    # match the sibling debuguiprompt command's output.
    ui.write(('response: %s\n') % r)
2685 2687
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo whatever the user typed at the (non-password) prompt.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2693 2695
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-directory and store locks before rebuilding
    # every cache from scratch.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2699 2701
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin CLI wrapper: all analysis and the actual upgrade logic live in
    # the upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2724 2726
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        # Reference util.normpath directly instead of wrapping it in a
        # needless identity-forwarding lambda.
        f = util.normpath
    # Column widths: longest repo-relative path and longest cwd-relative
    # path. 'fname' avoids the previous shadowing of the builtin abs();
    # max() takes generators directly, no intermediate lists needed.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(m.rel(fname)) for fname in items))
    for fname in items:
        line = fmt % (fname, f(m.rel(fname)),
                      m.exact(fname) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2745 2747
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergence is involved, list the divergent nodes (with their
        # phases) ahead of the reason; otherwise the prefix stays empty.
        divergent = entry.get('divergentnodes')
        if divergent:
            formatted = ['%s (%s)' % (dctx.hex(), dctx.phasestr())
                         for dctx in divergent]
            dnodes = ' '.join(formatted) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2756 2758
@command('debugwireargs',
         [('', 'three', '', 'three'),
          ('', 'four', '', 'four'),
          ('', 'five', '', 'five'),
          ] + cmdutil.remoteopts,
         _('REPO [OPTIONS]... [ONE [TWO]]'),
         norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing through the wire protocol against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)

    # Strip the generic remote options; only command-specific ones remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]

    # Forward only the options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in opts.iteritems() if v})

    # run twice to check that we don't mess up the stream for the next command
    first = repo.debugwireargs(*vals, **args)
    second = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2780 2782
2781 2783 def _parsewirelangblocks(fh):
2782 2784 activeaction = None
2783 2785 blocklines = []
2784 2786
2785 2787 for line in fh:
2786 2788 line = line.rstrip()
2787 2789 if not line:
2788 2790 continue
2789 2791
2790 2792 if line.startswith(b'#'):
2791 2793 continue
2792 2794
2793 2795 if not line.startswith(b' '):
2794 2796 # New block. Flush previous one.
2795 2797 if activeaction:
2796 2798 yield activeaction, blocklines
2797 2799
2798 2800 activeaction = line
2799 2801 blocklines = []
2800 2802 continue
2801 2803
2802 2804 # Else we start with an indent.
2803 2805
2804 2806 if not activeaction:
2805 2807 raise error.Abort(_('indented line outside of block'))
2806 2808
2807 2809 blocklines.append(line)
2808 2810
2809 2811 # Flush last block.
2810 2812 if activeaction:
2811 2813 yield activeaction, blocklines
2812 2814
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    # Consume the entire mini-language program up front before opening
    # any connection.
    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                # Fix: closing quote was previously missing from the message.
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fix: the whole split() list was previously passed to
                    # open(), which always raises TypeError; open() takes the
                    # path component only.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,271 +1,274 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 The algorithm works in the following way. You have two repositories: local and
10 10 remote. They both contain a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of nodes, *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issues with the original protocol was latency: it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case; you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which given a set of changelists tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follows:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple of optimizations. First, instead of starting with a random
34 34 sample of missing, start by sending all heads; in the case where the local
35 35 repo is a subset, you compute the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with it (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from __future__ import absolute_import
44 44
45 45 import collections
46 46 import random
47 47
48 48 from .i18n import _
49 49 from .node import (
50 50 nullid,
51 51 nullrev,
52 52 )
53 53 from . import (
54 54 dagutil,
55 55 error,
56 56 util,
57 57 )
58 58
59 59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 60 """update an existing sample to match the expected size
61 61
62 62 The sample is updated with nodes exponentially distant from each head of the
63 63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64 64
65 65 If a target size is specified, the sampling will stop once this size is
66 66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 67 reached.
68 68
69 69 :dag: a dag object from dagutil
70 70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 71 :sample: a sample to update
72 72 :quicksamplesize: optional target size of the sample"""
73 73 # if nodes is empty we scan the entire graph
74 74 if nodes:
75 75 heads = dag.headsetofconnecteds(nodes)
76 76 else:
77 77 heads = dag.heads()
78 78 dist = {}
79 79 visit = collections.deque(heads)
80 80 seen = set()
81 81 factor = 1
82 82 while visit:
83 83 curr = visit.popleft()
84 84 if curr in seen:
85 85 continue
86 86 d = dist.setdefault(curr, 1)
87 87 if d > factor:
88 88 factor *= 2
89 89 if d == factor:
90 90 sample.add(curr)
91 91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 92 return
93 93 seen.add(curr)
94 94 for p in dag.parents(curr):
95 95 if not nodes or p in nodes:
96 96 dist.setdefault(p, d + 1)
97 97 visit.append(p)
98 98
99 99 def _takequicksample(dag, nodes, size):
100 100 """takes a quick sample of size <size>
101 101
102 102 It is meant for initial sampling and focuses on querying heads and close
103 103 ancestors of heads.
104 104
105 105 :dag: a dag object
106 106 :nodes: set of nodes to discover
107 107 :size: the maximum size of the sample"""
108 108 sample = dag.headsetofconnecteds(nodes)
109 109 if len(sample) >= size:
110 110 return _limitsample(sample, size)
111 111 _updatesample(dag, None, sample, quicksamplesize=size)
112 112 return sample
113 113
114 114 def _takefullsample(dag, nodes, size):
115 115 sample = dag.headsetofconnecteds(nodes)
116 116 # update from heads
117 117 _updatesample(dag, nodes, sample)
118 118 # update from roots
119 119 _updatesample(dag.inverse(), nodes, sample)
120 120 assert sample
121 121 sample = _limitsample(sample, size)
122 122 if len(sample) < size:
123 123 more = size - len(sample)
124 124 sample.update(random.sample(list(nodes - sample), more))
125 125 return sample
126 126
127 127 def _limitsample(sample, desiredlen):
128 128 """return a random subset of sample of at most desiredlen item"""
129 129 if len(sample) > desiredlen:
130 130 sample = set(random.sample(sample, desiredlen))
131 131 return sample
132 132
133 133 def findcommonheads(ui, local, remote,
134 134 initialsamplesize=100,
135 135 fullsamplesize=200,
136 136 abortwhenunrelated=True,
137 137 ancestorsof=None):
138 138 '''Return a tuple (common, anyincoming, remoteheads) used to identify
139 139 missing nodes from or in remote.
140 140 '''
141 141 start = util.timer()
142 142
143 143 roundtrips = 0
144 144 cl = local.changelog
145 clnode = cl.node
145 146 localsubset = None
147
146 148 if ancestorsof is not None:
147 149 rev = local.changelog.rev
148 150 localsubset = [rev(n) for n in ancestorsof]
149 151 dag = dagutil.revlogdag(cl, localsubset=localsubset)
150 152
151 153 # early exit if we know all the specified remote heads already
152 154 ui.debug("query 1; heads\n")
153 155 roundtrips += 1
154 156 ownheads = dag.heads()
155 157 sample = _limitsample(ownheads, initialsamplesize)
156 158 # indices between sample and externalized version must match
157 159 sample = list(sample)
158 160
159 161 with remote.commandexecutor() as e:
160 162 fheads = e.callcommand('heads', {})
161 163 fknown = e.callcommand('known', {
162 'nodes': dag.externalizeall(sample),
164 'nodes': [clnode(r) for r in sample],
163 165 })
164 166
165 167 srvheadhashes, yesno = fheads.result(), fknown.result()
166 168
167 169 if cl.tip() == nullid:
168 170 if srvheadhashes != [nullid]:
169 171 return [nullid], True, srvheadhashes
170 172 return [nullid], False, []
171 173
172 174 # start actual discovery (we note this before the next "if" for
173 175 # compatibility reasons)
174 176 ui.status(_("searching for changes\n"))
175 177
176 178 srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
177 179 if len(srvheads) == len(srvheadhashes):
178 180 ui.debug("all remote heads known locally\n")
179 return (srvheadhashes, False, srvheadhashes,)
181 return srvheadhashes, False, srvheadhashes
180 182
181 183 if len(sample) == len(ownheads) and all(yesno):
182 184 ui.note(_("all local heads known remotely\n"))
183 ownheadhashes = dag.externalizeall(ownheads)
184 return (ownheadhashes, True, srvheadhashes,)
185 ownheadhashes = [clnode(r) for r in ownheads]
186 return ownheadhashes, True, srvheadhashes
185 187
186 188 # full blown discovery
187 189
188 190 # own nodes I know we both know
189 191 # treat remote heads (and maybe own heads) as a first implicit sample
190 192 # response
191 193 common = cl.incrementalmissingrevs(srvheads)
192 194 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
193 195 common.addbases(commoninsample)
194 196 # own nodes where I don't know if remote knows them
195 197 undecided = set(common.missingancestors(ownheads))
196 198 # own nodes I know remote lacks
197 199 missing = set()
198 200
199 201 full = False
200 202 progress = ui.makeprogress(_('searching'), unit=_('queries'))
201 203 while undecided:
202 204
203 205 if sample:
204 206 missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
205 207 missing.update(dag.descendantset(missinginsample, missing))
206 208
207 209 undecided.difference_update(missing)
208 210
209 211 if not undecided:
210 212 break
211 213
212 214 if full or common.hasbases():
213 215 if full:
214 216 ui.note(_("sampling from both directions\n"))
215 217 else:
216 218 ui.debug("taking initial sample\n")
217 219 samplefunc = _takefullsample
218 220 targetsize = fullsamplesize
219 221 else:
220 222 # use even cheaper initial sample
221 223 ui.debug("taking quick initial sample\n")
222 224 samplefunc = _takequicksample
223 225 targetsize = initialsamplesize
224 226 if len(undecided) < targetsize:
225 227 sample = list(undecided)
226 228 else:
227 229 sample = samplefunc(dag, undecided, targetsize)
228 230
229 231 roundtrips += 1
230 232 progress.update(roundtrips)
231 233 ui.debug("query %i; still undecided: %i, sample size is: %i\n"
232 234 % (roundtrips, len(undecided), len(sample)))
233 235 # indices between sample and externalized version must match
234 236 sample = list(sample)
235 237
236 238 with remote.commandexecutor() as e:
237 239 yesno = e.callcommand('known', {
238 'nodes': dag.externalizeall(sample),
240 'nodes': [clnode(r) for r in sample],
239 241 }).result()
240 242
241 243 full = True
242 244
243 245 if sample:
244 246 commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
245 247 common.addbases(commoninsample)
246 248 common.removeancestorsfrom(undecided)
247 249
248 250 # heads(common) == heads(common.bases) since common represents common.bases
249 251 # and all its ancestors
250 252 result = dag.headsetofconnecteds(common.bases)
251 253 # common.bases can include nullrev, but our contract requires us to not
252 254 # return any heads in that case, so discard that
253 255 result.discard(nullrev)
254 256 elapsed = util.timer() - start
255 257 progress.complete()
256 258 ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
257 259 msg = ('found %d common and %d unknown server heads,'
258 260 ' %d roundtrips in %.4fs\n')
259 261 missing = set(result) - set(srvheads)
260 262 ui.log('discovery', msg, len(result), len(missing), roundtrips,
261 263 elapsed)
262 264
263 265 if not result and srvheadhashes != [nullid]:
264 266 if abortwhenunrelated:
265 267 raise error.Abort(_("repository is unrelated"))
266 268 else:
267 269 ui.warn(_("warning: repository is unrelated\n"))
268 270 return ({nullid}, True, srvheadhashes,)
269 271
270 272 anyincoming = (srvheadhashes != [nullid])
271 return dag.externalizeall(result), anyincoming, srvheadhashes
273 result = {clnode(r) for r in result}
274 return result, anyincoming, srvheadhashes
General Comments 0
You need to be logged in to leave comments. Login now