##// END OF EJS Templates
debugdiscovery: include the number of heads in all sets...
marmoute -
r42321:607a0de9 default
parent child Browse files
Show More
@@ -1,3467 +1,3469 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 compression,
86 86 dateutil,
87 87 procutil,
88 88 stringutil,
89 89 )
90 90
91 91 from .revlogutils import (
92 92 deltas as deltautil
93 93 )
94 94
# Convenience alias: release() unlocks any number of lock objects at once.
release = lockmod.release

# Registration table; every @command-decorated function below adds itself
# to the debug command set through this decorator factory.
command = registrar.command()
98 98
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # no index file: use the changelog of the current repository
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    ancnode = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(ancnode), hex(ancnode)))
117 117
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle (local path or URL), let readbundle() sniff its
    # format, then apply it directly onto the current repository.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
124 124
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # the command only makes sense on a pristine repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, for the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # everything happens under wlock+lock and a single transaction
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                 # rev id of the most recently created node
        atbranch = 'default'    # branch applied to subsequently created nodes
        nodeids = []            # maps DAG id -> commit node, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # maintain a single file whose lines merge cleanly
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file fully rewritten at every rev
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # a fresh file per rev; merges carry over the p2 files
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # translate DAG backrefs into parent nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag element: remember it, written out at the end
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
272 272
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup to the ui

    With 'all' set, every delta of the changelog, manifest and each filelog
    is listed; otherwise only the changelog node ids are printed.  'indent'
    prefixes every output line (used when nested inside bundle2 output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # print one line per delta of the current chunk group
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelog headers are yielded until an empty dict marks the end
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
301 301
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # markers encoded with a format we do not know: report and move on
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output matches that command
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
324 324
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary phase data 'data'"""
    prefix = ' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in decoded[phase]:
            # one "<node> <phasename>" line per head
            ui.write(prefix)
            ui.write('%s %s\n' % (hex(head), phasename))
333 333
def _quasirepr(thing):
    """repr-like rendering of 'thing' with deterministic dict ordering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        # sort keys so the output is stable across runs/implementations
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
339 339
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type restricts output to the named part types only
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a nested, indented dump (unless --quiet)
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
362 362
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the derived bundlespec string and stop
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        # dispatch on the detected bundle format
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
381 381
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # plain wire-protocol capabilities first
    ui.write(('Main capabilities:\n'))
    for capname in sorted(peer.capabilities()):
        ui.write(('  %s\n') % capname)
    # then the decoded bundle2 capability tree, if the peer has one
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for capkey, capvalues in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % capkey)
            for capvalue in capvalues:
                ui.write(('    %s\n') % capvalue)
400 400
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # direction 1: every tracked dirstate entry must be justified by a parent
    # manifest ('n'ormal/'r'emoved need m1, 'a'dded must NOT be in m1,
    # 'm'erged must be in at least one parent)
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # direction 2: every file in manifest1 must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Bug fix: the original bound the message to a local named 'error',
        # shadowing the 'error' module, so 'error.Abort' became an attribute
        # lookup on a string and raised AttributeError instead of Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
428 428
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # default view is the color list; --style switches to the style list
    if not opts.get(r'style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
439 439
def _debugdisplaycolor(ui):
    """print every color/effect name, rendered in its own effect"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, custom color.*/terminfo.* config entries add
        # extra names; strip the section prefix for display
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
457 457
def _debugdisplaystyle(ui):
    """list each configured style label with its attached effects"""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect columns line up
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(': ')
            ui.write(padding)
            ui.write(', '.join(rendered))
        ui.write('\n')
471 471
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # stream bundles copy revlogs wholesale, so phase boundaries are
        # not honored -- warn rather than silently leak secret changesets
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
489 489
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # standalone revlog index mode: emit its DAG, labeling listed revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield ('n', (rev, parents)) nodes plus 'l' label events
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # changelog mode: optionally annotate with tags and branch changes
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' annotation whenever the branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
552 552
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir, the single positional argument is really the REV
    flagged = (opts.get('changelog') or opts.get('manifest')
               or opts.get('dir'))
    if flagged:
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    storage = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(storage.revision(storage.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
568 568
@command('debugdate',
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]'),
         norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the extra legacy date formats
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
584 584
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify how the delta base relates to this revision
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta the base is always self or the previous rev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # delta chains are numbered in order of first appearance of their base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # simulate a sparse read of the chain to measure I/O efficiency
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
736 736
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --no-dates
    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each dirstate entry is (state, mode, size, mtime); mtime of -1 = unset
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink flag bit set in the recorded mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
770 770
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ('', 'seed', '12323', 'specify the random seed use for discovery'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if opts.get('old'):
        # legacy tree-walking discovery
        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # reduce the common set to its heads, as modern discovery does
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        # modern sampling-based set discovery
        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data['elapsed'] = t.elapsed
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-common-both'] = len(common & rheads & lheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
    ui.write(("elapsed time:  %(elapsed)f seconds\n") % data)
    ui.write(("heads summary:\n"))
    ui.write(("  total common heads:  %(nb-common)9d\n") % data)
    ui.write(("    also local heads:  %(nb-common-local)9d\n") % data)
    ui.write(("    also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write(("    both:              %(nb-common-both)9d\n") % data)
    ui.write(("  local heads:         %(nb-local)9d\n") % data)
    ui.write(("    common:            %(nb-common-local)9d\n") % data)
    ui.write(("    missing:           %(nb-local-missing)9d\n") % data)
    ui.write(("  remote heads:        %(nb-remote)9d\n") % data)
    ui.write(("    common:            %(nb-common-remote)9d\n") % data)
    ui.write(("    unknown:           %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets:      %(nb-revs)9d\n") % data)
    ui.write(("  common:              %(nb-revs-common)9d\n") % data)
    ui.write(("  missing:             %(nb-revs-missing)9d\n") % data)

    if ui.verbose:
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
859 861
# read/copy buffer size (4 KiB) used by debugdownload below
_chunksize = 4 << 10
861 863
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The fetched data is written to the -o/--output path if given,
    otherwise to the ui (stdout).
    """
    fh = urlmod.open(ui, url, output)

    try:
        dest = ui
        if output:
            dest = open(output, "wb", _chunksize)
        try:
            # copy in fixed-size chunks so large downloads stay bounded
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # bug fix: the original never closed the source handle, leaking
        # the connection/file descriptor
        fh.close()
883 885
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # flag extensions not declared as tested with this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
929 931
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # Parsing pipeline: each stage transforms the tree produced by the
    # previous one; --show-stage can dump any intermediate result.
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # Collect the candidate file names the matcher will be tested against.
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    # Print, in sorted order, every candidate the fileset matches.
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
996 998
@command('debugformat',
    [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Width of the name column: at least as wide as the header label.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad each variant name so the value columns line up.
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # Strings pass through; booleans render as yes/no in plain mode.
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    # Header row.
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Label selection drives the coloring: mismatches against config or
        # against Mercurial's default are highlighted differently.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1058 1060
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    # Case sensitivity is probed with a temporary file; failure to create
    # one (e.g. unwritable path) leaves the answer as '(unknown)'.
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1075 1077
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # Build the wire-protocol arguments; node ids are converted from hex.
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # Map the user-facing compression name to an on-disk bundle type.
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1110 1112
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                # A file is ignored either directly or because one of its
                # parent directories matches an ignore rule.
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing directory %s\n")
                             % (uipathfn(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % uipathfn(f))
1153 1155
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    # --debug shows full hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # Derive the id column width from the first node (falls back to 12 for
    # an empty store).
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1189 1191
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in store:
        # Emit one edge per parent; the second parent only exists for merges.
        p1, p2 = store.parents(store.node(rev))
        ui.write("\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write("}\n")
1204 1206
@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Warm up the index (and its native helpers) before asking for stats.
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    # Emit the counters in deterministic (sorted) order.
    for key, value in sorted(index.stats().items()):
        ui.write('%s: %d\n' % (key, value))
1214 1216
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # Running count of detected problems; also the command's return value.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # Only attempt to import the C extensions when the policy allows
        # them; failure here is a real installation problem.
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    # p was cleared above if templates are missing or broken.
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the implicit default, so a missing 'vi' gets a softer message
    # than a missing user-configured editor.
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1382 1384
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Query all ids in one round trip; the peer answers with booleans.
    results = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join("1" if known else "0" for known in results))
1396 1398
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias; all logic lives in debugnamecomplete.
    return debugnamecomplete(ui, repo, *args)
1401 1403
@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
     ('W', 'force-wlock', None,
      _('free the working state lock (DANGEROUS)')),
     ('s', 'set-lock', None, _('set the store lock until stopped')),
     ('S', 'set-wlock', None,
      _('set the working state lock until stopped'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced removal: delete the lock file(s) outright and exit.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # Explicit lock setting: acquire non-blockingly, hold until the user
    # answers the prompt (or the process is interrupted), then release.
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We briefly acquired the lock, so nobody else holds it.
            l.release()
        else:
            # Lock is held elsewhere: report owner, process and age.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = ('user %s, process %s, host %s'
                                  % (user or b'None', pid, host))
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1498 1500
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', [], _('add the given manifest nodes to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation carries a fulltext cache.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _("Current revlog implementation doesn't appear to have a "
                    "manifest fulltext cache\n")
            raise error.Abort(msg)

    if opts.get(r'clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint="Check your manifest node id")
                manifest.read() # stores revision in cache too
            return

    # No options: dump the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_('cache empty\n'))
    else:
        ui.write(
            _('cache contains %d manifest entries, in order of most to '
              'least recent:\n') % (len(cache),))
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
            ui.write(_('id: %s, size %s\n') % (
                hex(nodeid), util.bytecount(size)))
        ondisk = cache._opener.stat('manifestfulltextcache').st_size
        ui.write(
            _('total cache data size %s, on-disk %s\n') % (
                util.bytecount(totalsize), util.bytecount(ondisk))
        )
1554 1556
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null'.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # File records: NUL-separated fields; v2 adds onode+flags.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # Per-file extras stored as alternating key/value fields.
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types sort first, in 'LOml' order; the rest sort by
        # record payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1653 1655
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for kind, ns in repo.names.iteritems():
        if kind != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No arguments means "complete everything" (empty prefix).
    prefixes = args if args else ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1673 1675
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into its binary form, aborting on
        # anything that is not exactly a full-length identifier.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by their obsstore indices and return.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        # Deleting while a transaction is open could invalidate indices
        # recorded by that transaction, so refuse.
        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker precursor -> successors.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction pairing: tr.release() must run before
        # l.release(), hence the nested try/finally blocks.
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # Parents can only be recorded for changesets known
                    # locally (possibly hidden, hence unfiltered()).
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1790 1792
@command('debugp1copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the target revision (working directory when no --rev given)
    # and print one "source -> destination" line per recorded p1 copy.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get('rev'), default=None)
    copymap = ctx.p1copies()
    for destination in copymap:
        ui.write('%s -> %s\n' % (copymap[destination], destination))
1801 1803
@command('debugp2copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # BUG FIX: this function was previously (mis)named debugp1copies,
    # silently rebinding the module-level name of the real debugp1copies
    # defined just above. The registered command name comes from the
    # @command decorator, so renaming the function changes no behavior.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # One "source -> destination" line per copy recorded against p2.
    for dst, src in ctx.p2copies().items():
        ui.write('%s -> %s\n' % (src, dst))
1812 1814
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec. 'acceptable' is a
        # string of dirstate state codes; only entries whose state is in it
        # are offered.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs outside the repository root cannot match anything.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # Make the spec repo-relative with '/' separators, since the
        # dirstate stores paths that way.
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate at the next path separator so
                # completion advances one segment at a time.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter flags;
    # with no filter, all of normal/modified/added/removed are allowed.
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1877 1879
@command('debugpathcopies',
         cmdutil.walkopts,
         'hg debugpathcopies REV1 REV2 [FILE]',
         inferrepo=True)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then emit each copy as "source -> destination",
    # ordered by destination path.
    oldctx = scmutil.revsingle(repo, rev1)
    newctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(oldctx, pats, opts)
    copymap = copies.pathcopies(oldctx, newctx, matcher)
    for destination in sorted(copymap):
        ui.write('%s -> %s\n' % (copymap[destination], destination))
1889 1891
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({('devel', 'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1908 1910
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes effect by temporarily forcing ui.forcemerge, the same
    # configuration knob the real merge machinery honors.
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, capture _picktool's chatter so only the
                # "FILE = MERGETOOL" lines reach the user.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1987 1989
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(key),
                                   stringutil.escapestr(value)))
        return
    # Update mode: attempt the compare-and-set and report the outcome.
    key, old, new = keyinfo
    with peer.commandexecutor() as executor:
        result = executor.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(result) + '\n')
    return not result
2015 2017
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec comparison data for two revisions

    Prints both pvecs, their depths, and the delta/hamming/estimated
    distance between them, together with their relation: '=' equal,
    '>' / '<' ancestor ordering, '|' divergent.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # BUG FIX: 'rel' was only bound inside the if/elif chain below; if no
    # relation held, the final ui.write raised NameError. Default to '?'
    # so an undetermined relation is reported instead of crashing.
    rel = "?"
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2036 2038
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; with --minimal it is narrowed
        # to only the inconsistent files computed below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # In the manifest but missing from the dirstate.
            manifestonly = manifestfiles - dirstatefiles
            # In the dirstate but not the manifest, excluding files marked
            # added ('a'), which are intentionally not in the manifest.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2074 2076
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the actual work is delegated to the repair module.
    repair.rebuildfncache(ui, repo)
2079 2081
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get('rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    # Walk every matched file and report its filelog rename metadata.
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            source, node = renamed
            ui.write(_("%s renamed from %s:%s\n") % (relpath, source,
                                                     hex(node)))
2097 2099
@command('debugrevlog', cmdutil.debugrevlogopts +
         [('d', 'dump', False, _('dump index data'))],
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # --dump mode: one raw per-revision line, then return; no statistics.
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Track the current head set incrementally: a revision's
            # parents stop being heads once it is added.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    # each 3-list is [min, max, total]; min starts as None until the
    # first sample is seen (see addsize and the fixup loop below).
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as a full snapshot (no delta parent).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Stored as a delta: extend the parent's chain bookkeeping.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # Intermediate snapshot: delta against a prior snapshot.
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify by which revision it is against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte identifies the compression engine of the chunk.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, the [2] slot of each accumulator becomes an average.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format string wide enough for the largest value.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Decimal + percentage format string, with optional extra padding.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total) for use with pcfmtstr.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-readable label for a chunk's compression-engine byte.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2397 2399
@command('debugrevlogindex', cmdutil.debugrevlogopts +
         [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
         _('[-f FORMAT] -c|-m|FILE'),
         optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # --debug switches node display from abbreviated to full hex.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Width of one rendered node id, taken from the first revision.
        idlen = len(shortfn(r.node(i)))
        break

    # Header line; columns depend on format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2462 2464
@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('', 'show-revs', True, _('print list of result revisions (default)')),
          ('s', 'show-set', None, _('print internal representation of result set')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
         ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Ordered transformation pipeline: each stage's function receives the
    # previous stage's tree.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # Which stages to print: 'showalways' unconditionally, 'showchanged'
    # only when the tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate the analyzed and optimized trees separately and diff
        # the resulting revision lists; exit 1 on any difference.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2565 2567
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented here.
    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    # Resolve the optional I/O log: either an inherited file descriptor
    # or a named file. Line buffered because output is line based.
    iolog = None
    if opts['logiofd']:
        fd = int(opts['logiofd'])
        try:
            iolog = os.fdopen(fd, r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            iolog = os.fdopen(fd, r'wb', 1)
    elif opts['logiofile']:
        iolog = open(opts['logiofile'], 'ab', 1)

    server = wireprotoserver.sshserver(ui, repo, logfh=iolog)
    server.serve_forever()
2602 2604
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes up front; the second parent
    # defaults to the null revision when not given.
    parents = [scmutil.revsingle(repo, rev1).node(),
               scmutil.revsingle(repo, rev2, 'null').node()]

    with repo.wlock():
        repo.setparents(*parents)
2620 2622
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    # The chain-building machinery below relies on the win32 module.
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        # Fall back to the repository's 'default' path.
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Map the scheme to its well-known port; used when the URL has none.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # Imported lazily: this module only exists on Windows.
    from . import win32

    # CERT_NONE: no verification is performed here -- we only want the
    # peer's certificate so the chain can be inspected/built locally.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form (binary_form=True).
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First probe without building (build=False) to see whether the
        # chain is already complete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second call attempts to build/install the missing pieces
            # (presumably via Windows Update, per the docstring above).
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2681 2683
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (path, source, revision) of a revision in a
    # stable, sorted order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2692 2694
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % bytes(ctx))
        succssets = obsutil.successorssets(repo, ctx.node(),
                                           closest=opts[r'closest'],
                                           cache=cache)
        for succsset in succssets:
            # An empty set (pruned changeset) still produces a blank line.
            if succsset:
                ui.write(' ')
                ui.write(short(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(short(node))
            ui.write('\n')
2745 2747
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    # revs stays None in "generic template" mode (no --rev given).
    revs = None
    if opts[r'rev']:
        # --rev requires a repository even though the command itself
        # does not (optionalrepo=True above).
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions into the template property dict.
    props = {}
    for d in opts[r'define']:
        try:
            # A missing '=' makes the unpack below raise ValueError,
            # which is reported as a malformed definition.
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                # Empty keys and 'ui' (a reserved name here) are rejected
                # the same way as a missing '='.
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when
        # expansion actually changed something.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic mode: render the template once with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log mode: apply the template to each requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2802 2804
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
    ], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix: the label was previously misspelled 'respose'; use the same
    # 'response' label that debuguiprompt emits.
    ui.write(('response: %s\n') % r)
2810 2812
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
    ], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the user's answer back so prompting can be exercised in tests.
    response = ui.prompt(prompt)
    ui.write(('response: %s\n') % response)
2818 2820
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock while the
    # caches are regenerated.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2824 2826
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
    ])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All of the heavy lifting lives in the upgrade module; this command
    # is a thin CLI wrapper around it.
    return upgrade.upgraderepo(ui, repo, run=run, backup=backup,
                               optimize=optimize)
2851 2853
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Path display normalization honoring the 'ui.slash' setting.
    # (Previously an identity lambda; a def avoids the lambda-assignment
    # idiom and the loop variable no longer shadows the builtin 'abs'.)
    def display(fn):
        return fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath

    # Column widths are sized to the longest repo-absolute and relative
    # paths so the output lines up.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(abspath) for abspath in items),
        max(len(repo.pathto(abspath)) for abspath in items))
    for abspath in items:
        line = fmt % (abspath, display(repo.pathto(abspath)),
                      m.exact(abspath) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2872 2874
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Divergent nodes, when present, are rendered as a leading
        # space-separated "<hex> (<phase>)" list.
        divergent = entry.get('divergentnodes')
        if divergent:
            dnodes = ' '.join('%s (%s)' % (c.hex(), c.phasestr())
                              for c in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2883 2885
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing over the wire protocol.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command arguments remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Drop unset values so only explicitly provided arguments are sent.
    args = pycompat.strkwargs(
        {k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2907 2909
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    Yields 2-tuples of an action line (no leading space) and the list of
    indented lines that belong to it. Blank lines and ``#`` comments are
    skipped. A line indented deeper than its predecessor is treated as a
    continuation and concatenated onto it.
    """
    activeaction = None
    blocklines = []
    lastindent = 0

    for rawline in fh:
        line = rawline.rstrip()

        # Blank lines and comments carry no content.
        if not line or line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines
            activeaction, blocklines, lastindent = line, [], 0
            continue

        # From here on the line is indented, so it must belong to a block.
        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        if indent > lastindent and blocklines:
            # Deeper indent: continuation of the previous line.
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines
2948 2950
2949 2951 @command('debugwireproto',
2950 2952 [
2951 2953 ('', 'localssh', False, _('start an SSH server for this repo')),
2952 2954 ('', 'peer', '', _('construct a specific version of the peer')),
2953 2955 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2954 2956 ('', 'nologhandshake', False,
2955 2957 _('do not log I/O related to the peer handshake')),
2956 2958 ] + cmdutil.remoteopts,
2957 2959 _('[PATH]'),
2958 2960 optionalrepo=True)
2959 2961 def debugwireproto(ui, repo, path=None, **opts):
2960 2962 """send wire protocol commands to a server
2961 2963
2962 2964 This command can be used to issue wire protocol commands to remote
2963 2965 peers and to debug the raw data being exchanged.
2964 2966
2965 2967 ``--localssh`` will start an SSH server against the current repository
2966 2968 and connect to that. By default, the connection will perform a handshake
2967 2969 and establish an appropriate peer instance.
2968 2970
2969 2971 ``--peer`` can be used to bypass the handshake protocol and construct a
2970 2972 peer instance using the specified class type. Valid values are ``raw``,
2971 2973 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2972 2974 raw data payloads and don't support higher-level command actions.
2973 2975
2974 2976 ``--noreadstderr`` can be used to disable automatic reading from stderr
2975 2977 of the peer (for SSH connections only). Disabling automatic reading of
2976 2978 stderr is useful for making output more deterministic.
2977 2979
2978 2980 Commands are issued via a mini language which is specified via stdin.
2979 2981 The language consists of individual actions to perform. An action is
2980 2982 defined by a block. A block is defined as a line with no leading
2981 2983 space followed by 0 or more lines with leading space. Blocks are
2982 2984 effectively a high-level command with additional metadata.
2983 2985
2984 2986 Lines beginning with ``#`` are ignored.
2985 2987
2986 2988 The following sections denote available actions.
2987 2989
2988 2990 raw
2989 2991 ---
2990 2992
2991 2993 Send raw data to the server.
2992 2994
2993 2995 The block payload contains the raw data to send as one atomic send
2994 2996 operation. The data may not actually be delivered in a single system
2995 2997 call: it depends on the abilities of the transport being used.
2996 2998
2997 2999 Each line in the block is de-indented and concatenated. Then, that
2998 3000 value is evaluated as a Python b'' literal. This allows the use of
2999 3001 backslash escaping, etc.
3000 3002
3001 3003 raw+
3002 3004 ----
3003 3005
3004 3006 Behaves like ``raw`` except flushes output afterwards.
3005 3007
3006 3008 command <X>
3007 3009 -----------
3008 3010
3009 3011 Send a request to run a named command, whose name follows the ``command``
3010 3012 string.
3011 3013
3012 3014 Arguments to the command are defined as lines in this block. The format of
3013 3015 each line is ``<key> <value>``. e.g.::
3014 3016
3015 3017 command listkeys
3016 3018 namespace bookmarks
3017 3019
3018 3020 If the value begins with ``eval:``, it will be interpreted as a Python
3019 3021 literal expression. Otherwise values are interpreted as Python b'' literals.
3020 3022 This allows sending complex types and encoding special byte sequences via
3021 3023 backslash escaping.
3022 3024
3023 3025 The following arguments have special meaning:
3024 3026
3025 3027 ``PUSHFILE``
3026 3028 When defined, the *push* mechanism of the peer will be used instead
3027 3029 of the static request-response mechanism and the content of the
3028 3030 file specified in the value of this argument will be sent as the
3029 3031 command payload.
3030 3032
3031 3033 This can be used to submit a local bundle file to the remote.
3032 3034
3033 3035 batchbegin
3034 3036 ----------
3035 3037
3036 3038 Instruct the peer to begin a batched send.
3037 3039
3038 3040 All ``command`` blocks are queued for execution until the next
3039 3041 ``batchsubmit`` block.
3040 3042
3041 3043 batchsubmit
3042 3044 -----------
3043 3045
3044 3046 Submit previously queued ``command`` blocks as a batch request.
3045 3047
3046 3048 This action MUST be paired with a ``batchbegin`` action.
3047 3049
3048 3050 httprequest <method> <path>
3049 3051 ---------------------------
3050 3052
3051 3053 (HTTP peer only)
3052 3054
3053 3055 Send an HTTP request to the peer.
3054 3056
3055 3057 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3056 3058
3057 3059 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3058 3060 headers to add to the request. e.g. ``Accept: foo``.
3059 3061
3060 3062 The following arguments are special:
3061 3063
3062 3064 ``BODYFILE``
3063 3065 The content of the file defined as the value to this argument will be
3064 3066 transferred verbatim as the HTTP request body.
3065 3067
3066 3068 ``frame <type> <flags> <payload>``
3067 3069 Send a unified protocol frame as part of the request body.
3068 3070
3069 3071 All frames will be collected and sent as the body to the HTTP
3070 3072 request.
3071 3073
3072 3074 close
3073 3075 -----
3074 3076
3075 3077 Close the connection to the server.
3076 3078
3077 3079 flush
3078 3080 -----
3079 3081
3080 3082 Flush data written to the server.
3081 3083
3082 3084 readavailable
3083 3085 -------------
3084 3086
3085 3087 Close the write end of the connection and read all available data from
3086 3088 the server.
3087 3089
3088 3090 If the connection to the server encompasses multiple pipes, we poll both
3089 3091 pipes and read available data.
3090 3092
3091 3093 readline
3092 3094 --------
3093 3095
3094 3096 Read a line of output from the server. If there are multiple output
3095 3097 pipes, reads only the main pipe.
3096 3098
3097 3099 ereadline
3098 3100 ---------
3099 3101
3100 3102 Like ``readline``, but read from the stderr pipe, if available.
3101 3103
3102 3104 read <X>
3103 3105 --------
3104 3106
3105 3107 ``read()`` N bytes from the server's main output pipe.
3106 3108
3107 3109 eread <X>
3108 3110 ---------
3109 3111
3110 3112 ``read()`` N bytes from the server's stderr pipe, if available.
3111 3113
3112 3114 Specifying Unified Frame-Based Protocol Frames
3113 3115 ----------------------------------------------
3114 3116
3115 3117 It is possible to emit a *Unified Frame-Based Protocol* by using special
3116 3118 syntax.
3117 3119
3118 3120 A frame is composed as a type, flags, and payload. These can be parsed
3119 3121 from a string of the form:
3120 3122
3121 3123 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3122 3124
3123 3125 ``request-id`` and ``stream-id`` are integers defining the request and
3124 3126 stream identifiers.
3125 3127
3126 3128 ``type`` can be an integer value for the frame type or the string name
3127 3129 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3128 3130 ``command-name``.
3129 3131
3130 3132 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3131 3133 components. Each component (and there can be just one) can be an integer
3132 3134 or a flag name for stream flags or frame flags, respectively. Values are
3133 3135 resolved to integers and then bitwise OR'd together.
3134 3136
3135 3137 ``payload`` represents the raw frame payload. If it begins with
3136 3138 ``cbor:``, the following string is evaluated as Python code and the
3137 3139 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3138 3140 as a Python byte string literal.
3139 3141 """
3140 3142 opts = pycompat.byteskwargs(opts)
3141 3143
3142 3144 if opts['localssh'] and not repo:
3143 3145 raise error.Abort(_('--localssh requires a repository'))
3144 3146
3145 3147 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3146 3148 raise error.Abort(_('invalid value for --peer'),
3147 3149 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3148 3150
3149 3151 if path and opts['localssh']:
3150 3152 raise error.Abort(_('cannot specify --localssh with an explicit '
3151 3153 'path'))
3152 3154
3153 3155 if ui.interactive():
3154 3156 ui.write(_('(waiting for commands on stdin)\n'))
3155 3157
3156 3158 blocks = list(_parsewirelangblocks(ui.fin))
3157 3159
3158 3160 proc = None
3159 3161 stdin = None
3160 3162 stdout = None
3161 3163 stderr = None
3162 3164 opener = None
3163 3165
3164 3166 if opts['localssh']:
3165 3167 # We start the SSH server in its own process so there is process
3166 3168 # separation. This prevents a whole class of potential bugs around
3167 3169 # shared state from interfering with server operation.
3168 3170 args = procutil.hgcmd() + [
3169 3171 '-R', repo.root,
3170 3172 'debugserve', '--sshstdio',
3171 3173 ]
3172 3174 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3173 3175 stdin=subprocess.PIPE,
3174 3176 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3175 3177 bufsize=0)
3176 3178
3177 3179 stdin = proc.stdin
3178 3180 stdout = proc.stdout
3179 3181 stderr = proc.stderr
3180 3182
3181 3183 # We turn the pipes into observers so we can log I/O.
3182 3184 if ui.verbose or opts['peer'] == 'raw':
3183 3185 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3184 3186 logdata=True)
3185 3187 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3186 3188 logdata=True)
3187 3189 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3188 3190 logdata=True)
3189 3191
3190 3192 # --localssh also implies the peer connection settings.
3191 3193
3192 3194 url = 'ssh://localserver'
3193 3195 autoreadstderr = not opts['noreadstderr']
3194 3196
3195 3197 if opts['peer'] == 'ssh1':
3196 3198 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3197 3199 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3198 3200 None, autoreadstderr=autoreadstderr)
3199 3201 elif opts['peer'] == 'ssh2':
3200 3202 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3201 3203 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3202 3204 None, autoreadstderr=autoreadstderr)
3203 3205 elif opts['peer'] == 'raw':
3204 3206 ui.write(_('using raw connection to peer\n'))
3205 3207 peer = None
3206 3208 else:
3207 3209 ui.write(_('creating ssh peer from handshake results\n'))
3208 3210 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3209 3211 autoreadstderr=autoreadstderr)
3210 3212
3211 3213 elif path:
3212 3214 # We bypass hg.peer() so we can proxy the sockets.
3213 3215 # TODO consider not doing this because we skip
3214 3216 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3215 3217 u = util.url(path)
3216 3218 if u.scheme != 'http':
3217 3219 raise error.Abort(_('only http:// paths are currently supported'))
3218 3220
3219 3221 url, authinfo = u.authinfo()
3220 3222 openerargs = {
3221 3223 r'useragent': b'Mercurial debugwireproto',
3222 3224 }
3223 3225
3224 3226 # Turn pipes/sockets into observers so we can log I/O.
3225 3227 if ui.verbose:
3226 3228 openerargs.update({
3227 3229 r'loggingfh': ui,
3228 3230 r'loggingname': b's',
3229 3231 r'loggingopts': {
3230 3232 r'logdata': True,
3231 3233 r'logdataapis': False,
3232 3234 },
3233 3235 })
3234 3236
3235 3237 if ui.debugflag:
3236 3238 openerargs[r'loggingopts'][r'logdataapis'] = True
3237 3239
3238 3240 # Don't send default headers when in raw mode. This allows us to
3239 3241 # bypass most of the behavior of our URL handling code so we can
3240 3242 # have near complete control over what's sent on the wire.
3241 3243 if opts['peer'] == 'raw':
3242 3244 openerargs[r'sendaccept'] = False
3243 3245
3244 3246 opener = urlmod.opener(ui, authinfo, **openerargs)
3245 3247
3246 3248 if opts['peer'] == 'http2':
3247 3249 ui.write(_('creating http peer for wire protocol version 2\n'))
3248 3250 # We go through makepeer() because we need an API descriptor for
3249 3251 # the peer instance to be useful.
3250 3252 with ui.configoverride({
3251 3253 ('experimental', 'httppeer.advertise-v2'): True}):
3252 3254 if opts['nologhandshake']:
3253 3255 ui.pushbuffer()
3254 3256
3255 3257 peer = httppeer.makepeer(ui, path, opener=opener)
3256 3258
3257 3259 if opts['nologhandshake']:
3258 3260 ui.popbuffer()
3259 3261
3260 3262 if not isinstance(peer, httppeer.httpv2peer):
3261 3263 raise error.Abort(_('could not instantiate HTTP peer for '
3262 3264 'wire protocol version 2'),
3263 3265 hint=_('the server may not have the feature '
3264 3266 'enabled or is not allowing this '
3265 3267 'client version'))
3266 3268
3267 3269 elif opts['peer'] == 'raw':
3268 3270 ui.write(_('using raw connection to peer\n'))
3269 3271 peer = None
3270 3272 elif opts['peer']:
3271 3273 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3272 3274 opts['peer'])
3273 3275 else:
3274 3276 peer = httppeer.makepeer(ui, path, opener=opener)
3275 3277
3276 3278 # We /could/ populate stdin/stdout with sock.makefile()...
3277 3279 else:
3278 3280 raise error.Abort(_('unsupported connection configuration'))
3279 3281
3280 3282 batchedcommands = None
3281 3283
3282 3284 # Now perform actions based on the parsed wire language instructions.
3283 3285 for action, lines in blocks:
3284 3286 if action in ('raw', 'raw+'):
3285 3287 if not stdin:
3286 3288 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3287 3289
3288 3290 # Concatenate the data together.
3289 3291 data = ''.join(l.lstrip() for l in lines)
3290 3292 data = stringutil.unescapestr(data)
3291 3293 stdin.write(data)
3292 3294
3293 3295 if action == 'raw+':
3294 3296 stdin.flush()
3295 3297 elif action == 'flush':
3296 3298 if not stdin:
3297 3299 raise error.Abort(_('cannot call flush on this peer'))
3298 3300 stdin.flush()
3299 3301 elif action.startswith('command'):
3300 3302 if not peer:
3301 3303 raise error.Abort(_('cannot send commands unless peer instance '
3302 3304 'is available'))
3303 3305
3304 3306 command = action.split(' ', 1)[1]
3305 3307
3306 3308 args = {}
3307 3309 for line in lines:
3308 3310 # We need to allow empty values.
3309 3311 fields = line.lstrip().split(' ', 1)
3310 3312 if len(fields) == 1:
3311 3313 key = fields[0]
3312 3314 value = ''
3313 3315 else:
3314 3316 key, value = fields
3315 3317
3316 3318 if value.startswith('eval:'):
3317 3319 value = stringutil.evalpythonliteral(value[5:])
3318 3320 else:
3319 3321 value = stringutil.unescapestr(value)
3320 3322
3321 3323 args[key] = value
3322 3324
3323 3325 if batchedcommands is not None:
3324 3326 batchedcommands.append((command, args))
3325 3327 continue
3326 3328
3327 3329 ui.status(_('sending %s command\n') % command)
3328 3330
3329 3331 if 'PUSHFILE' in args:
3330 3332 with open(args['PUSHFILE'], r'rb') as fh:
3331 3333 del args['PUSHFILE']
3332 3334 res, output = peer._callpush(command, fh,
3333 3335 **pycompat.strkwargs(args))
3334 3336 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3335 3337 ui.status(_('remote output: %s\n') %
3336 3338 stringutil.escapestr(output))
3337 3339 else:
3338 3340 with peer.commandexecutor() as e:
3339 3341 res = e.callcommand(command, args).result()
3340 3342
3341 3343 if isinstance(res, wireprotov2peer.commandresponse):
3342 3344 val = res.objects()
3343 3345 ui.status(_('response: %s\n') %
3344 3346 stringutil.pprint(val, bprefix=True, indent=2))
3345 3347 else:
3346 3348 ui.status(_('response: %s\n') %
3347 3349 stringutil.pprint(res, bprefix=True, indent=2))
3348 3350
3349 3351 elif action == 'batchbegin':
3350 3352 if batchedcommands is not None:
3351 3353 raise error.Abort(_('nested batchbegin not allowed'))
3352 3354
3353 3355 batchedcommands = []
3354 3356 elif action == 'batchsubmit':
3355 3357 # There is a batching API we could go through. But it would be
3356 3358 # difficult to normalize requests into function calls. It is easier
3357 3359 # to bypass this layer and normalize to commands + args.
3358 3360 ui.status(_('sending batch with %d sub-commands\n') %
3359 3361 len(batchedcommands))
3360 3362 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3361 3363 ui.status(_('response #%d: %s\n') %
3362 3364 (i, stringutil.escapestr(chunk)))
3363 3365
3364 3366 batchedcommands = None
3365 3367
3366 3368 elif action.startswith('httprequest '):
3367 3369 if not opener:
3368 3370 raise error.Abort(_('cannot use httprequest without an HTTP '
3369 3371 'peer'))
3370 3372
3371 3373 request = action.split(' ', 2)
3372 3374 if len(request) != 3:
3373 3375 raise error.Abort(_('invalid httprequest: expected format is '
3374 3376 '"httprequest <method> <path>'))
3375 3377
3376 3378 method, httppath = request[1:]
3377 3379 headers = {}
3378 3380 body = None
3379 3381 frames = []
3380 3382 for line in lines:
3381 3383 line = line.lstrip()
3382 3384 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3383 3385 if m:
3384 3386 # Headers need to use native strings.
3385 3387 key = pycompat.strurl(m.group(1))
3386 3388 value = pycompat.strurl(m.group(2))
3387 3389 headers[key] = value
3388 3390 continue
3389 3391
3390 3392 if line.startswith(b'BODYFILE '):
3391 3393 with open(line.split(b' ', 1), 'rb') as fh:
3392 3394 body = fh.read()
3393 3395 elif line.startswith(b'frame '):
3394 3396 frame = wireprotoframing.makeframefromhumanstring(
3395 3397 line[len(b'frame '):])
3396 3398
3397 3399 frames.append(frame)
3398 3400 else:
3399 3401 raise error.Abort(_('unknown argument to httprequest: %s') %
3400 3402 line)
3401 3403
3402 3404 url = path + httppath
3403 3405
3404 3406 if frames:
3405 3407 body = b''.join(bytes(f) for f in frames)
3406 3408
3407 3409 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3408 3410
3409 3411 # urllib.Request insists on using has_data() as a proxy for
3410 3412 # determining the request method. Override that to use our
3411 3413 # explicitly requested method.
3412 3414 req.get_method = lambda: pycompat.sysstr(method)
3413 3415
3414 3416 try:
3415 3417 res = opener.open(req)
3416 3418 body = res.read()
3417 3419 except util.urlerr.urlerror as e:
3418 3420 # read() method must be called, but only exists in Python 2
3419 3421 getattr(e, 'read', lambda: None)()
3420 3422 continue
3421 3423
3422 3424 ct = res.headers.get(r'Content-Type')
3423 3425 if ct == r'application/mercurial-cbor':
3424 3426 ui.write(_('cbor> %s\n') %
3425 3427 stringutil.pprint(cborutil.decodeall(body),
3426 3428 bprefix=True,
3427 3429 indent=2))
3428 3430
3429 3431 elif action == 'close':
3430 3432 peer.close()
3431 3433 elif action == 'readavailable':
3432 3434 if not stdout or not stderr:
3433 3435 raise error.Abort(_('readavailable not available on this peer'))
3434 3436
3435 3437 stdin.close()
3436 3438 stdout.read()
3437 3439 stderr.read()
3438 3440
3439 3441 elif action == 'readline':
3440 3442 if not stdout:
3441 3443 raise error.Abort(_('readline not available on this peer'))
3442 3444 stdout.readline()
3443 3445 elif action == 'ereadline':
3444 3446 if not stderr:
3445 3447 raise error.Abort(_('ereadline not available on this peer'))
3446 3448 stderr.readline()
3447 3449 elif action.startswith('read '):
3448 3450 count = int(action.split(' ', 1)[1])
3449 3451 if not stdout:
3450 3452 raise error.Abort(_('read not available on this peer'))
3451 3453 stdout.read(count)
3452 3454 elif action.startswith('eread '):
3453 3455 count = int(action.split(' ', 1)[1])
3454 3456 if not stderr:
3455 3457 raise error.Abort(_('eread not available on this peer'))
3456 3458 stderr.read(count)
3457 3459 else:
3458 3460 raise error.Abort(_('unknown action: %s') % action)
3459 3461
3460 3462 if batchedcommands is not None:
3461 3463 raise error.Abort(_('unclosed "batchbegin" request'))
3462 3464
3463 3465 if peer:
3464 3466 peer.close()
3465 3467
3466 3468 if proc:
3467 3469 proc.kill()
@@ -1,1083 +1,1121 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 46 elapsed time: * seconds (glob)
47 47 heads summary:
48 48 total common heads: 2
49 49 also local heads: 2
50 50 also remote heads: 1
51 both: 1
51 52 local heads: 2
52 53 common: 2
53 54 missing: 0
54 55 remote heads: 3
55 56 common: 1
56 57 unknown: 2
57 58 local changesets: 7
58 59 common: 7
59 60 missing: 0
60 61 common heads: 01241442b3c2 b5714e113bc0
61 62
62 63 % -- a -> b set
63 64 comparing with b
64 65 query 1; heads
65 66 searching for changes
66 67 all local heads known remotely
67 68 elapsed time: * seconds (glob)
68 69 heads summary:
69 70 total common heads: 2
70 71 also local heads: 2
71 72 also remote heads: 1
73 both: 1
72 74 local heads: 2
73 75 common: 2
74 76 missing: 0
75 77 remote heads: 3
76 78 common: 1
77 79 unknown: 2
78 80 local changesets: 7
79 81 common: 7
80 82 missing: 0
81 83 common heads: 01241442b3c2 b5714e113bc0
82 84
83 85 % -- a -> b set (tip only)
84 86 comparing with b
85 87 query 1; heads
86 88 searching for changes
87 89 all local heads known remotely
88 90 elapsed time: * seconds (glob)
89 91 heads summary:
90 92 total common heads: 1
91 93 also local heads: 1
92 94 also remote heads: 0
95 both: 0
93 96 local heads: 2
94 97 common: 1
95 98 missing: 1
96 99 remote heads: 3
97 100 common: 0
98 101 unknown: 3
99 102 local changesets: 7
100 103 common: 6
101 104 missing: 1
102 105 common heads: b5714e113bc0
103 106
104 107 % -- b -> a tree
105 108 comparing with a
106 109 searching for changes
107 110 unpruned common: 01241442b3c2 b5714e113bc0
108 111 elapsed time: * seconds (glob)
109 112 heads summary:
110 113 total common heads: 2
111 114 also local heads: 1
112 115 also remote heads: 2
116 both: 1
113 117 local heads: 3
114 118 common: 1
115 119 missing: 2
116 120 remote heads: 2
117 121 common: 2
118 122 unknown: 0
119 123 local changesets: 15
120 124 common: 7
121 125 missing: 8
122 126 common heads: 01241442b3c2 b5714e113bc0
123 127
124 128 % -- b -> a set
125 129 comparing with a
126 130 query 1; heads
127 131 searching for changes
128 132 all remote heads known locally
129 133 elapsed time: * seconds (glob)
130 134 heads summary:
131 135 total common heads: 2
132 136 also local heads: 1
133 137 also remote heads: 2
138 both: 1
134 139 local heads: 3
135 140 common: 1
136 141 missing: 2
137 142 remote heads: 2
138 143 common: 2
139 144 unknown: 0
140 145 local changesets: 15
141 146 common: 7
142 147 missing: 8
143 148 common heads: 01241442b3c2 b5714e113bc0
144 149
145 150 % -- b -> a set (tip only)
146 151 comparing with a
147 152 query 1; heads
148 153 searching for changes
149 154 all remote heads known locally
150 155 elapsed time: * seconds (glob)
151 156 heads summary:
152 157 total common heads: 2
153 158 also local heads: 1
154 159 also remote heads: 2
160 both: 1
155 161 local heads: 3
156 162 common: 1
157 163 missing: 2
158 164 remote heads: 2
159 165 common: 2
160 166 unknown: 0
161 167 local changesets: 15
162 168 common: 7
163 169 missing: 8
164 170 common heads: 01241442b3c2 b5714e113bc0
165 171
166 172
167 173 Many new:
168 174
169 175 $ testdesc '-ra1 -ra2' '-rb' '
170 176 > +2:f +3:a1 +3:b
171 177 > <f +30 :a2'
172 178
173 179 % -- a -> b tree
174 180 comparing with b
175 181 searching for changes
176 182 unpruned common: bebd167eb94d
177 183 elapsed time: * seconds (glob)
178 184 heads summary:
179 185 total common heads: 1
180 186 also local heads: 1
181 187 also remote heads: 0
188 both: 0
182 189 local heads: 2
183 190 common: 1
184 191 missing: 1
185 192 remote heads: 1
186 193 common: 0
187 194 unknown: 1
188 195 local changesets: 35
189 196 common: 5
190 197 missing: 30
191 198 common heads: bebd167eb94d
192 199
193 200 % -- a -> b set
194 201 comparing with b
195 202 query 1; heads
196 203 searching for changes
197 204 taking initial sample
198 205 searching: 2 queries
199 206 query 2; still undecided: 29, sample size is: 29
200 207 2 total queries in *.????s (glob)
201 208 elapsed time: * seconds (glob)
202 209 heads summary:
203 210 total common heads: 1
204 211 also local heads: 1
205 212 also remote heads: 0
213 both: 0
206 214 local heads: 2
207 215 common: 1
208 216 missing: 1
209 217 remote heads: 1
210 218 common: 0
211 219 unknown: 1
212 220 local changesets: 35
213 221 common: 5
214 222 missing: 30
215 223 common heads: bebd167eb94d
216 224
217 225 % -- a -> b set (tip only)
218 226 comparing with b
219 227 query 1; heads
220 228 searching for changes
221 229 taking quick initial sample
222 230 searching: 2 queries
223 231 query 2; still undecided: 31, sample size is: 31
224 232 2 total queries in *.????s (glob)
225 233 elapsed time: * seconds (glob)
226 234 heads summary:
227 235 total common heads: 1
228 236 also local heads: 0
229 237 also remote heads: 0
238 both: 0
230 239 local heads: 2
231 240 common: 0
232 241 missing: 2
233 242 remote heads: 1
234 243 common: 0
235 244 unknown: 1
236 245 local changesets: 35
237 246 common: 2
238 247 missing: 33
239 248 common heads: 66f7d451a68b
240 249
241 250 % -- b -> a tree
242 251 comparing with a
243 252 searching for changes
244 253 unpruned common: 66f7d451a68b bebd167eb94d
245 254 elapsed time: * seconds (glob)
246 255 heads summary:
247 256 total common heads: 1
248 257 also local heads: 0
249 258 also remote heads: 1
259 both: 0
250 260 local heads: 1
251 261 common: 0
252 262 missing: 1
253 263 remote heads: 2
254 264 common: 1
255 265 unknown: 1
256 266 local changesets: 8
257 267 common: 5
258 268 missing: 3
259 269 common heads: bebd167eb94d
260 270
261 271 % -- b -> a set
262 272 comparing with a
263 273 query 1; heads
264 274 searching for changes
265 275 taking initial sample
266 276 searching: 2 queries
267 277 query 2; still undecided: 2, sample size is: 2
268 278 2 total queries in *.????s (glob)
269 279 elapsed time: * seconds (glob)
270 280 heads summary:
271 281 total common heads: 1
272 282 also local heads: 0
273 283 also remote heads: 1
284 both: 0
274 285 local heads: 1
275 286 common: 0
276 287 missing: 1
277 288 remote heads: 2
278 289 common: 1
279 290 unknown: 1
280 291 local changesets: 8
281 292 common: 5
282 293 missing: 3
283 294 common heads: bebd167eb94d
284 295
285 296 % -- b -> a set (tip only)
286 297 comparing with a
287 298 query 1; heads
288 299 searching for changes
289 300 taking initial sample
290 301 searching: 2 queries
291 302 query 2; still undecided: 2, sample size is: 2
292 303 2 total queries in *.????s (glob)
293 304 elapsed time: * seconds (glob)
294 305 heads summary:
295 306 total common heads: 1
296 307 also local heads: 0
297 308 also remote heads: 1
309 both: 0
298 310 local heads: 1
299 311 common: 0
300 312 missing: 1
301 313 remote heads: 2
302 314 common: 1
303 315 unknown: 1
304 316 local changesets: 8
305 317 common: 5
306 318 missing: 3
307 319 common heads: bebd167eb94d
308 320
309 321 Both sides many new with stub:
310 322
311 323 $ testdesc '-ra1 -ra2' '-rb' '
312 324 > +2:f +2:a1 +30 :b
313 325 > <f +30 :a2'
314 326
315 327 % -- a -> b tree
316 328 comparing with b
317 329 searching for changes
318 330 unpruned common: 2dc09a01254d
319 331 elapsed time: * seconds (glob)
320 332 heads summary:
321 333 total common heads: 1
322 334 also local heads: 1
323 335 also remote heads: 0
336 both: 0
324 337 local heads: 2
325 338 common: 1
326 339 missing: 1
327 340 remote heads: 1
328 341 common: 0
329 342 unknown: 1
330 343 local changesets: 34
331 344 common: 4
332 345 missing: 30
333 346 common heads: 2dc09a01254d
334 347
335 348 % -- a -> b set
336 349 comparing with b
337 350 query 1; heads
338 351 searching for changes
339 352 taking initial sample
340 353 searching: 2 queries
341 354 query 2; still undecided: 29, sample size is: 29
342 355 2 total queries in *.????s (glob)
343 356 elapsed time: * seconds (glob)
344 357 heads summary:
345 358 total common heads: 1
346 359 also local heads: 1
347 360 also remote heads: 0
361 both: 0
348 362 local heads: 2
349 363 common: 1
350 364 missing: 1
351 365 remote heads: 1
352 366 common: 0
353 367 unknown: 1
354 368 local changesets: 34
355 369 common: 4
356 370 missing: 30
357 371 common heads: 2dc09a01254d
358 372
359 373 % -- a -> b set (tip only)
360 374 comparing with b
361 375 query 1; heads
362 376 searching for changes
363 377 taking quick initial sample
364 378 searching: 2 queries
365 379 query 2; still undecided: 31, sample size is: 31
366 380 2 total queries in *.????s (glob)
367 381 elapsed time: * seconds (glob)
368 382 heads summary:
369 383 total common heads: 1
370 384 also local heads: 0
371 385 also remote heads: 0
386 both: 0
372 387 local heads: 2
373 388 common: 0
374 389 missing: 2
375 390 remote heads: 1
376 391 common: 0
377 392 unknown: 1
378 393 local changesets: 34
379 394 common: 2
380 395 missing: 32
381 396 common heads: 66f7d451a68b
382 397
383 398 % -- b -> a tree
384 399 comparing with a
385 400 searching for changes
386 401 unpruned common: 2dc09a01254d 66f7d451a68b
387 402 elapsed time: * seconds (glob)
388 403 heads summary:
389 404 total common heads: 1
390 405 also local heads: 0
391 406 also remote heads: 1
407 both: 0
392 408 local heads: 1
393 409 common: 0
394 410 missing: 1
395 411 remote heads: 2
396 412 common: 1
397 413 unknown: 1
398 414 local changesets: 34
399 415 common: 4
400 416 missing: 30
401 417 common heads: 2dc09a01254d
402 418
403 419 % -- b -> a set
404 420 comparing with a
405 421 query 1; heads
406 422 searching for changes
407 423 taking initial sample
408 424 searching: 2 queries
409 425 query 2; still undecided: 29, sample size is: 29
410 426 2 total queries in *.????s (glob)
411 427 elapsed time: * seconds (glob)
412 428 heads summary:
413 429 total common heads: 1
414 430 also local heads: 0
415 431 also remote heads: 1
432 both: 0
416 433 local heads: 1
417 434 common: 0
418 435 missing: 1
419 436 remote heads: 2
420 437 common: 1
421 438 unknown: 1
422 439 local changesets: 34
423 440 common: 4
424 441 missing: 30
425 442 common heads: 2dc09a01254d
426 443
427 444 % -- b -> a set (tip only)
428 445 comparing with a
429 446 query 1; heads
430 447 searching for changes
431 448 taking initial sample
432 449 searching: 2 queries
433 450 query 2; still undecided: 29, sample size is: 29
434 451 2 total queries in *.????s (glob)
435 452 elapsed time: * seconds (glob)
436 453 heads summary:
437 454 total common heads: 1
438 455 also local heads: 0
439 456 also remote heads: 1
457 both: 0
440 458 local heads: 1
441 459 common: 0
442 460 missing: 1
443 461 remote heads: 2
444 462 common: 1
445 463 unknown: 1
446 464 local changesets: 34
447 465 common: 4
448 466 missing: 30
449 467 common heads: 2dc09a01254d
450 468
451 469
452 470 Both many new:
453 471
454 472 $ testdesc '-ra' '-rb' '
455 473 > +2:f +30 :b
456 474 > <f +30 :a'
457 475
458 476 % -- a -> b tree
459 477 comparing with b
460 478 searching for changes
461 479 unpruned common: 66f7d451a68b
462 480 elapsed time: * seconds (glob)
463 481 heads summary:
464 482 total common heads: 1
465 483 also local heads: 0
466 484 also remote heads: 0
485 both: 0
467 486 local heads: 1
468 487 common: 0
469 488 missing: 1
470 489 remote heads: 1
471 490 common: 0
472 491 unknown: 1
473 492 local changesets: 32
474 493 common: 2
475 494 missing: 30
476 495 common heads: 66f7d451a68b
477 496
478 497 % -- a -> b set
479 498 comparing with b
480 499 query 1; heads
481 500 searching for changes
482 501 taking quick initial sample
483 502 searching: 2 queries
484 503 query 2; still undecided: 31, sample size is: 31
485 504 2 total queries in *.????s (glob)
486 505 elapsed time: * seconds (glob)
487 506 heads summary:
488 507 total common heads: 1
489 508 also local heads: 0
490 509 also remote heads: 0
510 both: 0
491 511 local heads: 1
492 512 common: 0
493 513 missing: 1
494 514 remote heads: 1
495 515 common: 0
496 516 unknown: 1
497 517 local changesets: 32
498 518 common: 2
499 519 missing: 30
500 520 common heads: 66f7d451a68b
501 521
502 522 % -- a -> b set (tip only)
503 523 comparing with b
504 524 query 1; heads
505 525 searching for changes
506 526 taking quick initial sample
507 527 searching: 2 queries
508 528 query 2; still undecided: 31, sample size is: 31
509 529 2 total queries in *.????s (glob)
510 530 elapsed time: * seconds (glob)
511 531 heads summary:
512 532 total common heads: 1
513 533 also local heads: 0
514 534 also remote heads: 0
535 both: 0
515 536 local heads: 1
516 537 common: 0
517 538 missing: 1
518 539 remote heads: 1
519 540 common: 0
520 541 unknown: 1
521 542 local changesets: 32
522 543 common: 2
523 544 missing: 30
524 545 common heads: 66f7d451a68b
525 546
526 547 % -- b -> a tree
527 548 comparing with a
528 549 searching for changes
529 550 unpruned common: 66f7d451a68b
530 551 elapsed time: * seconds (glob)
531 552 heads summary:
532 553 total common heads: 1
533 554 also local heads: 0
534 555 also remote heads: 0
556 both: 0
535 557 local heads: 1
536 558 common: 0
537 559 missing: 1
538 560 remote heads: 1
539 561 common: 0
540 562 unknown: 1
541 563 local changesets: 32
542 564 common: 2
543 565 missing: 30
544 566 common heads: 66f7d451a68b
545 567
546 568 % -- b -> a set
547 569 comparing with a
548 570 query 1; heads
549 571 searching for changes
550 572 taking quick initial sample
551 573 searching: 2 queries
552 574 query 2; still undecided: 31, sample size is: 31
553 575 2 total queries in *.????s (glob)
554 576 elapsed time: * seconds (glob)
555 577 heads summary:
556 578 total common heads: 1
557 579 also local heads: 0
558 580 also remote heads: 0
581 both: 0
559 582 local heads: 1
560 583 common: 0
561 584 missing: 1
562 585 remote heads: 1
563 586 common: 0
564 587 unknown: 1
565 588 local changesets: 32
566 589 common: 2
567 590 missing: 30
568 591 common heads: 66f7d451a68b
569 592
570 593 % -- b -> a set (tip only)
571 594 comparing with a
572 595 query 1; heads
573 596 searching for changes
574 597 taking quick initial sample
575 598 searching: 2 queries
576 599 query 2; still undecided: 31, sample size is: 31
577 600 2 total queries in *.????s (glob)
578 601 elapsed time: * seconds (glob)
579 602 heads summary:
580 603 total common heads: 1
581 604 also local heads: 0
582 605 also remote heads: 0
606 both: 0
583 607 local heads: 1
584 608 common: 0
585 609 missing: 1
586 610 remote heads: 1
587 611 common: 0
588 612 unknown: 1
589 613 local changesets: 32
590 614 common: 2
591 615 missing: 30
592 616 common heads: 66f7d451a68b
593 617
594 618
595 619 Both many new skewed:
596 620
597 621 $ testdesc '-ra' '-rb' '
598 622 > +2:f +30 :b
599 623 > <f +50 :a'
600 624
601 625 % -- a -> b tree
602 626 comparing with b
603 627 searching for changes
604 628 unpruned common: 66f7d451a68b
605 629 elapsed time: * seconds (glob)
606 630 heads summary:
607 631 total common heads: 1
608 632 also local heads: 0
609 633 also remote heads: 0
634 both: 0
610 635 local heads: 1
611 636 common: 0
612 637 missing: 1
613 638 remote heads: 1
614 639 common: 0
615 640 unknown: 1
616 641 local changesets: 52
617 642 common: 2
618 643 missing: 50
619 644 common heads: 66f7d451a68b
620 645
621 646 % -- a -> b set
622 647 comparing with b
623 648 query 1; heads
624 649 searching for changes
625 650 taking quick initial sample
626 651 searching: 2 queries
627 652 query 2; still undecided: 51, sample size is: 51
628 653 2 total queries in *.????s (glob)
629 654 elapsed time: * seconds (glob)
630 655 heads summary:
631 656 total common heads: 1
632 657 also local heads: 0
633 658 also remote heads: 0
659 both: 0
634 660 local heads: 1
635 661 common: 0
636 662 missing: 1
637 663 remote heads: 1
638 664 common: 0
639 665 unknown: 1
640 666 local changesets: 52
641 667 common: 2
642 668 missing: 50
643 669 common heads: 66f7d451a68b
644 670
645 671 % -- a -> b set (tip only)
646 672 comparing with b
647 673 query 1; heads
648 674 searching for changes
649 675 taking quick initial sample
650 676 searching: 2 queries
651 677 query 2; still undecided: 51, sample size is: 51
652 678 2 total queries in *.????s (glob)
653 679 elapsed time: * seconds (glob)
654 680 heads summary:
655 681 total common heads: 1
656 682 also local heads: 0
657 683 also remote heads: 0
684 both: 0
658 685 local heads: 1
659 686 common: 0
660 687 missing: 1
661 688 remote heads: 1
662 689 common: 0
663 690 unknown: 1
664 691 local changesets: 52
665 692 common: 2
666 693 missing: 50
667 694 common heads: 66f7d451a68b
668 695
669 696 % -- b -> a tree
670 697 comparing with a
671 698 searching for changes
672 699 unpruned common: 66f7d451a68b
673 700 elapsed time: * seconds (glob)
674 701 heads summary:
675 702 total common heads: 1
676 703 also local heads: 0
677 704 also remote heads: 0
705 both: 0
678 706 local heads: 1
679 707 common: 0
680 708 missing: 1
681 709 remote heads: 1
682 710 common: 0
683 711 unknown: 1
684 712 local changesets: 32
685 713 common: 2
686 714 missing: 30
687 715 common heads: 66f7d451a68b
688 716
689 717 % -- b -> a set
690 718 comparing with a
691 719 query 1; heads
692 720 searching for changes
693 721 taking quick initial sample
694 722 searching: 2 queries
695 723 query 2; still undecided: 31, sample size is: 31
696 724 2 total queries in *.????s (glob)
697 725 elapsed time: * seconds (glob)
698 726 heads summary:
699 727 total common heads: 1
700 728 also local heads: 0
701 729 also remote heads: 0
730 both: 0
702 731 local heads: 1
703 732 common: 0
704 733 missing: 1
705 734 remote heads: 1
706 735 common: 0
707 736 unknown: 1
708 737 local changesets: 32
709 738 common: 2
710 739 missing: 30
711 740 common heads: 66f7d451a68b
712 741
713 742 % -- b -> a set (tip only)
714 743 comparing with a
715 744 query 1; heads
716 745 searching for changes
717 746 taking quick initial sample
718 747 searching: 2 queries
719 748 query 2; still undecided: 31, sample size is: 31
720 749 2 total queries in *.????s (glob)
721 750 elapsed time: * seconds (glob)
722 751 heads summary:
723 752 total common heads: 1
724 753 also local heads: 0
725 754 also remote heads: 0
755 both: 0
726 756 local heads: 1
727 757 common: 0
728 758 missing: 1
729 759 remote heads: 1
730 760 common: 0
731 761 unknown: 1
732 762 local changesets: 32
733 763 common: 2
734 764 missing: 30
735 765 common heads: 66f7d451a68b
736 766
737 767
738 768 Both many new on top of long history:
739 769
740 770 $ testdesc '-ra' '-rb' '
741 771 > +1000:f +30 :b
742 772 > <f +50 :a'
743 773
744 774 % -- a -> b tree
745 775 comparing with b
746 776 searching for changes
747 777 unpruned common: 7ead0cba2838
748 778 elapsed time: * seconds (glob)
749 779 heads summary:
750 780 total common heads: 1
751 781 also local heads: 0
752 782 also remote heads: 0
783 both: 0
753 784 local heads: 1
754 785 common: 0
755 786 missing: 1
756 787 remote heads: 1
757 788 common: 0
758 789 unknown: 1
759 790 local changesets: 1050
760 791 common: 1000
761 792 missing: 50
762 793 common heads: 7ead0cba2838
763 794
764 795 % -- a -> b set
765 796 comparing with b
766 797 query 1; heads
767 798 searching for changes
768 799 taking quick initial sample
769 800 searching: 2 queries
770 801 query 2; still undecided: 1049, sample size is: 11
771 802 sampling from both directions
772 803 searching: 3 queries
773 804 query 3; still undecided: 31, sample size is: 31
774 805 3 total queries in *.????s (glob)
775 806 elapsed time: * seconds (glob)
776 807 heads summary:
777 808 total common heads: 1
778 809 also local heads: 0
779 810 also remote heads: 0
811 both: 0
780 812 local heads: 1
781 813 common: 0
782 814 missing: 1
783 815 remote heads: 1
784 816 common: 0
785 817 unknown: 1
786 818 local changesets: 1050
787 819 common: 1000
788 820 missing: 50
789 821 common heads: 7ead0cba2838
790 822
791 823 % -- a -> b set (tip only)
792 824 comparing with b
793 825 query 1; heads
794 826 searching for changes
795 827 taking quick initial sample
796 828 searching: 2 queries
797 829 query 2; still undecided: 1049, sample size is: 11
798 830 sampling from both directions
799 831 searching: 3 queries
800 832 query 3; still undecided: 31, sample size is: 31
801 833 3 total queries in *.????s (glob)
802 834 elapsed time: * seconds (glob)
803 835 heads summary:
804 836 total common heads: 1
805 837 also local heads: 0
806 838 also remote heads: 0
839 both: 0
807 840 local heads: 1
808 841 common: 0
809 842 missing: 1
810 843 remote heads: 1
811 844 common: 0
812 845 unknown: 1
813 846 local changesets: 1050
814 847 common: 1000
815 848 missing: 50
816 849 common heads: 7ead0cba2838
817 850
818 851 % -- b -> a tree
819 852 comparing with a
820 853 searching for changes
821 854 unpruned common: 7ead0cba2838
822 855 elapsed time: * seconds (glob)
823 856 heads summary:
824 857 total common heads: 1
825 858 also local heads: 0
826 859 also remote heads: 0
860 both: 0
827 861 local heads: 1
828 862 common: 0
829 863 missing: 1
830 864 remote heads: 1
831 865 common: 0
832 866 unknown: 1
833 867 local changesets: 1030
834 868 common: 1000
835 869 missing: 30
836 870 common heads: 7ead0cba2838
837 871
838 872 % -- b -> a set
839 873 comparing with a
840 874 query 1; heads
841 875 searching for changes
842 876 taking quick initial sample
843 877 searching: 2 queries
844 878 query 2; still undecided: 1029, sample size is: 11
845 879 sampling from both directions
846 880 searching: 3 queries
847 881 query 3; still undecided: 15, sample size is: 15
848 882 3 total queries in *.????s (glob)
849 883 elapsed time: * seconds (glob)
850 884 heads summary:
851 885 total common heads: 1
852 886 also local heads: 0
853 887 also remote heads: 0
888 both: 0
854 889 local heads: 1
855 890 common: 0
856 891 missing: 1
857 892 remote heads: 1
858 893 common: 0
859 894 unknown: 1
860 895 local changesets: 1030
861 896 common: 1000
862 897 missing: 30
863 898 common heads: 7ead0cba2838
864 899
865 900 % -- b -> a set (tip only)
866 901 comparing with a
867 902 query 1; heads
868 903 searching for changes
869 904 taking quick initial sample
870 905 searching: 2 queries
871 906 query 2; still undecided: 1029, sample size is: 11
872 907 sampling from both directions
873 908 searching: 3 queries
874 909 query 3; still undecided: 15, sample size is: 15
875 910 3 total queries in *.????s (glob)
876 911 elapsed time: * seconds (glob)
877 912 heads summary:
878 913 total common heads: 1
879 914 also local heads: 0
880 915 also remote heads: 0
916 both: 0
881 917 local heads: 1
882 918 common: 0
883 919 missing: 1
884 920 remote heads: 1
885 921 common: 0
886 922 unknown: 1
887 923 local changesets: 1030
888 924 common: 1000
889 925 missing: 30
890 926 common heads: 7ead0cba2838
891 927
892 928
893 929 One with >200 heads, which used to use up all of the sample:
894 930
895 931 $ hg init manyheads
896 932 $ cd manyheads
897 933 $ echo "+300:r @a" >dagdesc
898 934 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
899 935 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
900 936 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
901 937 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
902 938 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
903 939 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
904 940 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
905 941 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
906 942 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
907 943 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
908 944 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
909 945 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
910 946 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
911 947 $ echo "@b *r+3" >>dagdesc # one more head
912 948 $ hg debugbuilddag <dagdesc
913 949 reading DAG from stdin
914 950
915 951 $ hg heads -t --template . | wc -c
916 952 \s*261 (re)
917 953
918 954 $ hg clone -b a . a
919 955 adding changesets
920 956 adding manifests
921 957 adding file changes
922 958 added 1340 changesets with 0 changes to 0 files (+259 heads)
923 959 new changesets 1ea73414a91b:1c51e2c80832
924 960 updating to branch a
925 961 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
926 962 $ hg clone -b b . b
927 963 adding changesets
928 964 adding manifests
929 965 adding file changes
930 966 added 304 changesets with 0 changes to 0 files
931 967 new changesets 1ea73414a91b:513314ca8b3a
932 968 updating to branch b
933 969 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
934 970
935 971 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
936 972 comparing with b
937 973 query 1; heads
938 974 searching for changes
939 975 taking quick initial sample
940 976 searching: 2 queries
941 977 query 2; still undecided: 1240, sample size is: 100
942 978 sampling from both directions
943 979 searching: 3 queries
944 980 query 3; still undecided: 1140, sample size is: 200
945 981 sampling from both directions
946 982 searching: 4 queries
947 983 query 4; still undecided: \d+, sample size is: 200 (re)
948 984 sampling from both directions
949 985 searching: 5 queries
950 986 query 5; still undecided: \d+, sample size is: 200 (re)
951 987 sampling from both directions
952 988 searching: 6 queries
953 989 query 6; still undecided: \d+, sample size is: \d+ (re)
954 990 6 total queries in *.????s (glob)
955 991 elapsed time: * seconds (glob)
956 992 heads summary:
957 993 total common heads: 1
958 994 also local heads: 0
959 995 also remote heads: 0
996 both: 0
960 997 local heads: 260
961 998 common: 0
962 999 missing: 260
963 1000 remote heads: 1
964 1001 common: 0
965 1002 unknown: 1
966 1003 local changesets: 1340
967 1004 common: 300
968 1005 missing: 1040
969 1006 common heads: 3ee37d65064a
970 1007 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
971 1008 comparing with b
972 1009 query 1; heads
973 1010 searching for changes
974 1011 taking quick initial sample
975 1012 searching: 2 queries
976 1013 query 2; still undecided: 303, sample size is: 9
977 1014 sampling from both directions
978 1015 searching: 3 queries
979 1016 query 3; still undecided: 3, sample size is: 3
980 1017 3 total queries in *.????s (glob)
981 1018 elapsed time: * seconds (glob)
982 1019 heads summary:
983 1020 total common heads: 1
984 1021 also local heads: 0
985 1022 also remote heads: 0
1023 both: 0
986 1024 local heads: 260
987 1025 common: 0
988 1026 missing: 260
989 1027 remote heads: 1
990 1028 common: 0
991 1029 unknown: 1
992 1030 local changesets: 1340
993 1031 common: 300
994 1032 missing: 1040
995 1033 common heads: 3ee37d65064a
996 1034
997 1035 Test actual protocol when pulling one new head in addition to common heads
998 1036
999 1037 $ hg clone -U b c
1000 1038 $ hg -R c id -ir tip
1001 1039 513314ca8b3a
1002 1040 $ hg -R c up -qr default
1003 1041 $ touch c/f
1004 1042 $ hg -R c ci -Aqm "extra head"
1005 1043 $ hg -R c id -i
1006 1044 e64a39e7da8b
1007 1045
1008 1046 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1009 1047 $ cat hg.pid >> $DAEMON_PIDS
1010 1048
1011 1049 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
1012 1050 comparing with http://localhost:$HGPORT/
1013 1051 searching for changes
1014 1052 e64a39e7da8b
1015 1053
1016 1054 $ killdaemons.py
1017 1055 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
1018 1056 "GET /?cmd=capabilities HTTP/1.1" 200 -
1019 1057 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1020 1058 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1021 1059 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
1022 1060 $ cat errors.log
1023 1061
1024 1062 $ cd ..
1025 1063
1026 1064
1027 1065 Issue 4438 - test coverage for 3ef893520a85 issues.
1028 1066
1029 1067 $ mkdir issue4438
1030 1068 $ cd issue4438
1031 1069 #if false
1032 1070 generate new bundles:
1033 1071 $ hg init r1
1034 1072 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1035 1073 $ hg clone -q r1 r2
1036 1074 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1037 1075 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1038 1076 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1039 1077 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1040 1078 #else
1041 1079 use existing bundles:
1042 1080 $ hg init r1
1043 1081 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1044 1082 $ hg -R r1 -q up
1045 1083 $ hg init r2
1046 1084 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1047 1085 $ hg -R r2 -q up
1048 1086 #endif
1049 1087
1050 1088 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1051 1089
1052 1090 $ hg -R r1 outgoing r2 -T'{rev} '
1053 1091 comparing with r2
1054 1092 searching for changes
1055 1093 101 102 103 104 105 106 107 108 109 110 (no-eol)
1056 1094
1057 1095 The case where all the 'initialsamplesize' samples already were common would
1058 1096 give 'all remote heads known locally' without checking the remaining heads -
1059 1097 fixed in 86c35b7ae300:
1060 1098
1061 1099 $ cat >> $TESTTMP/unrandomsample.py << EOF
1062 1100 > import random
1063 1101 > def sample(population, k):
1064 1102 > return sorted(population)[:k]
1065 1103 > random.sample = sample
1066 1104 > EOF
1067 1105
1068 1106 $ cat >> r1/.hg/hgrc << EOF
1069 1107 > [extensions]
1070 1108 > unrandomsample = $TESTTMP/unrandomsample.py
1071 1109 > EOF
1072 1110
1073 1111 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1074 1112 > --config blackbox.track='command commandfinish discovery'
1075 1113 comparing with r2
1076 1114 searching for changes
1077 1115 101 102 103 104 105 106 107 108 109 110 (no-eol)
1078 1116 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1079 1117 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1080 1118 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1081 1119 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
1082 1120 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1083 1121 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now