##// END OF EJS Templates
debugdiscovery: display time elapsed during the discovery step...
marmoute -
r42202:eec20025 default
parent child Browse files
Show More
@@ -1,3462 +1,3465 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 from .revlogutils import (
91 91 deltas as deltautil
92 92 )
93 93
# Convenience alias used by commands below to release held locks.
release = lockmod.release

# Decorator that registers every debug* command in this module's command table.
command = registrar.command()
97 97
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three args: an explicit revlog index file plus two revisions; the
    # revlog is opened directly relative to the current working directory.
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    # Two args: fall back to the changelog of the current repository.
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 116
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path, parse the bundle header, then apply the
    # bundle's contents to the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
123 123
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Only an empty repository may be (re)built from a DAG description.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # First pass over the DAG: count node elements so the progress bar
    # below has a total.
    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # Second pass: actually create the commits inside a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the file contents of
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # Carry over the per-rev "nf*" files from the second
                        # parent so the merge commit keeps them.
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # ":tag" element: remember a local tag for the current node.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # "@branch" element: switch branch for subsequent commits.
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
271 271
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup 'gen'.

    With 'all' set, every delta of every chunk group (changelog, manifest,
    filelogs) is listed; otherwise only the changelog node hashes are shown.
    'indent' prefixes each output line with that many spaces.
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # The header reads advance the unbundler stream even though the
        # returned chunkdata is not used for changelog/manifest.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # Consume the changelog header, then list only the node hashes.
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
300 300
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Report the unknown format version instead of aborting, so the
        # rest of the bundle can still be inspected.
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        # Sort the raw markers so output order is deterministic.
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
323 323
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary phase data 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332 332
def _quasirepr(thing):
    """Return a repr-like bytes rendering of *thing*.

    Mapping types are rendered with their keys sorted, so the output is
    deterministic; everything else falls back to repr().
    """
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mappingtypes):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return '{%s}' % b', '.join(pairs)
338 338
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional --part-type filter: only show the named part types.
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, decode and display the payload too
        # (suppressed in quiet mode).
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
361 361
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec: print only the bundlespec and stop.
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # Dispatch on bundle format: bundle2 gets its own part-aware dump.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
380 380
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    # Bundle2 capabilities are nested (key -> values) and listed separately.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)
399 399
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Cross-check every dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Reverse direction: every manifest1 file must be tracked ('n', 'r'
    # or 'm') in the dirstate.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Don't name this local 'error': that would shadow the imported
        # 'error' module and make 'error.Abort' fail with AttributeError.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
427 427
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured label styles; default lists raw colors/effects.
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
438 438
def _debugdisplaycolor(ui):
    """Print every available color/effect name, rendered in itself."""
    # Work on a copy so clearing the style table doesn't affect the caller.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose color.*/terminfo.* config entries.
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
456 456
def _debugdisplaystyle(ui):
    """Print every configured style label with its effects, aligned."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # Column width: longest style name, used to align the effect lists.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
470 470
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 488
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Explicit revlog index file: emit its DAG, labeling any revisions
        # listed on the command line as "rN".
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, parents)) nodes and ('l', ...) labels in the
            # event format expected by dagparser.dagtextlines().
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No file given: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # Emit an 'a' (branch annotation) event on branch change.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
551 551
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional FILE slot actually holds the revision.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        # raw=True: dump the stored revision without flag processing.
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
@command('debugdate',
        [('e', 'extended', None, _('try extended date formats'))],
        _('[-e] DATE [RANGE]'),
        norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is the internal (unixtime, tzoffset) pair.
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        # Optional second argument: test the date against a date range.
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
583 583
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify the delta of 'rev' and measure its chain.  Index entry
        # fields used: e[1] compressed size, e[2] uncompressed size,
        # e[3] delta base, e[5]/e[6] compared against the base to tell
        # parent deltas apart (generaldelta only).
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta a delta is always against the previous
            # revision (or the rev is itself a full-text base).
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Number chains by first-seen base so every member shares a chainid.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard the ratios against division by zero (empty revisions).
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse read: how many hunks, how many bytes.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
735 735
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent fields used: ent[0] state char, ent[1] mode bits,
        # ent[2] size, ent[3] mtime (-1 means unset).
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the stored mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ('', 'seed', '12323', 'specify the random seed use for discovery'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    # Pick the discovery implementation: legacy tree discovery with --old,
    # otherwise the set-based discovery protocol.
    if opts.get('old'):
        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    # Time only the discovery step itself, not setup or reporting.
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data['elapsed'] = t.elapsed
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
    ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
    ui.write(("heads summary:\n"))
    ui.write((" total common heads: %(nb-common)9d\n") % data)
    ui.write((" also local heads: %(nb-common-local)9d\n") % data)
    ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write((" local heads: %(nb-local)9d\n") % data)
    ui.write((" common: %(nb-common-local)9d\n") % data)
    ui.write((" missing: %(nb-local-missing)9d\n") % data)
    ui.write((" remote heads: %(nb-remote)9d\n") % data)
    ui.write((" common: %(nb-common-remote)9d\n") % data)
    ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets: %(nb-revs)9d\n") % data)
    ui.write((" common: %(nb-revs-common)9d\n") % data)
    ui.write((" missing: %(nb-revs-missing)9d\n") % data)

    if ui.verbose:
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
855 858
# transfer buffer size used by debugdownload (4 KiB)
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    # Default destination is the ui (stdout); -o writes to a file instead.
    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        # Stream in fixed-size chunks to avoid loading everything in memory.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()
879 882
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # Default verbosity: annotate the name with test status.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
925 928
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # (stage name, transform) pairs applied in order to the parsed tree
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # only emit the stage header when the output could be ambiguous
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
992 995
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: longest variant name, at least the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the variant name so the value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through unchanged; booleans render as yes/no
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick color labels depending on whether the repo value matches the
        # configured value and/or the Mercurial default
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1054 1057
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(probe):
        # render a boolean probe result the way this command reports it
        if probe:
            return 'yes'
        return 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; a failure to create one
    # (e.g. read-only path) leaves the answer unknown rather than aborting
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1071 1074
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # build keyword arguments for the peer's getbundle() wire protocol call;
    # the r'' keys stay native strings because they are used as **kwargs
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # map the user-facing compression name to the on-disk bundle type
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1106 1109
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a containing directory matches
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (uipathfn(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % uipathfn(f))
1149 1152
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    # --debug shows full node hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # probe the first entry to learn how wide the node id columns must be
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1185 1188
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)
        # every revision has a p1 edge; emit a p2 edge only for real merges
        ui.write("\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write("}\n")
1200 1203
@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index first so its internal caches are populated
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # stats() only exists on the native (C) index implementation
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write('%s: %d\n' % (key, stats[key]))
1210 1213
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # number of detected issues; also used as the return code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # verify the C extension modules can actually be imported
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the built-in fallback, so its absence gets a different message
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1377 1380
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # render one '1'/'0' digit per queried node, in input order
    digits = []
    for known in peer.known(nodes):
        if known:
            digits.append("1")
        else:
            digits.append("0")
    ui.write("%s\n" % "".join(digits))
1391 1394
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: the real implementation lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1396 1399
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-*: unconditionally delete the lock files and stop
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-*: acquire the requested locks and hold them until the user
    # answers the prompt (or the process is interrupted)
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report on both locks; returns the number held
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock, so it was free; release it again
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = ('user %s, process %s, host %s'
                                  % (user or b'None', pid, host))
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1493 1496
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', [], _('add the given manifest nodes to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # fetch the fulltext cache from manifest storage, aborting when the
        # active revlog implementation does not provide one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _("Current revlog implementation doesn't appear to have a "
                    "manifest fulltext cache\n")
            raise error.Abort(msg)

    if opts.get(r'clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint="Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    # no options: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_('cache empty\n'))
    else:
        ui.write(
            _('cache contains %d manifest entries, in order of most to '
              'least recent:\n') % (len(cache),))
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(_('id: %s, size %s\n') % (
                hex(nodeid), util.bytecount(size)))
        ondisk = cache._opener.stat('manifestfulltextcache').st_size
        ui.write(
            _('total cache data size %s, on-disk %s\n') % (
                util.bytecount(totalsize), util.bytecount(ondisk))
        )
1549 1552
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump all records of the given merge-state format version
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file record: NUL-separated fields, with the 'other'
                # node only stored by the v2 format
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: flat NUL-separated key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types come first (in 'order'), the rest sort by payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1648 1651
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # the 'branches' namespace is handled separately below so that only
    # open branches are offered, matching this command's historical output
    for namespace, ns in repo.names.iteritems():
        if namespace == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # an empty argument list means "complete everything"
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        for name in candidates:
            if name.startswith(prefix):
                completions.add(name)
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1668 1671
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
        _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    # Three mutually exclusive modes below:
    #   --delete INDICES   -> remove markers from the obsstore
    #   PRECURSOR [SUCC..] -> create a new marker
    #   (no positional)    -> list (optionally filtered) markers
    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full 40-hex-digit node id into binary form.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # Deletion mode: validate all indices up front so we abort before
        # touching the store if any value is malformed.
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Lock is taken before the transaction is opened and released after
        # it is closed/aborted; keep this ordering.
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # --record-parents needs the precursor locally to read
                    # its parents.
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Listing mode.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1785 1788
@command('debugp1copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working directory when --rev is
    # omitted) and print one "source -> destination" line per copy.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    copymap = ctx.p1copies()
    for destination, source in copymap.items():
        ui.write('%s -> %s\n' % (source, destination))
1796 1799
@command('debugp2copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # BUGFIX: this function was previously (mistakenly) also named
    # ``debugp1copies``, shadowing the module-level name of the p1 variant
    # defined just above. The registered command name ('debugp2copies') and
    # the behavior are unchanged; only the def name is corrected.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write('%s -> %s\n' % (src, dst))
1807 1810
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def matchstates(path, statechars):
        # Return (files, dirs) of dirstate entries under *path* whose
        # dirstate status character is in *statechars*.
        ds = repo.dirstate
        normspec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootprefix = repo.root + pycompat.ossep
        if normspec != repo.root and not normspec.startswith(rootprefix):
            return [], []
        if os.path.isdir(normspec):
            normspec += '/'
        normspec = normspec[len(rootprefix):]
        needsepfix = pycompat.ossep != '/'
        if needsepfix:
            normspec = normspec.replace(pycompat.ossep, '/')
        prefixlen = len(normspec)
        wantfull = opts[r'full']
        matchedfiles = set()
        matcheddirs = set()
        for name, entry in ds.iteritems():
            if not (name.startswith(normspec) and entry[0] in statechars):
                continue
            if needsepfix:
                name = name.replace('/', pycompat.ossep)
            if wantfull:
                matchedfiles.add(name)
                continue
            # Without --full, stop at the next path separator so only one
            # extra segment is completed.
            sep = name.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                matcheddirs.add(name[:sep])
            else:
                matchedfiles.add(name)
        return matchedfiles, matcheddirs

    statechars = ''
    if opts[r'normal']:
        statechars += 'nm'
    if opts[r'added']:
        statechars += 'a'
    if opts[r'removed']:
        statechars += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    allfiles = set()
    alldirs = set()
    for spec in specs:
        foundfiles, founddirs = matchstates(spec, statechars or 'nmar')
        allfiles.update(foundfiles)
        alldirs.update(founddirs)
    allfiles.update(alldirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(allfiles)))
    ui.write('\n')
1872 1875
@command('debugpathcopies',
         cmdutil.walkopts,
         'hg debugpathcopies REV1 REV2 [FILE]',
         inferrepo=True)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, restrict to the matched files, and emit the
    # copy pairs in sorted (destination) order.
    basectx = scmutil.revsingle(repo, rev1)
    targetctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(basectx, pats, opts)
    copymap = copies.pathcopies(basectx, targetctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write('%s -> %s\n' % (src, dst))
1884 1887
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {('devel', 'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1903 1906
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool maps onto the ui.forcemerge config knob used by filemerge.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v, surface the inputs that can short-circuit tool selection.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, capture (and discard) the chatter that
                # _picktool emits while matching merge-patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1982 1985
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for name, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(name),
                                   stringutil.escapestr(value)))
        return

    # Update mode: conditionally set KEY from OLD to NEW.
    key, old, new = keyinfo
    with peer.commandexecutor() as executor:
        result = executor.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(result) + '\n')
    return not result
2010 2013
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the "parent vectors" of two changesets and report their
    # relation: '=' equal, '>' a descends b, '<' a precedes b, '|' unrelated.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): if none of the four comparisons above holds, ``rel`` is
    # never bound and the last write below raises UnboundLocalError.
    # Presumably the pvec operators are exhaustive -- confirm against pvec.py.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2031 2034
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None tells rebuild() to reset status for every file; with
        # --minimal we narrow it to the inconsistent entries only
        # (see the command doc above).
        changedfiles = None
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            manifestonly = inmanifest - indirstate
            dirstateonly = indirstate - inmanifest
            notadded = set(f for f in dirstateonly if ds[f] != 'a')
            changedfiles = manifestonly | notadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2069 2072
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all the work happens in repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2074 2077
@command('debugrename',
        [('r', 'rev', '', _('revision to debug'), _('REV'))],
        _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Walk the matched files in the requested revision and report, for each,
    # whether its filelog records a rename source.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renameinfo = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renameinfo:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renameinfo[0], hex(renameinfo[1])))
2092 2095
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: emit one raw table row per revision and return early.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # A head is a revision no later revision claims as a parent.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Statistics mode: decode the revlog version/flags first.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each [min, max, sum] triple is later
    # normalized so that slot 2 becomes the average.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, sum] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as a full snapshot (depth 0) or an empty revision.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Stored as a delta; classify against prev/p1/p2/snapshot.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Width-matched integer format.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Width-matched "count (percent)" format.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total) for the pcfmtstr format.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    empty     : ') + fmt % pcfmt(numempty, numrevs))
    ui.write(('                   text  : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write(('                   delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write(('    snapshot  : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    snapshot  : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-friendly label for a compression chunk-type byte.
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg)    : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write(('    level-%-3d (min/max/avg)          : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                              numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                              numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                              numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2392 2395
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # Full node ids with --debug, short ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Column width is derived from the first node id found.
        idlen = len(shortfn(r.node(i)))
        break

    # Header row; layout depends on format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the index cannot resolve them.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                        i, r.start(i), r.length(i), r.linkrev(i),
                        shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                        i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                        r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2457 2460
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # The revset compilation pipeline, in order; each stage transforms the
    # tree produced by the previous one.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # Decide which stage trees get printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists; exit 1 on any difference.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2560 2563
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    def openlogfd(fd):
        # Line buffered because output is line based.
        try:
            return os.fdopen(fd, r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            return os.fdopen(fd, r'wb', 1)

    logfh = None
    if opts['logiofd']:
        logfh = openlogfd(int(opts['logiofd']))
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
2597 2600
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # A missing REV2 falls back to the null revision (single-parent state).
    parents = (scmutil.revsingle(repo, rev1).node(),
               scmutil.revsingle(repo, rev2, 'null').node())

    with repo.wlock():
        repo.setparents(*parents)
2615 2618
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    # Chain repair goes through the Windows crypto API; bail out early on
    # every other platform.
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Substitute the scheme's well-known port when the URL doesn't carry one.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # Imported lazily: only reachable on Windows (checked above).
    from . import win32

    # Verification is deliberately disabled (CERT_NONE): we only want the
    # peer's raw certificate so the chain can be inspected/repaired locally.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        # True asks for the certificate in binary (DER) form.
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass is inspect-only (build=False); the second call below
        # presumably triggers the chain build/update -- see
        # win32.checkcertificatechain for the details.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2676 2679
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump each subrepository's state (path, source, revision), sorted by
    # path, for the requested changeset.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2687 2690
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                # One indented line per successors set; nodes within a set
                # are space-separated. An empty set prints a blank line
                # (a pruned changeset).
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2740 2743
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # --rev needs a repository even though the command itself is
        # optionalrepo.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions; empty keys and the name 'ui' are
    # rejected, as is anything without a '=' (split raises ValueError).
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree and, if aliases changed anything, the
        # alias-expanded tree as well.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once against the -D properties only.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2797 2800
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix misspelled 'respose:' so the label matches debuguiprompt's output.
    ui.write(('response: %s\n') % r)
2805 2808
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo whatever the user typed, prefixed for test scripts to match.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2813 2816
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both locks so cache files are rewritten against a stable repo.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2819 2822
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All of the analysis and conversion logic lives in the upgrade module;
    # this command is a thin CLI wrapper around it.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
                               backup=backup)
2846 2849
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display transform: honor 'ui.slash' on platforms where the native
    # separator is not '/'. (Previously bound via lambdas; also renamed the
    # loop variable from 'abs', which shadowed the builtin.)
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths are sized to the longest absolute and relative paths so
    # the output lines up. Generator expressions avoid building throwaway
    # lists just to take max().
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(name) for name in items),
        max(len(repo.pathto(name)) for name in items))
    for name in items:
        line = fmt % (name, display(repo.pathto(name)),
                      m.exact(name) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2867 2870
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Divergent changesets get a space-terminated "hex (phase)" list
        # prepended to the reason; otherwise the prefix is empty.
        divergent = entry.get('divergentnodes')
        if divergent:
            parts = ['%s (%s)' % (d.hex(), d.phasestr()) for d in divergent]
            prefix = ' '.join(parts) + ' '
        else:
            prefix = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], prefix,
                                    entry['reason'], entry['node']))
2878 2881
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the generic remote options so only command arguments remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Keep only set values, with native-string keys for **-expansion.
    args = pycompat.strkwargs(
        {k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2902 2905
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    Yields one tuple per block: the unindented action line and the list of
    its indented payload lines (trailing whitespace stripped). Blank lines
    and ``#`` comments are ignored. A payload line indented deeper than the
    previous one is treated as a continuation and concatenated onto it.
    """
    action = None
    payload = []
    previndent = 0

    for rawline in fh:
        line = rawline.rstrip()
        # Skip blanks and comments.
        if not line or line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # An unindented line opens a new block; emit the finished one.
            if action:
                yield action, payload

            action = line
            payload = []
            previndent = 0
            continue

        # Indented line: it must belong to an open block.
        if not action:
            raise error.Abort(_('indented line outside of block'))

        stripped = line.lstrip()
        indent = len(line) - len(stripped)

        if indent > previndent and payload:
            # Deeper indent: continuation of the previous payload line.
            payload[-1] += stripped
        else:
            payload.append(line)
            previndent = indent

    # Emit the trailing block, if any.
    if action:
        yield action, payload
2943 2946
2944 2947 @command('debugwireproto',
2945 2948 [
2946 2949 ('', 'localssh', False, _('start an SSH server for this repo')),
2947 2950 ('', 'peer', '', _('construct a specific version of the peer')),
2948 2951 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2949 2952 ('', 'nologhandshake', False,
2950 2953 _('do not log I/O related to the peer handshake')),
2951 2954 ] + cmdutil.remoteopts,
2952 2955 _('[PATH]'),
2953 2956 optionalrepo=True)
2954 2957 def debugwireproto(ui, repo, path=None, **opts):
2955 2958 """send wire protocol commands to a server
2956 2959
2957 2960 This command can be used to issue wire protocol commands to remote
2958 2961 peers and to debug the raw data being exchanged.
2959 2962
2960 2963 ``--localssh`` will start an SSH server against the current repository
2961 2964 and connect to that. By default, the connection will perform a handshake
2962 2965 and establish an appropriate peer instance.
2963 2966
2964 2967 ``--peer`` can be used to bypass the handshake protocol and construct a
2965 2968 peer instance using the specified class type. Valid values are ``raw``,
2966 2969 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2967 2970 raw data payloads and don't support higher-level command actions.
2968 2971
2969 2972 ``--noreadstderr`` can be used to disable automatic reading from stderr
2970 2973 of the peer (for SSH connections only). Disabling automatic reading of
2971 2974 stderr is useful for making output more deterministic.
2972 2975
2973 2976 Commands are issued via a mini language which is specified via stdin.
2974 2977 The language consists of individual actions to perform. An action is
2975 2978 defined by a block. A block is defined as a line with no leading
2976 2979 space followed by 0 or more lines with leading space. Blocks are
2977 2980 effectively a high-level command with additional metadata.
2978 2981
2979 2982 Lines beginning with ``#`` are ignored.
2980 2983
2981 2984 The following sections denote available actions.
2982 2985
2983 2986 raw
2984 2987 ---
2985 2988
2986 2989 Send raw data to the server.
2987 2990
2988 2991 The block payload contains the raw data to send as one atomic send
2989 2992 operation. The data may not actually be delivered in a single system
2990 2993 call: it depends on the abilities of the transport being used.
2991 2994
2992 2995 Each line in the block is de-indented and concatenated. Then, that
2993 2996 value is evaluated as a Python b'' literal. This allows the use of
2994 2997 backslash escaping, etc.
2995 2998
2996 2999 raw+
2997 3000 ----
2998 3001
2999 3002 Behaves like ``raw`` except flushes output afterwards.
3000 3003
3001 3004 command <X>
3002 3005 -----------
3003 3006
3004 3007 Send a request to run a named command, whose name follows the ``command``
3005 3008 string.
3006 3009
3007 3010 Arguments to the command are defined as lines in this block. The format of
3008 3011 each line is ``<key> <value>``. e.g.::
3009 3012
3010 3013 command listkeys
3011 3014 namespace bookmarks
3012 3015
3013 3016 If the value begins with ``eval:``, it will be interpreted as a Python
3014 3017 literal expression. Otherwise values are interpreted as Python b'' literals.
3015 3018 This allows sending complex types and encoding special byte sequences via
3016 3019 backslash escaping.
3017 3020
3018 3021 The following arguments have special meaning:
3019 3022
3020 3023 ``PUSHFILE``
3021 3024 When defined, the *push* mechanism of the peer will be used instead
3022 3025 of the static request-response mechanism and the content of the
3023 3026 file specified in the value of this argument will be sent as the
3024 3027 command payload.
3025 3028
3026 3029 This can be used to submit a local bundle file to the remote.
3027 3030
3028 3031 batchbegin
3029 3032 ----------
3030 3033
3031 3034 Instruct the peer to begin a batched send.
3032 3035
3033 3036 All ``command`` blocks are queued for execution until the next
3034 3037 ``batchsubmit`` block.
3035 3038
3036 3039 batchsubmit
3037 3040 -----------
3038 3041
3039 3042 Submit previously queued ``command`` blocks as a batch request.
3040 3043
3041 3044 This action MUST be paired with a ``batchbegin`` action.
3042 3045
3043 3046 httprequest <method> <path>
3044 3047 ---------------------------
3045 3048
3046 3049 (HTTP peer only)
3047 3050
3048 3051 Send an HTTP request to the peer.
3049 3052
3050 3053 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3051 3054
3052 3055 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3053 3056 headers to add to the request. e.g. ``Accept: foo``.
3054 3057
3055 3058 The following arguments are special:
3056 3059
3057 3060 ``BODYFILE``
3058 3061 The content of the file defined as the value to this argument will be
3059 3062 transferred verbatim as the HTTP request body.
3060 3063
3061 3064 ``frame <type> <flags> <payload>``
3062 3065 Send a unified protocol frame as part of the request body.
3063 3066
3064 3067 All frames will be collected and sent as the body to the HTTP
3065 3068 request.
3066 3069
3067 3070 close
3068 3071 -----
3069 3072
3070 3073 Close the connection to the server.
3071 3074
3072 3075 flush
3073 3076 -----
3074 3077
3075 3078 Flush data written to the server.
3076 3079
3077 3080 readavailable
3078 3081 -------------
3079 3082
3080 3083 Close the write end of the connection and read all available data from
3081 3084 the server.
3082 3085
3083 3086 If the connection to the server encompasses multiple pipes, we poll both
3084 3087 pipes and read available data.
3085 3088
3086 3089 readline
3087 3090 --------
3088 3091
3089 3092 Read a line of output from the server. If there are multiple output
3090 3093 pipes, reads only the main pipe.
3091 3094
3092 3095 ereadline
3093 3096 ---------
3094 3097
3095 3098 Like ``readline``, but read from the stderr pipe, if available.
3096 3099
3097 3100 read <X>
3098 3101 --------
3099 3102
3100 3103 ``read()`` N bytes from the server's main output pipe.
3101 3104
3102 3105 eread <X>
3103 3106 ---------
3104 3107
3105 3108 ``read()`` N bytes from the server's stderr pipe, if available.
3106 3109
3107 3110 Specifying Unified Frame-Based Protocol Frames
3108 3111 ----------------------------------------------
3109 3112
3110 3113 It is possible to emit a *Unified Frame-Based Protocol* by using special
3111 3114 syntax.
3112 3115
3113 3116 A frame is composed as a type, flags, and payload. These can be parsed
3114 3117 from a string of the form:
3115 3118
3116 3119 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3117 3120
3118 3121 ``request-id`` and ``stream-id`` are integers defining the request and
3119 3122 stream identifiers.
3120 3123
3121 3124 ``type`` can be an integer value for the frame type or the string name
3122 3125 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3123 3126 ``command-name``.
3124 3127
3125 3128 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3126 3129 components. Each component (and there can be just one) can be an integer
3127 3130 or a flag name for stream flags or frame flags, respectively. Values are
3128 3131 resolved to integers and then bitwise OR'd together.
3129 3132
3130 3133 ``payload`` represents the raw frame payload. If it begins with
3131 3134 ``cbor:``, the following string is evaluated as Python code and the
3132 3135 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3133 3136 as a Python byte string literal.
3134 3137 """
3135 3138 opts = pycompat.byteskwargs(opts)
3136 3139
3137 3140 if opts['localssh'] and not repo:
3138 3141 raise error.Abort(_('--localssh requires a repository'))
3139 3142
3140 3143 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3141 3144 raise error.Abort(_('invalid value for --peer'),
3142 3145 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3143 3146
3144 3147 if path and opts['localssh']:
3145 3148 raise error.Abort(_('cannot specify --localssh with an explicit '
3146 3149 'path'))
3147 3150
3148 3151 if ui.interactive():
3149 3152 ui.write(_('(waiting for commands on stdin)\n'))
3150 3153
3151 3154 blocks = list(_parsewirelangblocks(ui.fin))
3152 3155
3153 3156 proc = None
3154 3157 stdin = None
3155 3158 stdout = None
3156 3159 stderr = None
3157 3160 opener = None
3158 3161
3159 3162 if opts['localssh']:
3160 3163 # We start the SSH server in its own process so there is process
3161 3164 # separation. This prevents a whole class of potential bugs around
3162 3165 # shared state from interfering with server operation.
3163 3166 args = procutil.hgcmd() + [
3164 3167 '-R', repo.root,
3165 3168 'debugserve', '--sshstdio',
3166 3169 ]
3167 3170 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3168 3171 stdin=subprocess.PIPE,
3169 3172 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3170 3173 bufsize=0)
3171 3174
3172 3175 stdin = proc.stdin
3173 3176 stdout = proc.stdout
3174 3177 stderr = proc.stderr
3175 3178
3176 3179 # We turn the pipes into observers so we can log I/O.
3177 3180 if ui.verbose or opts['peer'] == 'raw':
3178 3181 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3179 3182 logdata=True)
3180 3183 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3181 3184 logdata=True)
3182 3185 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3183 3186 logdata=True)
3184 3187
3185 3188 # --localssh also implies the peer connection settings.
3186 3189
3187 3190 url = 'ssh://localserver'
3188 3191 autoreadstderr = not opts['noreadstderr']
3189 3192
3190 3193 if opts['peer'] == 'ssh1':
3191 3194 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3192 3195 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3193 3196 None, autoreadstderr=autoreadstderr)
3194 3197 elif opts['peer'] == 'ssh2':
3195 3198 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3196 3199 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3197 3200 None, autoreadstderr=autoreadstderr)
3198 3201 elif opts['peer'] == 'raw':
3199 3202 ui.write(_('using raw connection to peer\n'))
3200 3203 peer = None
3201 3204 else:
3202 3205 ui.write(_('creating ssh peer from handshake results\n'))
3203 3206 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3204 3207 autoreadstderr=autoreadstderr)
3205 3208
3206 3209 elif path:
3207 3210 # We bypass hg.peer() so we can proxy the sockets.
3208 3211 # TODO consider not doing this because we skip
3209 3212 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3210 3213 u = util.url(path)
3211 3214 if u.scheme != 'http':
3212 3215 raise error.Abort(_('only http:// paths are currently supported'))
3213 3216
3214 3217 url, authinfo = u.authinfo()
3215 3218 openerargs = {
3216 3219 r'useragent': b'Mercurial debugwireproto',
3217 3220 }
3218 3221
3219 3222 # Turn pipes/sockets into observers so we can log I/O.
3220 3223 if ui.verbose:
3221 3224 openerargs.update({
3222 3225 r'loggingfh': ui,
3223 3226 r'loggingname': b's',
3224 3227 r'loggingopts': {
3225 3228 r'logdata': True,
3226 3229 r'logdataapis': False,
3227 3230 },
3228 3231 })
3229 3232
3230 3233 if ui.debugflag:
3231 3234 openerargs[r'loggingopts'][r'logdataapis'] = True
3232 3235
3233 3236 # Don't send default headers when in raw mode. This allows us to
3234 3237 # bypass most of the behavior of our URL handling code so we can
3235 3238 # have near complete control over what's sent on the wire.
3236 3239 if opts['peer'] == 'raw':
3237 3240 openerargs[r'sendaccept'] = False
3238 3241
3239 3242 opener = urlmod.opener(ui, authinfo, **openerargs)
3240 3243
3241 3244 if opts['peer'] == 'http2':
3242 3245 ui.write(_('creating http peer for wire protocol version 2\n'))
3243 3246 # We go through makepeer() because we need an API descriptor for
3244 3247 # the peer instance to be useful.
3245 3248 with ui.configoverride({
3246 3249 ('experimental', 'httppeer.advertise-v2'): True}):
3247 3250 if opts['nologhandshake']:
3248 3251 ui.pushbuffer()
3249 3252
3250 3253 peer = httppeer.makepeer(ui, path, opener=opener)
3251 3254
3252 3255 if opts['nologhandshake']:
3253 3256 ui.popbuffer()
3254 3257
3255 3258 if not isinstance(peer, httppeer.httpv2peer):
3256 3259 raise error.Abort(_('could not instantiate HTTP peer for '
3257 3260 'wire protocol version 2'),
3258 3261 hint=_('the server may not have the feature '
3259 3262 'enabled or is not allowing this '
3260 3263 'client version'))
3261 3264
3262 3265 elif opts['peer'] == 'raw':
3263 3266 ui.write(_('using raw connection to peer\n'))
3264 3267 peer = None
3265 3268 elif opts['peer']:
3266 3269 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3267 3270 opts['peer'])
3268 3271 else:
3269 3272 peer = httppeer.makepeer(ui, path, opener=opener)
3270 3273
3271 3274 # We /could/ populate stdin/stdout with sock.makefile()...
3272 3275 else:
3273 3276 raise error.Abort(_('unsupported connection configuration'))
3274 3277
3275 3278 batchedcommands = None
3276 3279
3277 3280 # Now perform actions based on the parsed wire language instructions.
3278 3281 for action, lines in blocks:
3279 3282 if action in ('raw', 'raw+'):
3280 3283 if not stdin:
3281 3284 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3282 3285
3283 3286 # Concatenate the data together.
3284 3287 data = ''.join(l.lstrip() for l in lines)
3285 3288 data = stringutil.unescapestr(data)
3286 3289 stdin.write(data)
3287 3290
3288 3291 if action == 'raw+':
3289 3292 stdin.flush()
3290 3293 elif action == 'flush':
3291 3294 if not stdin:
3292 3295 raise error.Abort(_('cannot call flush on this peer'))
3293 3296 stdin.flush()
3294 3297 elif action.startswith('command'):
3295 3298 if not peer:
3296 3299 raise error.Abort(_('cannot send commands unless peer instance '
3297 3300 'is available'))
3298 3301
3299 3302 command = action.split(' ', 1)[1]
3300 3303
3301 3304 args = {}
3302 3305 for line in lines:
3303 3306 # We need to allow empty values.
3304 3307 fields = line.lstrip().split(' ', 1)
3305 3308 if len(fields) == 1:
3306 3309 key = fields[0]
3307 3310 value = ''
3308 3311 else:
3309 3312 key, value = fields
3310 3313
3311 3314 if value.startswith('eval:'):
3312 3315 value = stringutil.evalpythonliteral(value[5:])
3313 3316 else:
3314 3317 value = stringutil.unescapestr(value)
3315 3318
3316 3319 args[key] = value
3317 3320
3318 3321 if batchedcommands is not None:
3319 3322 batchedcommands.append((command, args))
3320 3323 continue
3321 3324
3322 3325 ui.status(_('sending %s command\n') % command)
3323 3326
3324 3327 if 'PUSHFILE' in args:
3325 3328 with open(args['PUSHFILE'], r'rb') as fh:
3326 3329 del args['PUSHFILE']
3327 3330 res, output = peer._callpush(command, fh,
3328 3331 **pycompat.strkwargs(args))
3329 3332 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3330 3333 ui.status(_('remote output: %s\n') %
3331 3334 stringutil.escapestr(output))
3332 3335 else:
3333 3336 with peer.commandexecutor() as e:
3334 3337 res = e.callcommand(command, args).result()
3335 3338
3336 3339 if isinstance(res, wireprotov2peer.commandresponse):
3337 3340 val = res.objects()
3338 3341 ui.status(_('response: %s\n') %
3339 3342 stringutil.pprint(val, bprefix=True, indent=2))
3340 3343 else:
3341 3344 ui.status(_('response: %s\n') %
3342 3345 stringutil.pprint(res, bprefix=True, indent=2))
3343 3346
3344 3347 elif action == 'batchbegin':
3345 3348 if batchedcommands is not None:
3346 3349 raise error.Abort(_('nested batchbegin not allowed'))
3347 3350
3348 3351 batchedcommands = []
3349 3352 elif action == 'batchsubmit':
3350 3353 # There is a batching API we could go through. But it would be
3351 3354 # difficult to normalize requests into function calls. It is easier
3352 3355 # to bypass this layer and normalize to commands + args.
3353 3356 ui.status(_('sending batch with %d sub-commands\n') %
3354 3357 len(batchedcommands))
3355 3358 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3356 3359 ui.status(_('response #%d: %s\n') %
3357 3360 (i, stringutil.escapestr(chunk)))
3358 3361
3359 3362 batchedcommands = None
3360 3363
3361 3364 elif action.startswith('httprequest '):
3362 3365 if not opener:
3363 3366 raise error.Abort(_('cannot use httprequest without an HTTP '
3364 3367 'peer'))
3365 3368
3366 3369 request = action.split(' ', 2)
3367 3370 if len(request) != 3:
3368 3371 raise error.Abort(_('invalid httprequest: expected format is '
3369 3372 '"httprequest <method> <path>'))
3370 3373
3371 3374 method, httppath = request[1:]
3372 3375 headers = {}
3373 3376 body = None
3374 3377 frames = []
3375 3378 for line in lines:
3376 3379 line = line.lstrip()
3377 3380 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3378 3381 if m:
3379 3382 # Headers need to use native strings.
3380 3383 key = pycompat.strurl(m.group(1))
3381 3384 value = pycompat.strurl(m.group(2))
3382 3385 headers[key] = value
3383 3386 continue
3384 3387
3385 3388 if line.startswith(b'BODYFILE '):
3386 3389 with open(line.split(b' ', 1), 'rb') as fh:
3387 3390 body = fh.read()
3388 3391 elif line.startswith(b'frame '):
3389 3392 frame = wireprotoframing.makeframefromhumanstring(
3390 3393 line[len(b'frame '):])
3391 3394
3392 3395 frames.append(frame)
3393 3396 else:
3394 3397 raise error.Abort(_('unknown argument to httprequest: %s') %
3395 3398 line)
3396 3399
3397 3400 url = path + httppath
3398 3401
3399 3402 if frames:
3400 3403 body = b''.join(bytes(f) for f in frames)
3401 3404
3402 3405 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3403 3406
3404 3407 # urllib.Request insists on using has_data() as a proxy for
3405 3408 # determining the request method. Override that to use our
3406 3409 # explicitly requested method.
3407 3410 req.get_method = lambda: pycompat.sysstr(method)
3408 3411
3409 3412 try:
3410 3413 res = opener.open(req)
3411 3414 body = res.read()
3412 3415 except util.urlerr.urlerror as e:
3413 3416 # read() method must be called, but only exists in Python 2
3414 3417 getattr(e, 'read', lambda: None)()
3415 3418 continue
3416 3419
3417 3420 ct = res.headers.get(r'Content-Type')
3418 3421 if ct == r'application/mercurial-cbor':
3419 3422 ui.write(_('cbor> %s\n') %
3420 3423 stringutil.pprint(cborutil.decodeall(body),
3421 3424 bprefix=True,
3422 3425 indent=2))
3423 3426
3424 3427 elif action == 'close':
3425 3428 peer.close()
3426 3429 elif action == 'readavailable':
3427 3430 if not stdout or not stderr:
3428 3431 raise error.Abort(_('readavailable not available on this peer'))
3429 3432
3430 3433 stdin.close()
3431 3434 stdout.read()
3432 3435 stderr.read()
3433 3436
3434 3437 elif action == 'readline':
3435 3438 if not stdout:
3436 3439 raise error.Abort(_('readline not available on this peer'))
3437 3440 stdout.readline()
3438 3441 elif action == 'ereadline':
3439 3442 if not stderr:
3440 3443 raise error.Abort(_('ereadline not available on this peer'))
3441 3444 stderr.readline()
3442 3445 elif action.startswith('read '):
3443 3446 count = int(action.split(' ', 1)[1])
3444 3447 if not stdout:
3445 3448 raise error.Abort(_('read not available on this peer'))
3446 3449 stdout.read(count)
3447 3450 elif action.startswith('eread '):
3448 3451 count = int(action.split(' ', 1)[1])
3449 3452 if not stderr:
3450 3453 raise error.Abort(_('eread not available on this peer'))
3451 3454 stderr.read(count)
3452 3455 else:
3453 3456 raise error.Abort(_('unknown action: %s') % action)
3454 3457
3455 3458 if batchedcommands is not None:
3456 3459 raise error.Abort(_('unclosed "batchbegin" request'))
3457 3460
3458 3461 if peer:
3459 3462 peer.close()
3460 3463
3461 3464 if proc:
3462 3465 proc.kill()
@@ -1,1045 +1,1083 b''
1 1
2 2 Function to test discovery between two repos in both directions, using both the local shortcut
3 3 (which is currently not activated by default) and the full remotable protocol:
4 4
5 5 $ testdesc() { # revs_a, revs_b, dagdesc
6 6 > if [ -d foo ]; then rm -rf foo; fi
7 7 > hg init foo
8 8 > cd foo
9 9 > hg debugbuilddag "$3"
10 10 > hg clone . a $1 --quiet
11 11 > hg clone . b $2 --quiet
12 12 > echo
13 13 > echo "% -- a -> b tree"
14 14 > hg -R a debugdiscovery b --verbose --old
15 15 > echo
16 16 > echo "% -- a -> b set"
17 17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 18 > echo
19 19 > echo "% -- a -> b set (tip only)"
20 20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 21 > echo
22 22 > echo "% -- b -> a tree"
23 23 > hg -R b debugdiscovery a --verbose --old
24 24 > echo
25 25 > echo "% -- b -> a set"
26 26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 27 > echo
28 28 > echo "% -- b -> a set (tip only)"
29 29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 30 > cd ..
31 31 > }
32 32
33 33
34 34 Small superset:
35 35
36 36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 37 > +2:f +1:a1:b1
38 38 > <f +4 :a2
39 39 > +5 :b2
40 40 > <f +3 :b3'
41 41
42 42 % -- a -> b tree
43 43 comparing with b
44 44 searching for changes
45 45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 elapsed time: * seconds (glob)
46 47 heads summary:
47 48 total common heads: 2
48 49 also local heads: 2
49 50 also remote heads: 1
50 51 local heads: 2
51 52 common: 2
52 53 missing: 0
53 54 remote heads: 3
54 55 common: 1
55 56 unknown: 2
56 57 local changesets: 7
57 58 common: 7
58 59 missing: 0
59 60 common heads: 01241442b3c2 b5714e113bc0
60 61
61 62 % -- a -> b set
62 63 comparing with b
63 64 query 1; heads
64 65 searching for changes
65 66 all local heads known remotely
67 elapsed time: * seconds (glob)
66 68 heads summary:
67 69 total common heads: 2
68 70 also local heads: 2
69 71 also remote heads: 1
70 72 local heads: 2
71 73 common: 2
72 74 missing: 0
73 75 remote heads: 3
74 76 common: 1
75 77 unknown: 2
76 78 local changesets: 7
77 79 common: 7
78 80 missing: 0
79 81 common heads: 01241442b3c2 b5714e113bc0
80 82
81 83 % -- a -> b set (tip only)
82 84 comparing with b
83 85 query 1; heads
84 86 searching for changes
85 87 all local heads known remotely
88 elapsed time: * seconds (glob)
86 89 heads summary:
87 90 total common heads: 1
88 91 also local heads: 1
89 92 also remote heads: 0
90 93 local heads: 2
91 94 common: 1
92 95 missing: 1
93 96 remote heads: 3
94 97 common: 0
95 98 unknown: 3
96 99 local changesets: 7
97 100 common: 6
98 101 missing: 1
99 102 common heads: b5714e113bc0
100 103
101 104 % -- b -> a tree
102 105 comparing with a
103 106 searching for changes
104 107 unpruned common: 01241442b3c2 b5714e113bc0
108 elapsed time: * seconds (glob)
105 109 heads summary:
106 110 total common heads: 2
107 111 also local heads: 1
108 112 also remote heads: 2
109 113 local heads: 3
110 114 common: 1
111 115 missing: 2
112 116 remote heads: 2
113 117 common: 2
114 118 unknown: 0
115 119 local changesets: 15
116 120 common: 7
117 121 missing: 8
118 122 common heads: 01241442b3c2 b5714e113bc0
119 123
120 124 % -- b -> a set
121 125 comparing with a
122 126 query 1; heads
123 127 searching for changes
124 128 all remote heads known locally
129 elapsed time: * seconds (glob)
125 130 heads summary:
126 131 total common heads: 2
127 132 also local heads: 1
128 133 also remote heads: 2
129 134 local heads: 3
130 135 common: 1
131 136 missing: 2
132 137 remote heads: 2
133 138 common: 2
134 139 unknown: 0
135 140 local changesets: 15
136 141 common: 7
137 142 missing: 8
138 143 common heads: 01241442b3c2 b5714e113bc0
139 144
140 145 % -- b -> a set (tip only)
141 146 comparing with a
142 147 query 1; heads
143 148 searching for changes
144 149 all remote heads known locally
150 elapsed time: * seconds (glob)
145 151 heads summary:
146 152 total common heads: 2
147 153 also local heads: 1
148 154 also remote heads: 2
149 155 local heads: 3
150 156 common: 1
151 157 missing: 2
152 158 remote heads: 2
153 159 common: 2
154 160 unknown: 0
155 161 local changesets: 15
156 162 common: 7
157 163 missing: 8
158 164 common heads: 01241442b3c2 b5714e113bc0
159 165
160 166
161 167 Many new:
162 168
163 169 $ testdesc '-ra1 -ra2' '-rb' '
164 170 > +2:f +3:a1 +3:b
165 171 > <f +30 :a2'
166 172
167 173 % -- a -> b tree
168 174 comparing with b
169 175 searching for changes
170 176 unpruned common: bebd167eb94d
177 elapsed time: * seconds (glob)
171 178 heads summary:
172 179 total common heads: 1
173 180 also local heads: 1
174 181 also remote heads: 0
175 182 local heads: 2
176 183 common: 1
177 184 missing: 1
178 185 remote heads: 1
179 186 common: 0
180 187 unknown: 1
181 188 local changesets: 35
182 189 common: 5
183 190 missing: 30
184 191 common heads: bebd167eb94d
185 192
186 193 % -- a -> b set
187 194 comparing with b
188 195 query 1; heads
189 196 searching for changes
190 197 taking initial sample
191 198 searching: 2 queries
192 199 query 2; still undecided: 29, sample size is: 29
193 200 2 total queries in *.????s (glob)
201 elapsed time: * seconds (glob)
194 202 heads summary:
195 203 total common heads: 1
196 204 also local heads: 1
197 205 also remote heads: 0
198 206 local heads: 2
199 207 common: 1
200 208 missing: 1
201 209 remote heads: 1
202 210 common: 0
203 211 unknown: 1
204 212 local changesets: 35
205 213 common: 5
206 214 missing: 30
207 215 common heads: bebd167eb94d
208 216
209 217 % -- a -> b set (tip only)
210 218 comparing with b
211 219 query 1; heads
212 220 searching for changes
213 221 taking quick initial sample
214 222 searching: 2 queries
215 223 query 2; still undecided: 31, sample size is: 31
216 224 2 total queries in *.????s (glob)
225 elapsed time: * seconds (glob)
217 226 heads summary:
218 227 total common heads: 1
219 228 also local heads: 0
220 229 also remote heads: 0
221 230 local heads: 2
222 231 common: 0
223 232 missing: 2
224 233 remote heads: 1
225 234 common: 0
226 235 unknown: 1
227 236 local changesets: 35
228 237 common: 2
229 238 missing: 33
230 239 common heads: 66f7d451a68b
231 240
232 241 % -- b -> a tree
233 242 comparing with a
234 243 searching for changes
235 244 unpruned common: 66f7d451a68b bebd167eb94d
245 elapsed time: * seconds (glob)
236 246 heads summary:
237 247 total common heads: 1
238 248 also local heads: 0
239 249 also remote heads: 1
240 250 local heads: 1
241 251 common: 0
242 252 missing: 1
243 253 remote heads: 2
244 254 common: 1
245 255 unknown: 1
246 256 local changesets: 8
247 257 common: 5
248 258 missing: 3
249 259 common heads: bebd167eb94d
250 260
251 261 % -- b -> a set
252 262 comparing with a
253 263 query 1; heads
254 264 searching for changes
255 265 taking initial sample
256 266 searching: 2 queries
257 267 query 2; still undecided: 2, sample size is: 2
258 268 2 total queries in *.????s (glob)
269 elapsed time: * seconds (glob)
259 270 heads summary:
260 271 total common heads: 1
261 272 also local heads: 0
262 273 also remote heads: 1
263 274 local heads: 1
264 275 common: 0
265 276 missing: 1
266 277 remote heads: 2
267 278 common: 1
268 279 unknown: 1
269 280 local changesets: 8
270 281 common: 5
271 282 missing: 3
272 283 common heads: bebd167eb94d
273 284
274 285 % -- b -> a set (tip only)
275 286 comparing with a
276 287 query 1; heads
277 288 searching for changes
278 289 taking initial sample
279 290 searching: 2 queries
280 291 query 2; still undecided: 2, sample size is: 2
281 292 2 total queries in *.????s (glob)
293 elapsed time: * seconds (glob)
282 294 heads summary:
283 295 total common heads: 1
284 296 also local heads: 0
285 297 also remote heads: 1
286 298 local heads: 1
287 299 common: 0
288 300 missing: 1
289 301 remote heads: 2
290 302 common: 1
291 303 unknown: 1
292 304 local changesets: 8
293 305 common: 5
294 306 missing: 3
295 307 common heads: bebd167eb94d
296 308
297 309 Both sides many new with stub:
298 310
299 311 $ testdesc '-ra1 -ra2' '-rb' '
300 312 > +2:f +2:a1 +30 :b
301 313 > <f +30 :a2'
302 314
303 315 % -- a -> b tree
304 316 comparing with b
305 317 searching for changes
306 318 unpruned common: 2dc09a01254d
319 elapsed time: * seconds (glob)
307 320 heads summary:
308 321 total common heads: 1
309 322 also local heads: 1
310 323 also remote heads: 0
311 324 local heads: 2
312 325 common: 1
313 326 missing: 1
314 327 remote heads: 1
315 328 common: 0
316 329 unknown: 1
317 330 local changesets: 34
318 331 common: 4
319 332 missing: 30
320 333 common heads: 2dc09a01254d
321 334
322 335 % -- a -> b set
323 336 comparing with b
324 337 query 1; heads
325 338 searching for changes
326 339 taking initial sample
327 340 searching: 2 queries
328 341 query 2; still undecided: 29, sample size is: 29
329 342 2 total queries in *.????s (glob)
343 elapsed time: * seconds (glob)
330 344 heads summary:
331 345 total common heads: 1
332 346 also local heads: 1
333 347 also remote heads: 0
334 348 local heads: 2
335 349 common: 1
336 350 missing: 1
337 351 remote heads: 1
338 352 common: 0
339 353 unknown: 1
340 354 local changesets: 34
341 355 common: 4
342 356 missing: 30
343 357 common heads: 2dc09a01254d
344 358
345 359 % -- a -> b set (tip only)
346 360 comparing with b
347 361 query 1; heads
348 362 searching for changes
349 363 taking quick initial sample
350 364 searching: 2 queries
351 365 query 2; still undecided: 31, sample size is: 31
352 366 2 total queries in *.????s (glob)
367 elapsed time: * seconds (glob)
353 368 heads summary:
354 369 total common heads: 1
355 370 also local heads: 0
356 371 also remote heads: 0
357 372 local heads: 2
358 373 common: 0
359 374 missing: 2
360 375 remote heads: 1
361 376 common: 0
362 377 unknown: 1
363 378 local changesets: 34
364 379 common: 2
365 380 missing: 32
366 381 common heads: 66f7d451a68b
367 382
368 383 % -- b -> a tree
369 384 comparing with a
370 385 searching for changes
371 386 unpruned common: 2dc09a01254d 66f7d451a68b
387 elapsed time: * seconds (glob)
372 388 heads summary:
373 389 total common heads: 1
374 390 also local heads: 0
375 391 also remote heads: 1
376 392 local heads: 1
377 393 common: 0
378 394 missing: 1
379 395 remote heads: 2
380 396 common: 1
381 397 unknown: 1
382 398 local changesets: 34
383 399 common: 4
384 400 missing: 30
385 401 common heads: 2dc09a01254d
386 402
387 403 % -- b -> a set
388 404 comparing with a
389 405 query 1; heads
390 406 searching for changes
391 407 taking initial sample
392 408 searching: 2 queries
393 409 query 2; still undecided: 29, sample size is: 29
394 410 2 total queries in *.????s (glob)
411 elapsed time: * seconds (glob)
395 412 heads summary:
396 413 total common heads: 1
397 414 also local heads: 0
398 415 also remote heads: 1
399 416 local heads: 1
400 417 common: 0
401 418 missing: 1
402 419 remote heads: 2
403 420 common: 1
404 421 unknown: 1
405 422 local changesets: 34
406 423 common: 4
407 424 missing: 30
408 425 common heads: 2dc09a01254d
409 426
410 427 % -- b -> a set (tip only)
411 428 comparing with a
412 429 query 1; heads
413 430 searching for changes
414 431 taking initial sample
415 432 searching: 2 queries
416 433 query 2; still undecided: 29, sample size is: 29
417 434 2 total queries in *.????s (glob)
435 elapsed time: * seconds (glob)
418 436 heads summary:
419 437 total common heads: 1
420 438 also local heads: 0
421 439 also remote heads: 1
422 440 local heads: 1
423 441 common: 0
424 442 missing: 1
425 443 remote heads: 2
426 444 common: 1
427 445 unknown: 1
428 446 local changesets: 34
429 447 common: 4
430 448 missing: 30
431 449 common heads: 2dc09a01254d
432 450
433 451
434 452 Both many new:
435 453
436 454 $ testdesc '-ra' '-rb' '
437 455 > +2:f +30 :b
438 456 > <f +30 :a'
439 457
440 458 % -- a -> b tree
441 459 comparing with b
442 460 searching for changes
443 461 unpruned common: 66f7d451a68b
462 elapsed time: * seconds (glob)
444 463 heads summary:
445 464 total common heads: 1
446 465 also local heads: 0
447 466 also remote heads: 0
448 467 local heads: 1
449 468 common: 0
450 469 missing: 1
451 470 remote heads: 1
452 471 common: 0
453 472 unknown: 1
454 473 local changesets: 32
455 474 common: 2
456 475 missing: 30
457 476 common heads: 66f7d451a68b
458 477
459 478 % -- a -> b set
460 479 comparing with b
461 480 query 1; heads
462 481 searching for changes
463 482 taking quick initial sample
464 483 searching: 2 queries
465 484 query 2; still undecided: 31, sample size is: 31
466 485 2 total queries in *.????s (glob)
486 elapsed time: * seconds (glob)
467 487 heads summary:
468 488 total common heads: 1
469 489 also local heads: 0
470 490 also remote heads: 0
471 491 local heads: 1
472 492 common: 0
473 493 missing: 1
474 494 remote heads: 1
475 495 common: 0
476 496 unknown: 1
477 497 local changesets: 32
478 498 common: 2
479 499 missing: 30
480 500 common heads: 66f7d451a68b
481 501
482 502 % -- a -> b set (tip only)
483 503 comparing with b
484 504 query 1; heads
485 505 searching for changes
486 506 taking quick initial sample
487 507 searching: 2 queries
488 508 query 2; still undecided: 31, sample size is: 31
489 509 2 total queries in *.????s (glob)
510 elapsed time: * seconds (glob)
490 511 heads summary:
491 512 total common heads: 1
492 513 also local heads: 0
493 514 also remote heads: 0
494 515 local heads: 1
495 516 common: 0
496 517 missing: 1
497 518 remote heads: 1
498 519 common: 0
499 520 unknown: 1
500 521 local changesets: 32
501 522 common: 2
502 523 missing: 30
503 524 common heads: 66f7d451a68b
504 525
505 526 % -- b -> a tree
506 527 comparing with a
507 528 searching for changes
508 529 unpruned common: 66f7d451a68b
530 elapsed time: * seconds (glob)
509 531 heads summary:
510 532 total common heads: 1
511 533 also local heads: 0
512 534 also remote heads: 0
513 535 local heads: 1
514 536 common: 0
515 537 missing: 1
516 538 remote heads: 1
517 539 common: 0
518 540 unknown: 1
519 541 local changesets: 32
520 542 common: 2
521 543 missing: 30
522 544 common heads: 66f7d451a68b
523 545
524 546 % -- b -> a set
525 547 comparing with a
526 548 query 1; heads
527 549 searching for changes
528 550 taking quick initial sample
529 551 searching: 2 queries
530 552 query 2; still undecided: 31, sample size is: 31
531 553 2 total queries in *.????s (glob)
554 elapsed time: * seconds (glob)
532 555 heads summary:
533 556 total common heads: 1
534 557 also local heads: 0
535 558 also remote heads: 0
536 559 local heads: 1
537 560 common: 0
538 561 missing: 1
539 562 remote heads: 1
540 563 common: 0
541 564 unknown: 1
542 565 local changesets: 32
543 566 common: 2
544 567 missing: 30
545 568 common heads: 66f7d451a68b
546 569
547 570 % -- b -> a set (tip only)
548 571 comparing with a
549 572 query 1; heads
550 573 searching for changes
551 574 taking quick initial sample
552 575 searching: 2 queries
553 576 query 2; still undecided: 31, sample size is: 31
554 577 2 total queries in *.????s (glob)
578 elapsed time: * seconds (glob)
555 579 heads summary:
556 580 total common heads: 1
557 581 also local heads: 0
558 582 also remote heads: 0
559 583 local heads: 1
560 584 common: 0
561 585 missing: 1
562 586 remote heads: 1
563 587 common: 0
564 588 unknown: 1
565 589 local changesets: 32
566 590 common: 2
567 591 missing: 30
568 592 common heads: 66f7d451a68b
569 593
570 594
571 595 Both many new skewed:
572 596
573 597 $ testdesc '-ra' '-rb' '
574 598 > +2:f +30 :b
575 599 > <f +50 :a'
576 600
577 601 % -- a -> b tree
578 602 comparing with b
579 603 searching for changes
580 604 unpruned common: 66f7d451a68b
605 elapsed time: * seconds (glob)
581 606 heads summary:
582 607 total common heads: 1
583 608 also local heads: 0
584 609 also remote heads: 0
585 610 local heads: 1
586 611 common: 0
587 612 missing: 1
588 613 remote heads: 1
589 614 common: 0
590 615 unknown: 1
591 616 local changesets: 52
592 617 common: 2
593 618 missing: 50
594 619 common heads: 66f7d451a68b
595 620
596 621 % -- a -> b set
597 622 comparing with b
598 623 query 1; heads
599 624 searching for changes
600 625 taking quick initial sample
601 626 searching: 2 queries
602 627 query 2; still undecided: 51, sample size is: 51
603 628 2 total queries in *.????s (glob)
629 elapsed time: * seconds (glob)
604 630 heads summary:
605 631 total common heads: 1
606 632 also local heads: 0
607 633 also remote heads: 0
608 634 local heads: 1
609 635 common: 0
610 636 missing: 1
611 637 remote heads: 1
612 638 common: 0
613 639 unknown: 1
614 640 local changesets: 52
615 641 common: 2
616 642 missing: 50
617 643 common heads: 66f7d451a68b
618 644
619 645 % -- a -> b set (tip only)
620 646 comparing with b
621 647 query 1; heads
622 648 searching for changes
623 649 taking quick initial sample
624 650 searching: 2 queries
625 651 query 2; still undecided: 51, sample size is: 51
626 652 2 total queries in *.????s (glob)
653 elapsed time: * seconds (glob)
627 654 heads summary:
628 655 total common heads: 1
629 656 also local heads: 0
630 657 also remote heads: 0
631 658 local heads: 1
632 659 common: 0
633 660 missing: 1
634 661 remote heads: 1
635 662 common: 0
636 663 unknown: 1
637 664 local changesets: 52
638 665 common: 2
639 666 missing: 50
640 667 common heads: 66f7d451a68b
641 668
642 669 % -- b -> a tree
643 670 comparing with a
644 671 searching for changes
645 672 unpruned common: 66f7d451a68b
673 elapsed time: * seconds (glob)
646 674 heads summary:
647 675 total common heads: 1
648 676 also local heads: 0
649 677 also remote heads: 0
650 678 local heads: 1
651 679 common: 0
652 680 missing: 1
653 681 remote heads: 1
654 682 common: 0
655 683 unknown: 1
656 684 local changesets: 32
657 685 common: 2
658 686 missing: 30
659 687 common heads: 66f7d451a68b
660 688
661 689 % -- b -> a set
662 690 comparing with a
663 691 query 1; heads
664 692 searching for changes
665 693 taking quick initial sample
666 694 searching: 2 queries
667 695 query 2; still undecided: 31, sample size is: 31
668 696 2 total queries in *.????s (glob)
697 elapsed time: * seconds (glob)
669 698 heads summary:
670 699 total common heads: 1
671 700 also local heads: 0
672 701 also remote heads: 0
673 702 local heads: 1
674 703 common: 0
675 704 missing: 1
676 705 remote heads: 1
677 706 common: 0
678 707 unknown: 1
679 708 local changesets: 32
680 709 common: 2
681 710 missing: 30
682 711 common heads: 66f7d451a68b
683 712
684 713 % -- b -> a set (tip only)
685 714 comparing with a
686 715 query 1; heads
687 716 searching for changes
688 717 taking quick initial sample
689 718 searching: 2 queries
690 719 query 2; still undecided: 31, sample size is: 31
691 720 2 total queries in *.????s (glob)
721 elapsed time: * seconds (glob)
692 722 heads summary:
693 723 total common heads: 1
694 724 also local heads: 0
695 725 also remote heads: 0
696 726 local heads: 1
697 727 common: 0
698 728 missing: 1
699 729 remote heads: 1
700 730 common: 0
701 731 unknown: 1
702 732 local changesets: 32
703 733 common: 2
704 734 missing: 30
705 735 common heads: 66f7d451a68b
706 736
707 737
708 738 Both many new on top of long history:
709 739
710 740 $ testdesc '-ra' '-rb' '
711 741 > +1000:f +30 :b
712 742 > <f +50 :a'
713 743
714 744 % -- a -> b tree
715 745 comparing with b
716 746 searching for changes
717 747 unpruned common: 7ead0cba2838
748 elapsed time: * seconds (glob)
718 749 heads summary:
719 750 total common heads: 1
720 751 also local heads: 0
721 752 also remote heads: 0
722 753 local heads: 1
723 754 common: 0
724 755 missing: 1
725 756 remote heads: 1
726 757 common: 0
727 758 unknown: 1
728 759 local changesets: 1050
729 760 common: 1000
730 761 missing: 50
731 762 common heads: 7ead0cba2838
732 763
733 764 % -- a -> b set
734 765 comparing with b
735 766 query 1; heads
736 767 searching for changes
737 768 taking quick initial sample
738 769 searching: 2 queries
739 770 query 2; still undecided: 1049, sample size is: 11
740 771 sampling from both directions
741 772 searching: 3 queries
742 773 query 3; still undecided: 31, sample size is: 31
743 774 3 total queries in *.????s (glob)
775 elapsed time: * seconds (glob)
744 776 heads summary:
745 777 total common heads: 1
746 778 also local heads: 0
747 779 also remote heads: 0
748 780 local heads: 1
749 781 common: 0
750 782 missing: 1
751 783 remote heads: 1
752 784 common: 0
753 785 unknown: 1
754 786 local changesets: 1050
755 787 common: 1000
756 788 missing: 50
757 789 common heads: 7ead0cba2838
758 790
759 791 % -- a -> b set (tip only)
760 792 comparing with b
761 793 query 1; heads
762 794 searching for changes
763 795 taking quick initial sample
764 796 searching: 2 queries
765 797 query 2; still undecided: 1049, sample size is: 11
766 798 sampling from both directions
767 799 searching: 3 queries
768 800 query 3; still undecided: 31, sample size is: 31
769 801 3 total queries in *.????s (glob)
802 elapsed time: * seconds (glob)
770 803 heads summary:
771 804 total common heads: 1
772 805 also local heads: 0
773 806 also remote heads: 0
774 807 local heads: 1
775 808 common: 0
776 809 missing: 1
777 810 remote heads: 1
778 811 common: 0
779 812 unknown: 1
780 813 local changesets: 1050
781 814 common: 1000
782 815 missing: 50
783 816 common heads: 7ead0cba2838
784 817
785 818 % -- b -> a tree
786 819 comparing with a
787 820 searching for changes
788 821 unpruned common: 7ead0cba2838
822 elapsed time: * seconds (glob)
789 823 heads summary:
790 824 total common heads: 1
791 825 also local heads: 0
792 826 also remote heads: 0
793 827 local heads: 1
794 828 common: 0
795 829 missing: 1
796 830 remote heads: 1
797 831 common: 0
798 832 unknown: 1
799 833 local changesets: 1030
800 834 common: 1000
801 835 missing: 30
802 836 common heads: 7ead0cba2838
803 837
804 838 % -- b -> a set
805 839 comparing with a
806 840 query 1; heads
807 841 searching for changes
808 842 taking quick initial sample
809 843 searching: 2 queries
810 844 query 2; still undecided: 1029, sample size is: 11
811 845 sampling from both directions
812 846 searching: 3 queries
813 847 query 3; still undecided: 15, sample size is: 15
814 848 3 total queries in *.????s (glob)
849 elapsed time: * seconds (glob)
815 850 heads summary:
816 851 total common heads: 1
817 852 also local heads: 0
818 853 also remote heads: 0
819 854 local heads: 1
820 855 common: 0
821 856 missing: 1
822 857 remote heads: 1
823 858 common: 0
824 859 unknown: 1
825 860 local changesets: 1030
826 861 common: 1000
827 862 missing: 30
828 863 common heads: 7ead0cba2838
829 864
830 865 % -- b -> a set (tip only)
831 866 comparing with a
832 867 query 1; heads
833 868 searching for changes
834 869 taking quick initial sample
835 870 searching: 2 queries
836 871 query 2; still undecided: 1029, sample size is: 11
837 872 sampling from both directions
838 873 searching: 3 queries
839 874 query 3; still undecided: 15, sample size is: 15
840 875 3 total queries in *.????s (glob)
876 elapsed time: * seconds (glob)
841 877 heads summary:
842 878 total common heads: 1
843 879 also local heads: 0
844 880 also remote heads: 0
845 881 local heads: 1
846 882 common: 0
847 883 missing: 1
848 884 remote heads: 1
849 885 common: 0
850 886 unknown: 1
851 887 local changesets: 1030
852 888 common: 1000
853 889 missing: 30
854 890 common heads: 7ead0cba2838
855 891
856 892
857 893 One with >200 heads, which used to use up all of the sample:
858 894
859 895 $ hg init manyheads
860 896 $ cd manyheads
861 897 $ echo "+300:r @a" >dagdesc
862 898 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
863 899 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
864 900 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
865 901 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
866 902 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
867 903 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
868 904 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
869 905 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
870 906 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
871 907 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
872 908 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
873 909 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
874 910 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
875 911 $ echo "@b *r+3" >>dagdesc # one more head
876 912 $ hg debugbuilddag <dagdesc
877 913 reading DAG from stdin
878 914
879 915 $ hg heads -t --template . | wc -c
880 916 \s*261 (re)
881 917
882 918 $ hg clone -b a . a
883 919 adding changesets
884 920 adding manifests
885 921 adding file changes
886 922 added 1340 changesets with 0 changes to 0 files (+259 heads)
887 923 new changesets 1ea73414a91b:1c51e2c80832
888 924 updating to branch a
889 925 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
890 926 $ hg clone -b b . b
891 927 adding changesets
892 928 adding manifests
893 929 adding file changes
894 930 added 304 changesets with 0 changes to 0 files
895 931 new changesets 1ea73414a91b:513314ca8b3a
896 932 updating to branch b
897 933 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
898 934
899 935 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
900 936 comparing with b
901 937 query 1; heads
902 938 searching for changes
903 939 taking quick initial sample
904 940 searching: 2 queries
905 941 query 2; still undecided: 1240, sample size is: 100
906 942 sampling from both directions
907 943 searching: 3 queries
908 944 query 3; still undecided: 1140, sample size is: 200
909 945 sampling from both directions
910 946 searching: 4 queries
911 947 query 4; still undecided: \d+, sample size is: 200 (re)
912 948 sampling from both directions
913 949 searching: 5 queries
914 950 query 5; still undecided: \d+, sample size is: 200 (re)
915 951 sampling from both directions
916 952 searching: 6 queries
917 953 query 6; still undecided: \d+, sample size is: \d+ (re)
918 954 6 total queries in *.????s (glob)
955 elapsed time: * seconds (glob)
919 956 heads summary:
920 957 total common heads: 1
921 958 also local heads: 0
922 959 also remote heads: 0
923 960 local heads: 260
924 961 common: 0
925 962 missing: 260
926 963 remote heads: 1
927 964 common: 0
928 965 unknown: 1
929 966 local changesets: 1340
930 967 common: 300
931 968 missing: 1040
932 969 common heads: 3ee37d65064a
933 970 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
934 971 comparing with b
935 972 query 1; heads
936 973 searching for changes
937 974 taking quick initial sample
938 975 searching: 2 queries
939 976 query 2; still undecided: 303, sample size is: 9
940 977 sampling from both directions
941 978 searching: 3 queries
942 979 query 3; still undecided: 3, sample size is: 3
943 980 3 total queries in *.????s (glob)
981 elapsed time: * seconds (glob)
944 982 heads summary:
945 983 total common heads: 1
946 984 also local heads: 0
947 985 also remote heads: 0
948 986 local heads: 260
949 987 common: 0
950 988 missing: 260
951 989 remote heads: 1
952 990 common: 0
953 991 unknown: 1
954 992 local changesets: 1340
955 993 common: 300
956 994 missing: 1040
957 995 common heads: 3ee37d65064a
958 996
959 997 Test actual protocol when pulling one new head in addition to common heads
960 998
961 999 $ hg clone -U b c
962 1000 $ hg -R c id -ir tip
963 1001 513314ca8b3a
964 1002 $ hg -R c up -qr default
965 1003 $ touch c/f
966 1004 $ hg -R c ci -Aqm "extra head"
967 1005 $ hg -R c id -i
968 1006 e64a39e7da8b
969 1007
970 1008 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
971 1009 $ cat hg.pid >> $DAEMON_PIDS
972 1010
973 1011 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
974 1012 comparing with http://localhost:$HGPORT/
975 1013 searching for changes
976 1014 e64a39e7da8b
977 1015
978 1016 $ killdaemons.py
979 1017 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
980 1018 "GET /?cmd=capabilities HTTP/1.1" 200 -
981 1019 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
982 1020 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
983 1021 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
984 1022 $ cat errors.log
985 1023
986 1024 $ cd ..
987 1025
988 1026
989 1027 Issue 4438 - test coverage for 3ef893520a85 issues.
990 1028
991 1029 $ mkdir issue4438
992 1030 $ cd issue4438
993 1031 #if false
994 1032 generate new bundles:
995 1033 $ hg init r1
996 1034 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
997 1035 $ hg clone -q r1 r2
998 1036 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
999 1037 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1000 1038 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1001 1039 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1002 1040 #else
1003 1041 use existing bundles:
1004 1042 $ hg init r1
1005 1043 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1006 1044 $ hg -R r1 -q up
1007 1045 $ hg init r2
1008 1046 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1009 1047 $ hg -R r2 -q up
1010 1048 #endif
1011 1049
1012 1050 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1013 1051
1014 1052 $ hg -R r1 outgoing r2 -T'{rev} '
1015 1053 comparing with r2
1016 1054 searching for changes
1017 1055 101 102 103 104 105 106 107 108 109 110 (no-eol)
1018 1056
1019 1057 The case where all the 'initialsamplesize' samples already were common would
1020 1058 give 'all remote heads known locally' without checking the remaining heads -
1021 1059 fixed in 86c35b7ae300:
1022 1060
1023 1061 $ cat >> $TESTTMP/unrandomsample.py << EOF
1024 1062 > import random
1025 1063 > def sample(population, k):
1026 1064 > return sorted(population)[:k]
1027 1065 > random.sample = sample
1028 1066 > EOF
1029 1067
1030 1068 $ cat >> r1/.hg/hgrc << EOF
1031 1069 > [extensions]
1032 1070 > unrandomsample = $TESTTMP/unrandomsample.py
1033 1071 > EOF
1034 1072
1035 1073 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1036 1074 > --config blackbox.track='command commandfinish discovery'
1037 1075 comparing with r2
1038 1076 searching for changes
1039 1077 101 102 103 104 105 106 107 108 109 110 (no-eol)
1040 1078 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1041 1079 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1042 1080 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1043 1081 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
1044 1082 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1045 1083 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now