##// END OF EJS Templates
setdiscovery: don't use dagutil for node -> rev conversion...
Gregory Szorc -
r39197:858a1284 default
parent child Browse files
Show More
@@ -1,3327 +1,3327 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 filesetlang,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
# Convenience alias: several debug commands release locks directly.
release = lockmod.release

# Decorator/registry used by every @command definition below.
command = registrar.command()
95 95
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three arguments: an explicit revlog index file plus two revisions.
    # Two arguments: resolve the revisions against the local changelog.
    nargs = len(args)
    if nargs == 3:
        index, arev, brev = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        arev, brev = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    ancnode = rl.ancestor(lookup(arev), lookup(brev))
    ui.write('%d:%s\n' % (rl.rev(ancnode), hex(ancnode)))
114 114
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, wrap it in the appropriate unbundler, apply it.
    fh = hg.openpath(ui, fname)
    unbundler = exchange.readbundle(ui, fh, fname)
    unbundler.apply(repo)
121 121
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: count 'n' (node) events so the progress bar and
    # the mergeable-file sizing below know the total up front)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # All commits happen inside one wlock/lock/transaction scope.
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                 # rev id of the last node committed
        atbranch = 'default'    # branch applied to subsequent nodes
        nodeids = []            # maps parsed rev id -> committed node id
        id = 0
        progress.update(id)
        # Second parse pass: actually build the commits.
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' event: create one changeset with parents 'ps'.
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" accumulates one tagged line per rev; merges are
                    # three-way merged so conflicts stay resolvable.
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every rev.
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # one brand-new file per rev; merges also carry over the
                    # "nf*" files from the second parent
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                # Closure handed to memctx; late-binds over 'filecontent'
                # of the current iteration.
                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # Translate parsed backrefs into committed node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' event: record a local tag for the preceding node.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a' event: switch the branch used for subsequent nodes.
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Tags are written outside the transaction; localtags is not versioned.
    if tags:
        repo.vfs.write("localtags", "".join(tags))
269 269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup unbundler 'gen'.

    With 'all' set, every delta of every section (changelog, manifest,
    each filelog) is listed with its parents/base; otherwise only the
    changelog node hashes are printed. 'indent' prefixes each output
    line (used when nested inside bundle2 part output).

    NOTE: 'gen' is consumed sequentially; the header calls below advance
    the stream even when their return value is unused.
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print one section header plus every delta in that section.
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # Section order is fixed by the changegroup format:
        # changelog, manifest, then one section per file.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelogheader() returns {} at end of stream, terminating iter().
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
298 298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and obsolescence markers contained in 'part'

    Reads the whole bundle2 part payload, decodes it, and prints the
    encoding version followed by one line per marker. An unknown
    encoding version is reported instead of raising.
    """
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Degrade gracefully: report the unknown version and payload size.
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Markers are sorted so the output is deterministic.
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
321 321
def _debugphaseheads(ui, data, indent=0):
    """decode and display the phase heads contained in 'data'"""
    pad = ' ' * indent
    byphase = phases.binarydecode(data)
    # One output line per head, grouped by phase.
    for phase in phases.allphases:
        for node in byphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(node), phases.phasenames[phase]))
330 330
def _quasirepr(thing):
    """Return a stable, repr-like byte string for 'thing'.

    Mapping types are rendered with sorted keys so output does not
    depend on insertion or hash order.
    """
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mappingtypes):
        pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
336 336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2

    Prints the stream parameters, then one line per part (optionally
    filtered by --part-type). Known part payloads (changegroup,
    obsmarkers, phase-heads) are expanded unless --quiet is set.
    """
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # Empty list means "show every part type".
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            # Nested changegroup: re-wrap the part in an unbundler.
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
359 359
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec: only identify the bundle type, don't unpack anything.
        if spec:
            spec = exchange.getbundlespec(ui, fh)
            ui.write('%s\n' % spec)
            return

        unbundler = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
378 378
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # Plain wire-protocol capabilities first...
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    # ...then the nested bundle2 capability map, if the peer has one.
    b2caps = bundle2.bundle2caps(peer)
    if not b2caps:
        return
    ui.write(('Bundle2 capabilities:\n'))
    for capname, capvalues in sorted(b2caps.iteritems()):
        ui.write((' %s\n') % capname)
        for value in capvalues:
            ui.write((' %s\n') % value)
397 397
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    dirstate parents, warns about each inconsistency found, and aborts
    if any were seen.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal / 'r'emoved entries must exist in the first parent.
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded entries must NOT already exist in the first parent.
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged entries must come from at least one parent.
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Reverse direction: everything in manifest1 must be tracked.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: do not name this local 'error' -- that would shadow the
        # 'error' module and break the Abort lookup on the next line.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
425 425
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # Report the active color mode, then dispatch on --style.
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
436 436
def _debugdisplaycolor(ui):
    """list every color/effect name, each rendered in its own effect"""
    # Work on a copy so the caller's ui styles are not clobbered.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, user-defined color.*/terminfo.* config keys
        # contribute additional style names (key prefix stripped).
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
454 454
def _debugdisplaystyle(ui):
    """list every configured style label with its effects"""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up after the longest label.
    width = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write('%s' % name, label=name)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(', '.join(rendered))
        ui.write('\n')
468 468
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    # generatebundlev1 yields the chunk stream plus the set of repo
    # requirements a consumer must support to apply it.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486 486
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Explicit revlog index file: emit its DAG, labeling any revs
        # listed on the command line.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Event stream consumed by dagtextlines():
            #   'n' = node (rev, parents), 'l' = label, 'a' = annotation.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No file given: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Pre-index tag names by the rev they point at.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # Field 5 of the parsed changelog entry is the extra
                    # dict carrying the branch name; emit an annotation
                    # whenever it changes.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
549 549
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision,
    # not a file path; shuffle accordingly.
    revlogflag = (opts.get('changelog') or opts.get('manifest')
                  or opts.get('dir'))
    if revlogflag:
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
565 565
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended widens the set of accepted input formats.
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
581 581
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify rev's delta and sum its chain's compressed size.
        # Index entry fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # generaldelta: the base may be any revision.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # classic layout: deltas are always against the previous rev.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Chains are numbered 1..n in order of first appearance of their base.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        # Guard the ratio computations against zero denominators.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # Simulate the sparse-read slicing to report how much data
            # would actually be read from disk for this chain.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
733 733
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Each dirstate entry 'ent' is indexed as: [0] state char, [1] mode
    # bits, [2] size, [3] mtime.
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # -1 mtime means "needs rechecking"
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 in the mode bits marks a symlink.
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # Trailing section: recorded copy sources.
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765 765
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            # --old: exercise the legacy tree-walking discovery.
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
            cl = repo.changelog
            clnode = cl.node
            dag = dagutil.revlogdag(cl)
            # Map nodes to revs via the changelog directly; the dead
            # duplicate assignment using dagutil.internalizeall() (an API
            # that was removed) has been dropped. 'allrevs' avoids
            # shadowing the all() builtin.
            allrevs = dag.ancestorset(cl.rev(n) for n in common)
            common = {clnode(r) for r in dag.headsetofconnecteds(allrevs)}
        else:
            # Default path: current set-based discovery, optionally
            # restricted to the --rev revisions.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # 'anyincoming' avoids shadowing the any() builtin.
            common, anyincoming, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
819 819
_chunksize = 4 << 10  # 4 KiB read/write buffer used by debugdownload
821 821
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The body is streamed in _chunksize pieces to the --output file, or
    to the ui when no output path is given.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        try:
            # the response handle was previously leaked; always release it
            fh.close()
        finally:
            if output:
                dest.close()
843 843
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    # One formatter item per loaded extension, sorted by name.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # Default verbosity: append a compatibility note when the
            # extension was not tested with this Mercurial version.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
889 889
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # pipeline of (name, transform) stages the parsed tree passes through;
    # --show-stage can request a dump of the tree after any of them
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        # validate every requested stage name before using any of them
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                # only emit the stage header when a stage was asked for
                # explicitly (keeps legacy --verbose output unchanged)
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher is tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    # print, in sorted order, every candidate file the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
956 956
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # first column must fit the longest variant name as well as the
    # 'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so every value column lines up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # plain output renders booleans as yes/no; strings pass through
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # structured formatters (json/template) receive the raw value
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # label selection drives coloring: flag variants whose repo state
        # disagrees with current config, or with the Mercurial default
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1018 1018
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(probe):
        # render a boolean capability probe in the command's yes/no style
        return 'yes' if probe else 'no'

    ui.write(('path: %s\n') % path)
    mountpoint = util.getfsmountpoint(path) or '(unknown)'
    ui.write(('mounted on: %s\n') % mountpoint)
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # case sensitivity needs a scratch file to probe; best effort only,
    # report '(unknown)' when the temp file cannot be created
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1035 1035
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    kwargs = {r'bundlecaps': None}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    bundle = peer.getbundle('debug', **kwargs)

    # map the user-facing compression name onto the on-disk bundle type
    requested = opts.get('type', 'bzip2').lower()
    knowntypes = {'none': 'HG10UN',
                  'bzip2': 'HG10BZ',
                  'gzip': 'HG10GZ',
                  'bundle2': 'HG20'}
    bundletype = knowntypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1070 1070
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None     # the path (file or parent dir) that is ignored
            ignoredata = None  # (ignorefile, lineno, line) of the matched rule
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; walk its parent
                    # directories to see whether one of them is
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1112 1112
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # full-length hashes with --debug, short (12-char) prefixes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure one node to learn how wide the nodeid columns must be
        idlen = len(shortfn(r.node(i)))
        break

    # header line; layout depends on the format and on verbosity
    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    # one row per revision in the revlog
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents for unreadable entries
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1177 1177
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        # one edge per parent, pointing parent -> child; the first parent
        # edge is always written (rev of nullid renders as -1), the second
        # only when it is a real parent
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
1192 1192
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # NOTE(review): this helper is not referenced anywhere in this function
    # anymore -- it looks like a leftover from an earlier editor check
    def writetemp(contents):
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # count of detected problems; doubles as the command's return code
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS protocol versions plus SNI support, as detected by sslutil
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # verify the C extensions actually import when policy allows them
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered, importable, and wire-protocol capable
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused as the "templates are OK" flag below
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the built-in default, so a missing 'vi' means "no editor
    # configured"; any other missing binary is a misconfiguration
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1366 1366
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # query all nodes in one round trip, then render each answer as 0/1
    nodes = [bin(s) for s in ids]
    bits = []
    for known in peer.known(nodes):
        bits.append("1" if known else "0")
    ui.write("%s\n" % "".join(bits))
1380 1380
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept only for old completion scripts; the real
    # implementation lives in debugnamecomplete
    return debugnamecomplete(ui, repo, *args)
1385 1385
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock / --force-wlock: unconditionally remove the lock files
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire (non-blocking) and hold until the
    # user answers the prompt; the finally clause guarantees release
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report current lock holders
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else holds it; release immediately
            l.release()
        else:
            # somebody holds the lock: describe holder, pid/host and age
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    # non-zero exit when any lock is held
    return held
1482 1482
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog._revlog
        try:
            cache = r._fulltextcache
        except AttributeError:
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read() # stores revision in cache too

        if not len(cache):
            # NOTE(review): this message lacks a trailing newline, unlike
            # every other message this command writes -- confirm intended
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1529 1529
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: NUL-separated driver name and state
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge records; v1 lacks the 'other node' field
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras record: filename then key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # merge labels: local, other, and optionally base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'order'; the rest by payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    # prefer v2 records when present and consistent with v1
    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1628 1628
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # gather every namespace except 'branches'; branch names are handled
    # separately below because only open branches should be completed
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # no arguments means "complete everything" (empty prefix)
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1648 1648
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full-length hex node id; aborts on anything shorter,
        # longer or non-hex
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # NOTE(review): 'cannot used' is a typo in this
                    # user-facing message ('cannot use')
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally limited to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # --index needs global indices, so iterate all markers but
            # only display the relevant subset
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1765 1765
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Normalize the user-supplied path and reject anything that does
        # not live inside the repository root.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate the spec to match on
        # platforms with a different separator, and translate results back.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        for f, st in dirstate.iteritems():
            if not (f.startswith(spec) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            # without --full, stop at the next path separator and offer
            # the containing directory instead
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter flags;
    # no flags means "all of n, m, a, r".
    acceptable = ''
    for flag, states in [(r'normal', 'nm'), (r'added', 'a'),
                         (r'removed', 'r')]:
        if opts[flag]:
            acceptable += states
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    matches = set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        matches.update(f)
        matches.update(d)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(matches)))
    ui.write('\n')
1830 1830
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; the output is only rendered when
    # --debug is in effect.
    with ui.configoverride({('devel', 'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1849 1849
@command('debugpickmergetool',
    [('r', 'rev', '', _('check for files in this revision'), _('REV')),
     ('', 'changedelete', None, _('emulate merging change and delete')),
    ] + cmdutil.walkopts + cmdutil.mergetoolopts,
    _('[PATTERN]...'),
    inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    forced = opts['tool']
    if forced:
        overrides[('ui', 'forcemerge')] = forced
        ui.note(('with --tool %r\n') % (pycompat.bytestr(forced)))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # report the other tool-selection inputs when --verbose
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        quiet = not ui.debugflag
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            try:
                if quiet:
                    # suppress the chatter _picktool emits unless --debug
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                    fctx.isbinary(),
                                                    'l' in fctx.flags(),
                                                    changedelete)
            finally:
                if quiet:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1928 1928
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
        return

    # update mode: compare-and-set the key over the wire
    key, old, new = keyinfo
    args = {
        'namespace': namespace,
        'key': key,
        'old': old,
        'new': new,
    }
    with target.commandexecutor() as executor:
        r = executor.callcommand('pushkey', args).result()

    ui.status(pycompat.bytestr(r) + '\n')
    return not r
1956 1956
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvec) of two revisions and print their
    # depths, delta, hamming distance, distance, and relation:
    # '=' equal, '>' / '<' ordered, '|' neither orders the other.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): 'rel' is only bound when one of the four comparisons
    # above succeeds; presumably pvec guarantees they are exhaustive,
    # otherwise the last write below raises NameError -- confirm.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1977 1977
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; with --minimal we narrow the
        # rebuild to the files whose tracked state disagrees between the
        # dirstate and the target manifest (see command doc above).
        changedfiles = None
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            onlymanifest = inmanifest - indirstate
            onlydirstate = indirstate - inmanifest
            notadded = set(f for f in onlydirstate if dirstate[f] != 'a')
            changedfiles = onlymanifest | notadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2015 2015
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the work is delegated to repair.rebuildfncache.
    repair.rebuildfncache(ui, repo)
2020 2020
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        relpath = matcher.rel(abspath)
        # the filelog records the copy source (path, node) if any
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
2038 2038
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: print one raw row per revision and return without
        # computing any of the aggregate statistics below.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision stops counting as a head once a child of it is seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # ratio of cumulative rawsize over bytes consumed so far
                # (r.end(rev) is presumably the store offset just past this
                # revision -- confirm against the revlog implementation)
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode the revlog header: the low 16 bits are the format version,
    # the remaining bits are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each triple is [min, max, total], updated
    # by addsize() below (slot 0 stays None until the first sample)
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l`
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot: a chain of its own
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta: extend the delta parent's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # classify which revision the delta was computed against
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # the [2] slots are converted in place from totals to averages below;
    # the totals are saved first where still needed
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # integer column wide enough for `max`
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # integer column plus a percentage, optionally padded
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) arguments for the formats above
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2338 2338
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
    ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # transformation pipeline: each stage maps a parse tree to a parse tree
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # decide which stages are printed: always, or only when they changed
    # the tree compared to the previously printed one
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, keeping every intermediate tree for later use
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized tree and print a
        # unified-diff-style comparison of the resulting revision lists
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2441 2441
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are two mutually-exclusive destinations
    # for the protocol I/O log
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        # NOTE(review): buffering=1 combined with a binary mode is not
        # actually line buffering on Python 3 -- confirm this is intended.
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2478 2478
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # resolve both revisions up front; a missing second parent defaults
    # to the null revision
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(p1, p2)
2496 2496
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # determine the address to probe; only schemes with a known default
    # port are supported
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_("only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_("malformed port number in URL"))

    from . import win32

    # fetch the peer certificate without validating it; validation is the
    # job of the chain check below
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if complete:
            ui.status(_('full certificate chain is available\n'))
        else:
            ui.status(_('certificate chain is incomplete, updating... '))
            if win32.checkcertificatechain(cert):
                ui.status(_('done.\n'))
            else:
                ui.status(_('failed.\n'))
    finally:
        s.close()
2558 2558
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state entries of the given (or working) revision,
    # one "path/source/revision" record per subrepository.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2569 2569
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() invocations so repeated ancestry work
    # is computed only once per command run.
    cache = {}
    closest = opts[r'closest']
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=closest,
                                               cache=cache):
            if succsset:
                # One indented line per successors set, nodes space-separated.
                ui.write(' ')
                ui.write(' '.join(short(node) for node in succsset))
            ui.write('\n')
2622 2622
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions into template properties. 'ui' is
    # reserved, and an empty key or a missing '=' is rejected.
    props = {}
    for define in opts[r'define']:
        try:
            key, value = (part.strip() for part in define.split('=', 1))
            if not key or key == 'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % define)

    if ui.verbose:
        # Show the raw parse tree and, when aliases change it, the
        # alias-expanded tree as well.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    def showsymbols(t):
        # With --verbose, list the keywords and functions the template uses.
        kwds, funcs = t.symbolsuseddefault()
        ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
        ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with only the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
2679 2679
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Echo the captured value so tests can check what ui.getpass() returned.
    # Fixed typo: the label previously read 'respose'; it now matches the
    # 'response:' label emitted by the sibling debuguiprompt command.
    ui.write(('response: %s\n') % r)
2687 2687
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo whatever ui.prompt() captured so tests can verify prompt handling.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2695 2695
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock so every cache
    # can be rebuilt against a consistent repository state.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2701 2701
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin CLI wrapper: all planning/locking/conversion logic lives in the
    # upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2726 2726
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(matcher), '\n')
    files = list(repo[None].walk(matcher))
    if not files:
        return

    # Honor ui.slash on platforms whose native separator is not '/'.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        normalize = util.normpath
    else:
        normalize = lambda fn: fn

    # Column widths are sized to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in files),
        max(len(matcher.rel(fname)) for fname in files))
    for fname in files:
        flag = 'exact' if matcher.exact(fname) else ''
        line = fmt % (fname, normalize(matcher.rel(fname)), flag)
        ui.write("%s\n" % line.rstrip())
2747 2747
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # For content-divergence, list the divergent nodes with their phases.
        divergent = entry.get('divergentnodes')
        if divergent:
            dnodes = ' '.join('%s (%s)' % (d.hex(), d.phasestr())
                              for d in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2758 2758
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the peer's debugwireargs wire command with arbitrary
    # positional and keyword arguments.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Connection-related options are not wire arguments; drop them.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {k: v for k, v in opts.iteritems() if v}
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    first = repo.debugwireargs(*vals, **args)
    second = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2782 2782
2783 2783 def _parsewirelangblocks(fh):
2784 2784 activeaction = None
2785 2785 blocklines = []
2786 2786
2787 2787 for line in fh:
2788 2788 line = line.rstrip()
2789 2789 if not line:
2790 2790 continue
2791 2791
2792 2792 if line.startswith(b'#'):
2793 2793 continue
2794 2794
2795 2795 if not line.startswith(b' '):
2796 2796 # New block. Flush previous one.
2797 2797 if activeaction:
2798 2798 yield activeaction, blocklines
2799 2799
2800 2800 activeaction = line
2801 2801 blocklines = []
2802 2802 continue
2803 2803
2804 2804 # Else we start with an indent.
2805 2805
2806 2806 if not activeaction:
2807 2807 raise error.Abort(_('indented line outside of block'))
2808 2808
2809 2809 blocklines.append(line)
2810 2810
2811 2811 # Flush last block.
2812 2812 if activeaction:
2813 2813 yield activeaction, blocklines
2814 2814
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    # Fixed hint: "http2" is an accepted value (see the membership test
    # above) but was previously missing from the hint text.
    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                ui.status(_('result: %s\n') % stringutil.escapestr(res))
                ui.status(_('remote output: %s\n') %
                          stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                # Fixed message: the closing quote was previously missing.
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fixed: split() returns a list; open the path element.
                    # Previously the whole list was passed to open(), which
                    # raised TypeError and made BODYFILE unusable.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,274 +1,284 b''
1 1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 2 #
3 3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8 """
9 9 The algorithm works in the following way. You have two repositories: local and
10 10 remote. They both contain a DAG of changelists.
11 11
12 12 The goal of the discovery protocol is to find one set of node *common*,
13 13 the set of nodes shared by local and remote.
14 14
15 15 One of the issues with the original protocol was latency: it could
16 16 potentially require lots of roundtrips to discover that the local repo was a
17 17 subset of remote (which is a very common case, you usually have few changes
18 18 compared to upstream, while upstream probably had lots of development).
19 19
20 20 The new protocol only requires one interface for the remote repo: `known()`,
21 21 which given a set of changelists tells you if they are present in the DAG.
22 22
23 23 The algorithm then works as follows:
24 24
25 25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 26 all nodes are in `unknown`.
27 27 - Take a sample from `unknown`, call `remote.known(sample)`
28 28 - For each node that remote knows, move it and all its ancestors to `common`
29 29 - For each node that remote doesn't know, move it and all its descendants
30 30 to `missing`
31 31 - Iterate until `unknown` is empty
32 32
33 33 There are a couple optimizations, first is instead of starting with a random
34 34 sample of missing, start by sending all heads, in the case where the local
35 35 repo is a subset, you computed the answer in one round trip.
36 36
37 37 Then you can do something similar to the bisecting strategy used when
38 38 finding faulty changesets. Instead of random samples, you can try picking
39 39 nodes that will maximize the number of nodes that will be
40 40 classified with it (since all ancestors or descendants will be marked as well).
41 41 """
42 42
43 43 from __future__ import absolute_import
44 44
45 45 import collections
46 46 import random
47 47
48 48 from .i18n import _
49 49 from .node import (
50 50 nullid,
51 51 nullrev,
52 52 )
53 53 from . import (
54 54 dagutil,
55 55 error,
56 56 util,
57 57 )
58 58
59 59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 60 """update an existing sample to match the expected size
61 61
62 62 The sample is updated with nodes exponentially distant from each head of the
63 63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64 64
65 65 If a target size is specified, the sampling will stop once this size is
66 66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 67 reached.
68 68
69 69 :dag: a dag object from dagutil
70 70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 71 :sample: a sample to update
72 72 :quicksamplesize: optional target size of the sample"""
73 73 # if nodes is empty we scan the entire graph
74 74 if nodes:
75 75 heads = dag.headsetofconnecteds(nodes)
76 76 else:
77 77 heads = dag.heads()
78 78 dist = {}
79 79 visit = collections.deque(heads)
80 80 seen = set()
81 81 factor = 1
82 82 while visit:
83 83 curr = visit.popleft()
84 84 if curr in seen:
85 85 continue
86 86 d = dist.setdefault(curr, 1)
87 87 if d > factor:
88 88 factor *= 2
89 89 if d == factor:
90 90 sample.add(curr)
91 91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 92 return
93 93 seen.add(curr)
94 94 for p in dag.parents(curr):
95 95 if not nodes or p in nodes:
96 96 dist.setdefault(p, d + 1)
97 97 visit.append(p)
98 98
def _takequicksample(dag, nodes, size):
    """take a cheap sample of at most <size> revisions

    Meant for the initial round trip: it favors the heads of <nodes> and,
    if there is room left, close ancestors of those heads.

    :dag: a dag object
    :nodes: set of revisions to discover
    :size: the maximum size of the sample"""
    sample = dag.headsetofconnecteds(nodes)
    if len(sample) < size:
        # Not enough heads: pad with exponentially spaced ancestors.
        _updatesample(dag, None, sample, quicksamplesize=size)
        return sample
    return _limitsample(sample, size)
113 113
def _takefullsample(dag, nodes, size):
    """take a sample of <size> revisions spread across <nodes>

    The sample mixes the heads of <nodes>, exponentially spaced ancestors
    of those heads, exponentially spaced descendants of the roots (via the
    inverted graph), and random padding if still short of <size>."""
    # seed with the heads of the undecided set
    sample = dag.headsetofconnecteds(nodes)
    # enrich with exponentially spaced ancestors of the heads
    _updatesample(dag, nodes, sample)
    # ... and, walking the inverted graph, of the roots
    _updatesample(dag.inverse(), nodes, sample)
    assert sample
    sample = _limitsample(sample, size)
    shortfall = size - len(sample)
    if shortfall > 0:
        # still too small: top up with random undecided revisions
        sample.update(random.sample(list(nodes - sample), shortfall))
    return sample
126 126
127 127 def _limitsample(sample, desiredlen):
128 128 """return a random subset of sample of at most desiredlen item"""
129 129 if len(sample) > desiredlen:
130 130 sample = set(random.sample(sample, desiredlen))
131 131 return sample
132 132
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True,
                    ancestorsof=None):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    Implements the sampling discovery protocol described in the module
    docstring: starting from the local heads, samples of undecided
    revisions are sent to the remote's ``known`` command and classified as
    common or missing from the answers, until no revision is left
    undecided.

    :ui: ui object used for status/debug/progress output
    :local: local repository
    :remote: remote peer; must support the ``heads`` and ``known`` commands
    :initialsamplesize: size of the first, cheap sample
    :fullsamplesize: size of the samples once full discovery is under way
    :abortwhenunrelated: if True, raise Abort when no common node exists
    :ancestorsof: optional list of nodes; restricts discovery to their
        ancestors

    Returns (common, anyincoming, remoteheads) where ``common`` is a
    collection of node hashes known on both sides, ``anyincoming`` is a
    boolean, and ``remoteheads`` is the raw head list sent by the remote.
    '''
    start = util.timer()

    roundtrips = 0
    cl = local.changelog
    clnode = cl.node
    clrev = cl.rev
    localsubset = None

    if ancestorsof is not None:
        localsubset = [clrev(n) for n in ancestorsof]
    dag = dagutil.revlogdag(cl, localsubset=localsubset)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = _limitsample(ownheads, initialsamplesize)
    # indices between sample and externalized version must match
    sample = list(sample)

    # Ask for the remote heads and probe our own head sample in a single
    # batched round trip.
    with remote.commandexecutor() as e:
        fheads = e.callcommand('heads', {})
        fknown = e.callcommand('known', {
            'nodes': [clnode(r) for r in sample],
        })

    srvheadhashes, yesno = fheads.result(), fknown.result()

    if cl.tip() == nullid:
        # Local repo is empty: everything the remote has is incoming.
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    # Resolve the remote head nodes to local revision numbers, dropping
    # heads this repo doesn't have.
    srvheads = []
    for node in srvheadhashes:
        if node == nullid:
            continue

        try:
            srvheads.append(clrev(node))
        # Catches unknown and filtered nodes.
        except error.LookupError:
            continue

    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return srvheadhashes, False, srvheadhashes

    if len(sample) == len(ownheads) and all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = [clnode(r) for r in ownheads]
        return ownheadhashes, True, srvheadhashes

    # full blown discovery

    # own nodes I know we both know
    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common = cl.incrementalmissingrevs(srvheads)
    commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
    common.addbases(commoninsample)
    # own nodes where I don't know if remote knows them
    undecided = set(common.missingancestors(ownheads))
    # own nodes I know remote lacks
    missing = set()

    full = False
    progress = ui.makeprogress(_('searching'), unit=_('queries'))
    while undecided:

        # Everything reachable from a revision the remote doesn't know is
        # also missing on the remote.
        if sample:
            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

        undecided.difference_update(missing)

        if not undecided:
            break

        if full or common.hasbases():
            if full:
                ui.note(_("sampling from both directions\n"))
            else:
                ui.debug("taking initial sample\n")
            samplefunc = _takefullsample
            targetsize = fullsamplesize
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            samplefunc = _takequicksample
            targetsize = initialsamplesize
        if len(undecided) < targetsize:
            # Fewer undecided revisions than the target: just send them all.
            sample = list(undecided)
        else:
            sample = samplefunc(dag, undecided, targetsize)

        roundtrips += 1
        progress.update(roundtrips)
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)

        with remote.commandexecutor() as e:
            yesno = e.callcommand('known', {
                'nodes': [clnode(r) for r in sample],
            }).result()

        full = True

        if sample:
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.addbases(commoninsample)
            common.removeancestorsfrom(undecided)

    # heads(common) == heads(common.bases) since common represents common.bases
    # and all its ancestors
    result = dag.headsetofconnecteds(common.bases)
    # common.bases can include nullrev, but our contract requires us to not
    # return any heads in that case, so discard that
    result.discard(nullrev)
    elapsed = util.timer() - start
    progress.complete()
    ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
    msg = ('found %d common and %d unknown server heads,'
           ' %d roundtrips in %.4fs\n')
    missing = set(result) - set(srvheads)
    ui.log('discovery', msg, len(result), len(missing), roundtrips,
           elapsed)

    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise error.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return ({nullid}, True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    # Externalize: convert local revision numbers back to node hashes.
    result = {clnode(r) for r in result}
    return result, anyincoming, srvheadhashes
General Comments 0
You need to be logged in to leave comments. Login now