debugdiscovery: drop reference to non-existent --serverlog option...
Martin von Zweigbergk
r35419:c73d23cb default
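Context for the change below: `debugdiscovery` declares only `--old`, `--nonheads` and `--rev` (plus the shared remote options) in its `@command` table, so `opts.get('serverlog')` in the removed block was always None and the server-log replay branch was unreachable; the rewrite keeps only what used to be the `else:` branch. The following is a minimal, hypothetical sketch (the toy `command` decorator and function names are assumptions, not Mercurial's registrar internals) of why a flag that is never declared cannot show up in `opts`:

# Toy sketch (hypothetical names): options declared in the @command table
# become keys in **opts; an undeclared flag is simply absent, so .get() on
# it always returns None.

def command(table):
    """Stand-in decorator: seed **opts with defaults for the declared flags."""
    def wrap(fn):
        def run(**given):
            opts = {name: default for _short, name, default, _help in table}
            opts.update(given)
            return fn(**opts)
        return run
    return wrap

@command([('', 'old', None, 'use old-style discovery'),
          ('', 'nonheads', None, 'use old-style discovery with non-heads'),
          ('', 'rev', [], 'restrict discovery to this set of revs')])
def debugdiscovery_toy(**opts):
    # '--serverlog' is not in the table above, so this is always None and a
    # branch guarded by "if serverlogs:" could never execute.
    return opts.get('serverlog')

assert debugdiscovery_toy() is None
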
@@ -1,2460 +1,2444
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
80 80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 81 def debugancestor(ui, repo, *args):
82 82 """find the ancestor revision of two revisions in a given index"""
83 83 if len(args) == 3:
84 84 index, rev1, rev2 = args
85 85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 86 lookup = r.lookup
87 87 elif len(args) == 2:
88 88 if not repo:
89 89 raise error.Abort(_('there is no Mercurial repository here '
90 90 '(.hg not found)'))
91 91 rev1, rev2 = args
92 92 r = repo.changelog
93 93 lookup = repo.lookup
94 94 else:
95 95 raise error.Abort(_('either two or three arguments required'))
96 96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98 98
99 99 @command('debugapplystreamclonebundle', [], 'FILE')
100 100 def debugapplystreamclonebundle(ui, repo, fname):
101 101 """apply a stream clone bundle file"""
102 102 f = hg.openpath(ui, fname)
103 103 gen = exchange.readbundle(ui, f, fname)
104 104 gen.apply(repo)
105 105
106 106 @command('debugbuilddag',
107 107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 109 ('n', 'new-file', None, _('add new file at each rev'))],
110 110 _('[OPTION]... [TEXT]'))
111 111 def debugbuilddag(ui, repo, text=None,
112 112 mergeable_file=False,
113 113 overwritten_file=False,
114 114 new_file=False):
115 115 """builds a repo with a given DAG from scratch in the current empty repo
116 116
117 117 The description of the DAG is read from stdin if not given on the
118 118 command line.
119 119
120 120 Elements:
121 121
122 122 - "+n" is a linear run of n nodes based on the current default parent
123 123 - "." is a single node based on the current default parent
124 124 - "$" resets the default parent to null (implied at the start);
125 125 otherwise the default parent is always the last node created
126 126 - "<p" sets the default parent to the backref p
127 127 - "*p" is a fork at parent p, which is a backref
128 128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 129 - "/p2" is a merge of the preceding node and p2
130 130 - ":tag" defines a local tag for the preceding node
131 131 - "@branch" sets the named branch for subsequent nodes
132 132 - "#...\\n" is a comment up to the end of the line
133 133
134 134 Whitespace between the above elements is ignored.
135 135
136 136 A backref is either
137 137
138 138 - a number n, which references the node curr-n, where curr is the current
139 139 node, or
140 140 - the name of a local tag you placed earlier using ":tag", or
141 141 - empty to denote the default parent.
142 142
143 143 All string valued-elements are either strictly alphanumeric, or must
144 144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 145 """
146 146
147 147 if text is None:
148 148 ui.status(_("reading DAG from stdin\n"))
149 149 text = ui.fin.read()
150 150
151 151 cl = repo.changelog
152 152 if len(cl) > 0:
153 153 raise error.Abort(_('repository is not empty'))
154 154
155 155 # determine number of revs in DAG
156 156 total = 0
157 157 for type, data in dagparser.parsedag(text):
158 158 if type == 'n':
159 159 total += 1
160 160
161 161 if mergeable_file:
162 162 linesperrev = 2
163 163 # make a file with k lines per rev
164 164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 165 initialmergedlines.append("")
166 166
167 167 tags = []
168 168
169 169 wlock = lock = tr = None
170 170 try:
171 171 wlock = repo.wlock()
172 172 lock = repo.lock()
173 173 tr = repo.transaction("builddag")
174 174
175 175 at = -1
176 176 atbranch = 'default'
177 177 nodeids = []
178 178 id = 0
179 179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 180 for type, data in dagparser.parsedag(text):
181 181 if type == 'n':
182 182 ui.note(('node %s\n' % str(data)))
183 183 id, ps = data
184 184
185 185 files = []
186 186 filecontent = {}
187 187
188 188 p2 = None
189 189 if mergeable_file:
190 190 fn = "mf"
191 191 p1 = repo[ps[0]]
192 192 if len(ps) > 1:
193 193 p2 = repo[ps[1]]
194 194 pa = p1.ancestor(p2)
195 195 base, local, other = [x[fn].data() for x in (pa, p1,
196 196 p2)]
197 197 m3 = simplemerge.Merge3Text(base, local, other)
198 198 ml = [l.strip() for l in m3.merge_lines()]
199 199 ml.append("")
200 200 elif at > 0:
201 201 ml = p1[fn].data().split("\n")
202 202 else:
203 203 ml = initialmergedlines
204 204 ml[id * linesperrev] += " r%i" % id
205 205 mergedtext = "\n".join(ml)
206 206 files.append(fn)
207 207 filecontent[fn] = mergedtext
208 208
209 209 if overwritten_file:
210 210 fn = "of"
211 211 files.append(fn)
212 212 filecontent[fn] = "r%i\n" % id
213 213
214 214 if new_file:
215 215 fn = "nf%i" % id
216 216 files.append(fn)
217 217 filecontent[fn] = "r%i\n" % id
218 218 if len(ps) > 1:
219 219 if not p2:
220 220 p2 = repo[ps[1]]
221 221 for fn in p2:
222 222 if fn.startswith("nf"):
223 223 files.append(fn)
224 224 filecontent[fn] = p2[fn].data()
225 225
226 226 def fctxfn(repo, cx, path):
227 227 if path in filecontent:
228 228 return context.memfilectx(repo, cx, path,
229 229 filecontent[path])
230 230 return None
231 231
232 232 if len(ps) == 0 or ps[0] < 0:
233 233 pars = [None, None]
234 234 elif len(ps) == 1:
235 235 pars = [nodeids[ps[0]], None]
236 236 else:
237 237 pars = [nodeids[p] for p in ps]
238 238 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
239 239 date=(id, 0),
240 240 user="debugbuilddag",
241 241 extra={'branch': atbranch})
242 242 nodeid = repo.commitctx(cx)
243 243 nodeids.append(nodeid)
244 244 at = id
245 245 elif type == 'l':
246 246 id, name = data
247 247 ui.note(('tag %s\n' % name))
248 248 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
249 249 elif type == 'a':
250 250 ui.note(('branch %s\n' % data))
251 251 atbranch = data
252 252 ui.progress(_('building'), id, unit=_('revisions'), total=total)
253 253 tr.close()
254 254
255 255 if tags:
256 256 repo.vfs.write("localtags", "".join(tags))
257 257 finally:
258 258 ui.progress(_('building'), None)
259 259 release(tr, lock, wlock)
260 260
261 261 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
262 262 indent_string = ' ' * indent
263 263 if all:
264 264 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
265 265 % indent_string)
266 266
267 267 def showchunks(named):
268 268 ui.write("\n%s%s\n" % (indent_string, named))
269 269 for deltadata in gen.deltaiter():
270 270 node, p1, p2, cs, deltabase, delta, flags = deltadata
271 271 ui.write("%s%s %s %s %s %s %s\n" %
272 272 (indent_string, hex(node), hex(p1), hex(p2),
273 273 hex(cs), hex(deltabase), len(delta)))
274 274
275 275 chunkdata = gen.changelogheader()
276 276 showchunks("changelog")
277 277 chunkdata = gen.manifestheader()
278 278 showchunks("manifest")
279 279 for chunkdata in iter(gen.filelogheader, {}):
280 280 fname = chunkdata['filename']
281 281 showchunks(fname)
282 282 else:
283 283 if isinstance(gen, bundle2.unbundle20):
284 284 raise error.Abort(_('use debugbundle2 for this file'))
285 285 chunkdata = gen.changelogheader()
286 286 for deltadata in gen.deltaiter():
287 287 node, p1, p2, cs, deltabase, delta, flags = deltadata
288 288 ui.write("%s%s\n" % (indent_string, hex(node)))
289 289
290 290 def _debugobsmarkers(ui, part, indent=0, **opts):
291 291 """display version and markers contained in 'data'"""
292 292 opts = pycompat.byteskwargs(opts)
293 293 data = part.read()
294 294 indent_string = ' ' * indent
295 295 try:
296 296 version, markers = obsolete._readmarkers(data)
297 297 except error.UnknownVersion as exc:
298 298 msg = "%sunsupported version: %s (%d bytes)\n"
299 299 msg %= indent_string, exc.version, len(data)
300 300 ui.write(msg)
301 301 else:
302 302 msg = "%sversion: %d (%d bytes)\n"
303 303 msg %= indent_string, version, len(data)
304 304 ui.write(msg)
305 305 fm = ui.formatter('debugobsolete', opts)
306 306 for rawmarker in sorted(markers):
307 307 m = obsutil.marker(None, rawmarker)
308 308 fm.startitem()
309 309 fm.plain(indent_string)
310 310 cmdutil.showmarker(fm, m)
311 311 fm.end()
312 312
313 313 def _debugphaseheads(ui, data, indent=0):
314 314 """display version and markers contained in 'data'"""
315 315 indent_string = ' ' * indent
316 316 headsbyphase = phases.binarydecode(data)
317 317 for phase in phases.allphases:
318 318 for head in headsbyphase[phase]:
319 319 ui.write(indent_string)
320 320 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
321 321
322 322 def _quasirepr(thing):
323 323 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
324 324 return '{%s}' % (
325 325 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
326 326 return pycompat.bytestr(repr(thing))
327 327
328 328 def _debugbundle2(ui, gen, all=None, **opts):
329 329 """lists the contents of a bundle2"""
330 330 if not isinstance(gen, bundle2.unbundle20):
331 331 raise error.Abort(_('not a bundle2 file'))
332 332 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
333 333 parttypes = opts.get(r'part_type', [])
334 334 for part in gen.iterparts():
335 335 if parttypes and part.type not in parttypes:
336 336 continue
337 337 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
338 338 if part.type == 'changegroup':
339 339 version = part.params.get('version', '01')
340 340 cg = changegroup.getunbundler(version, part, 'UN')
341 341 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
342 342 if part.type == 'obsmarkers':
343 343 _debugobsmarkers(ui, part, indent=4, **opts)
344 344 if part.type == 'phase-heads':
345 345 _debugphaseheads(ui, part, indent=4)
346 346
347 347 @command('debugbundle',
348 348 [('a', 'all', None, _('show all details')),
349 349 ('', 'part-type', [], _('show only the named part type')),
350 350 ('', 'spec', None, _('print the bundlespec of the bundle'))],
351 351 _('FILE'),
352 352 norepo=True)
353 353 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
354 354 """lists the contents of a bundle"""
355 355 with hg.openpath(ui, bundlepath) as f:
356 356 if spec:
357 357 spec = exchange.getbundlespec(ui, f)
358 358 ui.write('%s\n' % spec)
359 359 return
360 360
361 361 gen = exchange.readbundle(ui, f, bundlepath)
362 362 if isinstance(gen, bundle2.unbundle20):
363 363 return _debugbundle2(ui, gen, all=all, **opts)
364 364 _debugchangegroup(ui, gen, all=all, **opts)
365 365
366 366 @command('debugcapabilities',
367 367 [], _('PATH'),
368 368 norepo=True)
369 369 def debugcapabilities(ui, path, **opts):
370 370 """lists the capabilities of a remote peer"""
371 371 opts = pycompat.byteskwargs(opts)
372 372 peer = hg.peer(ui, opts, path)
373 373 caps = peer.capabilities()
374 374 ui.write(('Main capabilities:\n'))
375 375 for c in sorted(caps):
376 376 ui.write((' %s\n') % c)
377 377 b2caps = bundle2.bundle2caps(peer)
378 378 if b2caps:
379 379 ui.write(('Bundle2 capabilities:\n'))
380 380 for key, values in sorted(b2caps.iteritems()):
381 381 ui.write((' %s\n') % key)
382 382 for v in values:
383 383 ui.write((' %s\n') % v)
384 384
385 385 @command('debugcheckstate', [], '')
386 386 def debugcheckstate(ui, repo):
387 387 """validate the correctness of the current dirstate"""
388 388 parent1, parent2 = repo.dirstate.parents()
389 389 m1 = repo[parent1].manifest()
390 390 m2 = repo[parent2].manifest()
391 391 errors = 0
392 392 for f in repo.dirstate:
393 393 state = repo.dirstate[f]
394 394 if state in "nr" and f not in m1:
395 395 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
396 396 errors += 1
397 397 if state in "a" and f in m1:
398 398 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
399 399 errors += 1
400 400 if state in "m" and f not in m1 and f not in m2:
401 401 ui.warn(_("%s in state %s, but not in either manifest\n") %
402 402 (f, state))
403 403 errors += 1
404 404 for f in m1:
405 405 state = repo.dirstate[f]
406 406 if state not in "nrm":
407 407 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
408 408 errors += 1
409 409 if errors:
410 410 error = _(".hg/dirstate inconsistent with current parent's manifest")
411 411 raise error.Abort(error)
412 412
413 413 @command('debugcolor',
414 414 [('', 'style', None, _('show all configured styles'))],
415 415 'hg debugcolor')
416 416 def debugcolor(ui, repo, **opts):
417 417 """show available color, effects or style"""
418 418 ui.write(('color mode: %s\n') % ui._colormode)
419 419 if opts.get(r'style'):
420 420 return _debugdisplaystyle(ui)
421 421 else:
422 422 return _debugdisplaycolor(ui)
423 423
424 424 def _debugdisplaycolor(ui):
425 425 ui = ui.copy()
426 426 ui._styles.clear()
427 427 for effect in color._activeeffects(ui).keys():
428 428 ui._styles[effect] = effect
429 429 if ui._terminfoparams:
430 430 for k, v in ui.configitems('color'):
431 431 if k.startswith('color.'):
432 432 ui._styles[k] = k[6:]
433 433 elif k.startswith('terminfo.'):
434 434 ui._styles[k] = k[9:]
435 435 ui.write(_('available colors:\n'))
436 436 # sort label with a '_' after the other to group '_background' entry.
437 437 items = sorted(ui._styles.items(),
438 438 key=lambda i: ('_' in i[0], i[0], i[1]))
439 439 for colorname, label in items:
440 440 ui.write(('%s\n') % colorname, label=label)
441 441
442 442 def _debugdisplaystyle(ui):
443 443 ui.write(_('available style:\n'))
444 444 width = max(len(s) for s in ui._styles)
445 445 for label, effects in sorted(ui._styles.items()):
446 446 ui.write('%s' % label, label=label)
447 447 if effects:
448 448 # 50
449 449 ui.write(': ')
450 450 ui.write(' ' * (max(0, width - len(label))))
451 451 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
452 452 ui.write('\n')
453 453
454 454 @command('debugcreatestreamclonebundle', [], 'FILE')
455 455 def debugcreatestreamclonebundle(ui, repo, fname):
456 456 """create a stream clone bundle file
457 457
458 458 Stream bundles are special bundles that are essentially archives of
459 459 revlog files. They are commonly used for cloning very quickly.
460 460 """
461 461 # TODO we may want to turn this into an abort when this functionality
462 462 # is moved into `hg bundle`.
463 463 if phases.hassecret(repo):
464 464 ui.warn(_('(warning: stream clone bundle will contain secret '
465 465 'revisions)\n'))
466 466
467 467 requirements, gen = streamclone.generatebundlev1(repo)
468 468 changegroup.writechunks(ui, gen, fname)
469 469
470 470 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
471 471
472 472 @command('debugdag',
473 473 [('t', 'tags', None, _('use tags as labels')),
474 474 ('b', 'branches', None, _('annotate with branch names')),
475 475 ('', 'dots', None, _('use dots for runs')),
476 476 ('s', 'spaces', None, _('separate elements by spaces'))],
477 477 _('[OPTION]... [FILE [REV]...]'),
478 478 optionalrepo=True)
479 479 def debugdag(ui, repo, file_=None, *revs, **opts):
480 480 """format the changelog or an index DAG as a concise textual description
481 481
482 482 If you pass a revlog index, the revlog's DAG is emitted. If you list
483 483 revision numbers, they get labeled in the output as rN.
484 484
485 485 Otherwise, the changelog DAG of the current repo is emitted.
486 486 """
487 487 spaces = opts.get(r'spaces')
488 488 dots = opts.get(r'dots')
489 489 if file_:
490 490 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
491 491 file_)
492 492 revs = set((int(r) for r in revs))
493 493 def events():
494 494 for r in rlog:
495 495 yield 'n', (r, list(p for p in rlog.parentrevs(r)
496 496 if p != -1))
497 497 if r in revs:
498 498 yield 'l', (r, "r%i" % r)
499 499 elif repo:
500 500 cl = repo.changelog
501 501 tags = opts.get(r'tags')
502 502 branches = opts.get(r'branches')
503 503 if tags:
504 504 labels = {}
505 505 for l, n in repo.tags().items():
506 506 labels.setdefault(cl.rev(n), []).append(l)
507 507 def events():
508 508 b = "default"
509 509 for r in cl:
510 510 if branches:
511 511 newb = cl.read(cl.node(r))[5]['branch']
512 512 if newb != b:
513 513 yield 'a', newb
514 514 b = newb
515 515 yield 'n', (r, list(p for p in cl.parentrevs(r)
516 516 if p != -1))
517 517 if tags:
518 518 ls = labels.get(r)
519 519 if ls:
520 520 for l in ls:
521 521 yield 'l', (r, l)
522 522 else:
523 523 raise error.Abort(_('need repo for changelog dag'))
524 524
525 525 for line in dagparser.dagtextlines(events(),
526 526 addspaces=spaces,
527 527 wraplabels=True,
528 528 wrapannotations=True,
529 529 wrapnonlinear=dots,
530 530 usedots=dots,
531 531 maxlinewidth=70):
532 532 ui.write(line)
533 533 ui.write("\n")
534 534
535 535 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
536 536 def debugdata(ui, repo, file_, rev=None, **opts):
537 537 """dump the contents of a data file revision"""
538 538 opts = pycompat.byteskwargs(opts)
539 539 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
540 540 if rev is not None:
541 541 raise error.CommandError('debugdata', _('invalid arguments'))
542 542 file_, rev = None, file_
543 543 elif rev is None:
544 544 raise error.CommandError('debugdata', _('invalid arguments'))
545 545 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
546 546 try:
547 547 ui.write(r.revision(r.lookup(rev), raw=True))
548 548 except KeyError:
549 549 raise error.Abort(_('invalid revision identifier %s') % rev)
550 550
551 551 @command('debugdate',
552 552 [('e', 'extended', None, _('try extended date formats'))],
553 553 _('[-e] DATE [RANGE]'),
554 554 norepo=True, optionalrepo=True)
555 555 def debugdate(ui, date, range=None, **opts):
556 556 """parse and display a date"""
557 557 if opts[r"extended"]:
558 558 d = util.parsedate(date, util.extendeddateformats)
559 559 else:
560 560 d = util.parsedate(date)
561 561 ui.write(("internal: %s %s\n") % d)
562 562 ui.write(("standard: %s\n") % util.datestr(d))
563 563 if range:
564 564 m = util.matchdate(range)
565 565 ui.write(("match: %s\n") % m(d[0]))
566 566
567 567 @command('debugdeltachain',
568 568 cmdutil.debugrevlogopts + cmdutil.formatteropts,
569 569 _('-c|-m|FILE'),
570 570 optionalrepo=True)
571 571 def debugdeltachain(ui, repo, file_=None, **opts):
572 572 """dump information about delta chains in a revlog
573 573
574 574 Output can be templatized. Available template keywords are:
575 575
576 576 :``rev``: revision number
577 577 :``chainid``: delta chain identifier (numbered by unique base)
578 578 :``chainlen``: delta chain length to this revision
579 579 :``prevrev``: previous revision in delta chain
580 580 :``deltatype``: role of delta / how it was computed
581 581 :``compsize``: compressed size of revision
582 582 :``uncompsize``: uncompressed size of revision
583 583 :``chainsize``: total size of compressed revisions in chain
584 584 :``chainratio``: total chain size divided by uncompressed revision size
585 585 (new delta chains typically start at ratio 2.00)
586 586 :``lindist``: linear distance from base revision in delta chain to end
587 587 of this revision
588 588 :``extradist``: total size of revisions not part of this delta chain from
589 589 base of delta chain to end of this revision; a measurement
590 590 of how much extra data we need to read/seek across to read
591 591 the delta chain for this revision
592 592 :``extraratio``: extradist divided by chainsize; another representation of
593 593 how much unrelated data is needed to load this delta chain
594 594
595 595 If the repository is configured to use the sparse read, additional keywords
596 596 are available:
597 597
598 598 :``readsize``: total size of data read from the disk for a revision
599 599 (sum of the sizes of all the blocks)
600 600 :``largestblock``: size of the largest block of data read from the disk
601 601 :``readdensity``: density of useful bytes in the data read from the disk
602 602
603 603 The sparse read can be enabled with experimental.sparse-read = True
604 604 """
605 605 opts = pycompat.byteskwargs(opts)
606 606 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
607 607 index = r.index
608 608 generaldelta = r.version & revlog.FLAG_GENERALDELTA
609 609 withsparseread = getattr(r, '_withsparseread', False)
610 610
611 611 def revinfo(rev):
612 612 e = index[rev]
613 613 compsize = e[1]
614 614 uncompsize = e[2]
615 615 chainsize = 0
616 616
617 617 if generaldelta:
618 618 if e[3] == e[5]:
619 619 deltatype = 'p1'
620 620 elif e[3] == e[6]:
621 621 deltatype = 'p2'
622 622 elif e[3] == rev - 1:
623 623 deltatype = 'prev'
624 624 elif e[3] == rev:
625 625 deltatype = 'base'
626 626 else:
627 627 deltatype = 'other'
628 628 else:
629 629 if e[3] == rev:
630 630 deltatype = 'base'
631 631 else:
632 632 deltatype = 'prev'
633 633
634 634 chain = r._deltachain(rev)[0]
635 635 for iterrev in chain:
636 636 e = index[iterrev]
637 637 chainsize += e[1]
638 638
639 639 return compsize, uncompsize, deltatype, chain, chainsize
640 640
641 641 fm = ui.formatter('debugdeltachain', opts)
642 642
643 643 fm.plain(' rev chain# chainlen prev delta '
644 644 'size rawsize chainsize ratio lindist extradist '
645 645 'extraratio')
646 646 if withsparseread:
647 647 fm.plain(' readsize largestblk rddensity')
648 648 fm.plain('\n')
649 649
650 650 chainbases = {}
651 651 for rev in r:
652 652 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
653 653 chainbase = chain[0]
654 654 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
655 655 start = r.start
656 656 length = r.length
657 657 basestart = start(chainbase)
658 658 revstart = start(rev)
659 659 lineardist = revstart + comp - basestart
660 660 extradist = lineardist - chainsize
661 661 try:
662 662 prevrev = chain[-2]
663 663 except IndexError:
664 664 prevrev = -1
665 665
666 666 chainratio = float(chainsize) / float(uncomp)
667 667 extraratio = float(extradist) / float(chainsize)
668 668
669 669 fm.startitem()
670 670 fm.write('rev chainid chainlen prevrev deltatype compsize '
671 671 'uncompsize chainsize chainratio lindist extradist '
672 672 'extraratio',
673 673 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
674 674 rev, chainid, len(chain), prevrev, deltatype, comp,
675 675 uncomp, chainsize, chainratio, lineardist, extradist,
676 676 extraratio,
677 677 rev=rev, chainid=chainid, chainlen=len(chain),
678 678 prevrev=prevrev, deltatype=deltatype, compsize=comp,
679 679 uncompsize=uncomp, chainsize=chainsize,
680 680 chainratio=chainratio, lindist=lineardist,
681 681 extradist=extradist, extraratio=extraratio)
682 682 if withsparseread:
683 683 readsize = 0
684 684 largestblock = 0
685 685 for revschunk in revlog._slicechunk(r, chain):
686 686 blkend = start(revschunk[-1]) + length(revschunk[-1])
687 687 blksize = blkend - start(revschunk[0])
688 688
689 689 readsize += blksize
690 690 if largestblock < blksize:
691 691 largestblock = blksize
692 692
693 693 readdensity = float(chainsize) / float(readsize)
694 694
695 695 fm.write('readsize largestblock readdensity',
696 696 ' %10d %10d %9.5f',
697 697 readsize, largestblock, readdensity,
698 698 readsize=readsize, largestblock=largestblock,
699 699 readdensity=readdensity)
700 700
701 701 fm.plain('\n')
702 702
703 703 fm.end()
704 704
705 705 @command('debugdirstate|debugstate',
706 706 [('', 'nodates', None, _('do not display the saved mtime')),
707 707 ('', 'datesort', None, _('sort by saved mtime'))],
708 708 _('[OPTION]...'))
709 709 def debugstate(ui, repo, **opts):
710 710 """show the contents of the current dirstate"""
711 711
712 712 nodates = opts.get(r'nodates')
713 713 datesort = opts.get(r'datesort')
714 714
715 715 timestr = ""
716 716 if datesort:
717 717 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
718 718 else:
719 719 keyfunc = None # sort by filename
720 720 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
721 721 if ent[3] == -1:
722 722 timestr = 'unset '
723 723 elif nodates:
724 724 timestr = 'set '
725 725 else:
726 726 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
727 727 time.localtime(ent[3]))
728 728 timestr = encoding.strtolocal(timestr)
729 729 if ent[1] & 0o20000:
730 730 mode = 'lnk'
731 731 else:
732 732 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
733 733 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
734 734 for f in repo.dirstate.copies():
735 735 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
736 736
737 737 @command('debugdiscovery',
738 738 [('', 'old', None, _('use old-style discovery')),
739 739 ('', 'nonheads', None,
740 740 _('use old-style discovery with non-heads included')),
741 741 ('', 'rev', [], 'restrict discovery to this set of revs'),
742 742 ] + cmdutil.remoteopts,
743 743 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
744 744 def debugdiscovery(ui, repo, remoteurl="default", **opts):
745 745 """runs the changeset discovery protocol in isolation"""
746 746 opts = pycompat.byteskwargs(opts)
747 747 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
748 748 remote = hg.peer(repo, opts, remoteurl)
749 749 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
750 750
751 751 # make sure tests are repeatable
752 752 random.seed(12323)
753 753
754 754 def doit(pushedrevs, remoteheads, remote=remote):
755 755 if opts.get('old'):
756 756 if not util.safehasattr(remote, 'branches'):
757 757 # enable in-client legacy support
758 758 remote = localrepo.locallegacypeer(remote.local())
759 759 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
760 760 force=True)
761 761 common = set(common)
762 762 if not opts.get('nonheads'):
763 763 ui.write(("unpruned common: %s\n") %
764 764 " ".join(sorted(short(n) for n in common)))
765 765 dag = dagutil.revlogdag(repo.changelog)
766 766 all = dag.ancestorset(dag.internalizeall(common))
767 767 common = dag.externalizeall(dag.headsetofconnecteds(all))
768 768 else:
769 769 nodes = None
770 770 if pushedrevs:
771 771 revs = scmutil.revrange(repo, pushedrevs)
772 772 nodes = [repo[r].node() for r in revs]
773 773 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
774 774 ancestorsof=nodes)
775 775 common = set(common)
776 776 rheads = set(hds)
777 777 lheads = set(repo.heads())
778 778 ui.write(("common heads: %s\n") %
779 779 " ".join(sorted(short(n) for n in common)))
780 780 if lheads <= common:
781 781 ui.write(("local is subset\n"))
782 782 elif rheads <= common:
783 783 ui.write(("remote is subset\n"))
784 784
785 serverlogs = opts.get('serverlog')
786 if serverlogs:
787 for filename in serverlogs:
788 with open(filename, 'r') as logfile:
789 line = logfile.readline()
790 while line:
791 parts = line.strip().split(';')
792 op = parts[1]
793 if op == 'cg':
794 pass
795 elif op == 'cgss':
796 doit(parts[2].split(' '), parts[3].split(' '))
797 elif op == 'unb':
798 doit(parts[3].split(' '), parts[2].split(' '))
799 line = logfile.readline()
800 else:
801 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
802 opts.get('remote_head'))
803 localrevs = opts.get('rev')
804 doit(localrevs, remoterevs)
785 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
786 opts.get('remote_head'))
787 localrevs = opts['rev']
788 doit(localrevs, remoterevs)
805 789
806 790 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
807 791 def debugextensions(ui, **opts):
808 792 '''show information about active extensions'''
809 793 opts = pycompat.byteskwargs(opts)
810 794 exts = extensions.extensions(ui)
811 795 hgver = util.version()
812 796 fm = ui.formatter('debugextensions', opts)
813 797 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
814 798 isinternal = extensions.ismoduleinternal(extmod)
815 799 extsource = pycompat.fsencode(extmod.__file__)
816 800 if isinternal:
817 801 exttestedwith = [] # never expose magic string to users
818 802 else:
819 803 exttestedwith = getattr(extmod, 'testedwith', '').split()
820 804 extbuglink = getattr(extmod, 'buglink', None)
821 805
822 806 fm.startitem()
823 807
824 808 if ui.quiet or ui.verbose:
825 809 fm.write('name', '%s\n', extname)
826 810 else:
827 811 fm.write('name', '%s', extname)
828 812 if isinternal or hgver in exttestedwith:
829 813 fm.plain('\n')
830 814 elif not exttestedwith:
831 815 fm.plain(_(' (untested!)\n'))
832 816 else:
833 817 lasttestedversion = exttestedwith[-1]
834 818 fm.plain(' (%s!)\n' % lasttestedversion)
835 819
836 820 fm.condwrite(ui.verbose and extsource, 'source',
837 821 _(' location: %s\n'), extsource or "")
838 822
839 823 if ui.verbose:
840 824 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
841 825 fm.data(bundled=isinternal)
842 826
843 827 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
844 828 _(' tested with: %s\n'),
845 829 fm.formatlist(exttestedwith, name='ver'))
846 830
847 831 fm.condwrite(ui.verbose and extbuglink, 'buglink',
848 832 _(' bug reporting: %s\n'), extbuglink or "")
849 833
850 834 fm.end()
851 835
852 836 @command('debugfileset',
853 837 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
854 838 _('[-r REV] FILESPEC'))
855 839 def debugfileset(ui, repo, expr, **opts):
856 840 '''parse and apply a fileset specification'''
857 841 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
858 842 if ui.verbose:
859 843 tree = fileset.parse(expr)
860 844 ui.note(fileset.prettyformat(tree), "\n")
861 845
862 846 for f in ctx.getfileset(expr):
863 847 ui.write("%s\n" % f)
864 848
865 849 @command('debugformat',
866 850 [] + cmdutil.formatteropts,
867 851 _(''))
868 852 def debugformat(ui, repo, **opts):
869 853 """display format information about the current repository
870 854
871 855 Use --verbose to get extra information about current config value and
872 856 Mercurial default."""
873 857 opts = pycompat.byteskwargs(opts)
874 858 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
875 859 maxvariantlength = max(len('format-variant'), maxvariantlength)
876 860
877 861 def makeformatname(name):
878 862 return '%s:' + (' ' * (maxvariantlength - len(name)))
879 863
880 864 fm = ui.formatter('debugformat', opts)
881 865 if fm.isplain():
882 866 def formatvalue(value):
883 867 if util.safehasattr(value, 'startswith'):
884 868 return value
885 869 if value:
886 870 return 'yes'
887 871 else:
888 872 return 'no'
889 873 else:
890 874 formatvalue = pycompat.identity
891 875
892 876 fm.plain('format-variant')
893 877 fm.plain(' ' * (maxvariantlength - len('format-variant')))
894 878 fm.plain(' repo')
895 879 if ui.verbose:
896 880 fm.plain(' config default')
897 881 fm.plain('\n')
898 882 for fv in upgrade.allformatvariant:
899 883 fm.startitem()
900 884 repovalue = fv.fromrepo(repo)
901 885 configvalue = fv.fromconfig(repo)
902 886
903 887 if repovalue != configvalue:
904 888 namelabel = 'formatvariant.name.mismatchconfig'
905 889 repolabel = 'formatvariant.repo.mismatchconfig'
906 890 elif repovalue != fv.default:
907 891 namelabel = 'formatvariant.name.mismatchdefault'
908 892 repolabel = 'formatvariant.repo.mismatchdefault'
909 893 else:
910 894 namelabel = 'formatvariant.name.uptodate'
911 895 repolabel = 'formatvariant.repo.uptodate'
912 896
913 897 fm.write('name', makeformatname(fv.name), fv.name,
914 898 label=namelabel)
915 899 fm.write('repo', ' %3s', formatvalue(repovalue),
916 900 label=repolabel)
917 901 if fv.default != configvalue:
918 902 configlabel = 'formatvariant.config.special'
919 903 else:
920 904 configlabel = 'formatvariant.config.default'
921 905 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
922 906 label=configlabel)
923 907 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
924 908 label='formatvariant.default')
925 909 fm.plain('\n')
926 910 fm.end()
927 911
928 912 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
929 913 def debugfsinfo(ui, path="."):
930 914 """show information detected about current filesystem"""
931 915 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
932 916 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
933 917 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
934 918 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
935 919 casesensitive = '(unknown)'
936 920 try:
937 921 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
938 922 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
939 923 except OSError:
940 924 pass
941 925 ui.write(('case-sensitive: %s\n') % casesensitive)
942 926
943 927 @command('debuggetbundle',
944 928 [('H', 'head', [], _('id of head node'), _('ID')),
945 929 ('C', 'common', [], _('id of common node'), _('ID')),
946 930 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
947 931 _('REPO FILE [-H|-C ID]...'),
948 932 norepo=True)
949 933 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
950 934 """retrieves a bundle from a repo
951 935
952 936 Every ID must be a full-length hex node id string. Saves the bundle to the
953 937 given file.
954 938 """
955 939 opts = pycompat.byteskwargs(opts)
956 940 repo = hg.peer(ui, opts, repopath)
957 941 if not repo.capable('getbundle'):
958 942 raise error.Abort("getbundle() not supported by target repository")
959 943 args = {}
960 944 if common:
961 945 args[r'common'] = [bin(s) for s in common]
962 946 if head:
963 947 args[r'heads'] = [bin(s) for s in head]
964 948 # TODO: get desired bundlecaps from command line.
965 949 args[r'bundlecaps'] = None
966 950 bundle = repo.getbundle('debug', **args)
967 951
968 952 bundletype = opts.get('type', 'bzip2').lower()
969 953 btypes = {'none': 'HG10UN',
970 954 'bzip2': 'HG10BZ',
971 955 'gzip': 'HG10GZ',
972 956 'bundle2': 'HG20'}
973 957 bundletype = btypes.get(bundletype)
974 958 if bundletype not in bundle2.bundletypes:
975 959 raise error.Abort(_('unknown bundle type specified with --type'))
976 960 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
977 961
978 962 @command('debugignore', [], '[FILE]')
979 963 def debugignore(ui, repo, *files, **opts):
980 964 """display the combined ignore pattern and information about ignored files
981 965
982 966 With no argument display the combined ignore pattern.
983 967
984 968 Given space separated file names, shows if the given file is ignored and
985 969 if so, show the ignore rule (file and line number) that matched it.
986 970 """
987 971 ignore = repo.dirstate._ignore
988 972 if not files:
989 973 # Show all the patterns
990 974 ui.write("%s\n" % repr(ignore))
991 975 else:
992 976 m = scmutil.match(repo[None], pats=files)
993 977 for f in m.files():
994 978 nf = util.normpath(f)
995 979 ignored = None
996 980 ignoredata = None
997 981 if nf != '.':
998 982 if ignore(nf):
999 983 ignored = nf
1000 984 ignoredata = repo.dirstate._ignorefileandline(nf)
1001 985 else:
1002 986 for p in util.finddirs(nf):
1003 987 if ignore(p):
1004 988 ignored = p
1005 989 ignoredata = repo.dirstate._ignorefileandline(p)
1006 990 break
1007 991 if ignored:
1008 992 if ignored == nf:
1009 993 ui.write(_("%s is ignored\n") % m.uipath(f))
1010 994 else:
1011 995 ui.write(_("%s is ignored because of "
1012 996 "containing folder %s\n")
1013 997 % (m.uipath(f), ignored))
1014 998 ignorefile, lineno, line = ignoredata
1015 999 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1016 1000 % (ignorefile, lineno, line))
1017 1001 else:
1018 1002 ui.write(_("%s is not ignored\n") % m.uipath(f))
1019 1003
1020 1004 @command('debugindex', cmdutil.debugrevlogopts +
1021 1005 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1022 1006 _('[-f FORMAT] -c|-m|FILE'),
1023 1007 optionalrepo=True)
1024 1008 def debugindex(ui, repo, file_=None, **opts):
1025 1009 """dump the contents of an index file"""
1026 1010 opts = pycompat.byteskwargs(opts)
1027 1011 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1028 1012 format = opts.get('format', 0)
1029 1013 if format not in (0, 1):
1030 1014 raise error.Abort(_("unknown format %d") % format)
1031 1015
1032 1016 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1033 1017 if generaldelta:
1034 1018 basehdr = ' delta'
1035 1019 else:
1036 1020 basehdr = ' base'
1037 1021
1038 1022 if ui.debugflag:
1039 1023 shortfn = hex
1040 1024 else:
1041 1025 shortfn = short
1042 1026
1043 1027 # There might not be anything in r, so have a sane default
1044 1028 idlen = 12
1045 1029 for i in r:
1046 1030 idlen = len(shortfn(r.node(i)))
1047 1031 break
1048 1032
1049 1033 if format == 0:
1050 1034 ui.write((" rev offset length " + basehdr + " linkrev"
1051 1035 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1052 1036 elif format == 1:
1053 1037 ui.write((" rev flag offset length"
1054 1038 " size " + basehdr + " link p1 p2"
1055 1039 " %s\n") % "nodeid".rjust(idlen))
1056 1040
1057 1041 for i in r:
1058 1042 node = r.node(i)
1059 1043 if generaldelta:
1060 1044 base = r.deltaparent(i)
1061 1045 else:
1062 1046 base = r.chainbase(i)
1063 1047 if format == 0:
1064 1048 try:
1065 1049 pp = r.parents(node)
1066 1050 except Exception:
1067 1051 pp = [nullid, nullid]
1068 1052 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1069 1053 i, r.start(i), r.length(i), base, r.linkrev(i),
1070 1054 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1071 1055 elif format == 1:
1072 1056 pr = r.parentrevs(i)
1073 1057 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1074 1058 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1075 1059 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1076 1060
1077 1061 @command('debugindexdot', cmdutil.debugrevlogopts,
1078 1062 _('-c|-m|FILE'), optionalrepo=True)
1079 1063 def debugindexdot(ui, repo, file_=None, **opts):
1080 1064 """dump an index DAG as a graphviz dot file"""
1081 1065 opts = pycompat.byteskwargs(opts)
1082 1066 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1083 1067 ui.write(("digraph G {\n"))
1084 1068 for i in r:
1085 1069 node = r.node(i)
1086 1070 pp = r.parents(node)
1087 1071 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1088 1072 if pp[1] != nullid:
1089 1073 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1090 1074 ui.write("}\n")
1091 1075
1092 1076 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1093 1077 def debuginstall(ui, **opts):
1094 1078 '''test Mercurial installation
1095 1079
1096 1080 Returns 0 on success.
1097 1081 '''
1098 1082 opts = pycompat.byteskwargs(opts)
1099 1083
1100 1084 def writetemp(contents):
1101 1085 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1102 1086 f = os.fdopen(fd, pycompat.sysstr("wb"))
1103 1087 f.write(contents)
1104 1088 f.close()
1105 1089 return name
1106 1090
1107 1091 problems = 0
1108 1092
1109 1093 fm = ui.formatter('debuginstall', opts)
1110 1094 fm.startitem()
1111 1095
1112 1096 # encoding
1113 1097 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1114 1098 err = None
1115 1099 try:
1116 1100 codecs.lookup(pycompat.sysstr(encoding.encoding))
1117 1101 except LookupError as inst:
1118 1102 err = util.forcebytestr(inst)
1119 1103 problems += 1
1120 1104 fm.condwrite(err, 'encodingerror', _(" %s\n"
1121 1105 " (check that your locale is properly set)\n"), err)
1122 1106
1123 1107 # Python
1124 1108 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1125 1109 pycompat.sysexecutable)
1126 1110 fm.write('pythonver', _("checking Python version (%s)\n"),
1127 1111 ("%d.%d.%d" % sys.version_info[:3]))
1128 1112 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1129 1113 os.path.dirname(pycompat.fsencode(os.__file__)))
1130 1114
1131 1115 security = set(sslutil.supportedprotocols)
1132 1116 if sslutil.hassni:
1133 1117 security.add('sni')
1134 1118
1135 1119 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1136 1120 fm.formatlist(sorted(security), name='protocol',
1137 1121 fmt='%s', sep=','))
1138 1122
1139 1123 # These are warnings, not errors. So don't increment problem count. This
1140 1124 # may change in the future.
1141 1125 if 'tls1.2' not in security:
1142 1126 fm.plain(_(' TLS 1.2 not supported by Python install; '
1143 1127 'network connections lack modern security\n'))
1144 1128 if 'sni' not in security:
1145 1129 fm.plain(_(' SNI not supported by Python install; may have '
1146 1130 'connectivity issues with some servers\n'))
1147 1131
1148 1132 # TODO print CA cert info
1149 1133
1150 1134 # hg version
1151 1135 hgver = util.version()
1152 1136 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1153 1137 hgver.split('+')[0])
1154 1138 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1155 1139 '+'.join(hgver.split('+')[1:]))
1156 1140
1157 1141 # compiled modules
1158 1142 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1159 1143 policy.policy)
1160 1144 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1161 1145 os.path.dirname(pycompat.fsencode(__file__)))
1162 1146
1163 1147 if policy.policy in ('c', 'allow'):
1164 1148 err = None
1165 1149 try:
1166 1150 from .cext import (
1167 1151 base85,
1168 1152 bdiff,
1169 1153 mpatch,
1170 1154 osutil,
1171 1155 )
1172 1156 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1173 1157 except Exception as inst:
1174 1158 err = util.forcebytestr(inst)
1175 1159 problems += 1
1176 1160 fm.condwrite(err, 'extensionserror', " %s\n", err)
1177 1161
1178 1162 compengines = util.compengines._engines.values()
1179 1163 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1180 1164 fm.formatlist(sorted(e.name() for e in compengines),
1181 1165 name='compengine', fmt='%s', sep=', '))
1182 1166 fm.write('compenginesavail', _('checking available compression engines '
1183 1167 '(%s)\n'),
1184 1168 fm.formatlist(sorted(e.name() for e in compengines
1185 1169 if e.available()),
1186 1170 name='compengine', fmt='%s', sep=', '))
1187 1171 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1188 1172 fm.write('compenginesserver', _('checking available compression engines '
1189 1173 'for wire protocol (%s)\n'),
1190 1174 fm.formatlist([e.name() for e in wirecompengines
1191 1175 if e.wireprotosupport()],
1192 1176 name='compengine', fmt='%s', sep=', '))
1193 1177
1194 1178 # templates
1195 1179 p = templater.templatepaths()
1196 1180 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1197 1181 fm.condwrite(not p, '', _(" no template directories found\n"))
1198 1182 if p:
1199 1183 m = templater.templatepath("map-cmdline.default")
1200 1184 if m:
1201 1185 # template found, check if it is working
1202 1186 err = None
1203 1187 try:
1204 1188 templater.templater.frommapfile(m)
1205 1189 except Exception as inst:
1206 1190 err = util.forcebytestr(inst)
1207 1191 p = None
1208 1192 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1209 1193 else:
1210 1194 p = None
1211 1195 fm.condwrite(p, 'defaulttemplate',
1212 1196 _("checking default template (%s)\n"), m)
1213 1197 fm.condwrite(not m, 'defaulttemplatenotfound',
1214 1198 _(" template '%s' not found\n"), "default")
1215 1199 if not p:
1216 1200 problems += 1
1217 1201 fm.condwrite(not p, '',
1218 1202 _(" (templates seem to have been installed incorrectly)\n"))
1219 1203
1220 1204 # editor
1221 1205 editor = ui.geteditor()
1222 1206 editor = util.expandpath(editor)
1223 1207 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1224 1208 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1225 1209 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1226 1210 _(" No commit editor set and can't find %s in PATH\n"
1227 1211 " (specify a commit editor in your configuration"
1228 1212 " file)\n"), not cmdpath and editor == 'vi' and editor)
1229 1213 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1230 1214 _(" Can't find editor '%s' in PATH\n"
1231 1215 " (specify a commit editor in your configuration"
1232 1216 " file)\n"), not cmdpath and editor)
1233 1217 if not cmdpath and editor != 'vi':
1234 1218 problems += 1
1235 1219
1236 1220 # check username
1237 1221 username = None
1238 1222 err = None
1239 1223 try:
1240 1224 username = ui.username()
1241 1225 except error.Abort as e:
1242 1226 err = util.forcebytestr(e)
1243 1227 problems += 1
1244 1228
1245 1229 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1246 1230 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1247 1231 " (specify a username in your configuration file)\n"), err)
1248 1232
1249 1233 fm.condwrite(not problems, '',
1250 1234 _("no problems detected\n"))
1251 1235 if not problems:
1252 1236 fm.data(problems=problems)
1253 1237 fm.condwrite(problems, 'problems',
1254 1238 _("%d problems detected,"
1255 1239 " please check your install!\n"), problems)
1256 1240 fm.end()
1257 1241
1258 1242 return problems
1259 1243
1260 1244 @command('debugknown', [], _('REPO ID...'), norepo=True)
1261 1245 def debugknown(ui, repopath, *ids, **opts):
1262 1246 """test whether node ids are known to a repo
1263 1247
1264 1248 Every ID must be a full-length hex node id string. Returns a list of 0s
1265 1249 and 1s indicating unknown/known.
1266 1250 """
1267 1251 opts = pycompat.byteskwargs(opts)
1268 1252 repo = hg.peer(ui, opts, repopath)
1269 1253 if not repo.capable('known'):
1270 1254 raise error.Abort("known() not supported by target repository")
1271 1255 flags = repo.known([bin(s) for s in ids])
1272 1256 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1273 1257
1274 1258 @command('debuglabelcomplete', [], _('LABEL...'))
1275 1259 def debuglabelcomplete(ui, repo, *args):
1276 1260 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1277 1261 debugnamecomplete(ui, repo, *args)
1278 1262
1279 1263 @command('debuglocks',
1280 1264 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1281 1265 ('W', 'force-wlock', None,
1282 1266 _('free the working state lock (DANGEROUS)')),
1283 1267 ('s', 'set-lock', None, _('set the store lock until stopped')),
1284 1268 ('S', 'set-wlock', None,
1285 1269 _('set the working state lock until stopped'))],
1286 1270 _('[OPTION]...'))
1287 1271 def debuglocks(ui, repo, **opts):
1288 1272 """show or modify state of locks
1289 1273
1290 1274 By default, this command will show which locks are held. This
1291 1275 includes the user and process holding the lock, the amount of time
1292 1276 the lock has been held, and the machine name where the process is
1293 1277 running if it's not local.
1294 1278
1295 1279 Locks protect the integrity of Mercurial's data, so should be
1296 1280 treated with care. System crashes or other interruptions may cause
1297 1281 locks to not be properly released, though Mercurial will usually
1298 1282 detect and remove such stale locks automatically.
1299 1283
1300 1284 However, detecting stale locks may not always be possible (for
1301 1285 instance, on a shared filesystem). Removing locks may also be
1302 1286 blocked by filesystem permissions.
1303 1287
1304 1288 Setting a lock will prevent other commands from changing the data.
1305 1289 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1306 1290 The set locks are removed when the command exits.
1307 1291
1308 1292 Returns 0 if no locks are held.
1309 1293
1310 1294 """
1311 1295
1312 1296 if opts.get(r'force_lock'):
1313 1297 repo.svfs.unlink('lock')
1314 1298 if opts.get(r'force_wlock'):
1315 1299 repo.vfs.unlink('wlock')
1316 1300 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1317 1301 return 0
1318 1302
1319 1303 locks = []
1320 1304 try:
1321 1305 if opts.get(r'set_wlock'):
1322 1306 try:
1323 1307 locks.append(repo.wlock(False))
1324 1308 except error.LockHeld:
1325 1309 raise error.Abort(_('wlock is already held'))
1326 1310 if opts.get(r'set_lock'):
1327 1311 try:
1328 1312 locks.append(repo.lock(False))
1329 1313 except error.LockHeld:
1330 1314 raise error.Abort(_('lock is already held'))
1331 1315 if len(locks):
1332 1316 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1333 1317 return 0
1334 1318 finally:
1335 1319 release(*locks)
1336 1320
1337 1321 now = time.time()
1338 1322 held = 0
1339 1323
1340 1324 def report(vfs, name, method):
1341 1325 # this causes stale locks to get reaped for more accurate reporting
1342 1326 try:
1343 1327 l = method(False)
1344 1328 except error.LockHeld:
1345 1329 l = None
1346 1330
1347 1331 if l:
1348 1332 l.release()
1349 1333 else:
1350 1334 try:
1351 1335 stat = vfs.lstat(name)
1352 1336 age = now - stat.st_mtime
1353 1337 user = util.username(stat.st_uid)
1354 1338 locker = vfs.readlock(name)
1355 1339 if ":" in locker:
1356 1340 host, pid = locker.split(':')
1357 1341 if host == socket.gethostname():
1358 1342 locker = 'user %s, process %s' % (user, pid)
1359 1343 else:
1360 1344 locker = 'user %s, process %s, host %s' \
1361 1345 % (user, pid, host)
1362 1346 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1363 1347 return 1
1364 1348 except OSError as e:
1365 1349 if e.errno != errno.ENOENT:
1366 1350 raise
1367 1351
1368 1352 ui.write(("%-6s free\n") % (name + ":"))
1369 1353 return 0
1370 1354
1371 1355 held += report(repo.svfs, "lock", repo.lock)
1372 1356 held += report(repo.vfs, "wlock", repo.wlock)
1373 1357
1374 1358 return held
1375 1359
1376 1360 @command('debugmergestate', [], '')
1377 1361 def debugmergestate(ui, repo, *args):
1378 1362 """print merge state
1379 1363
1380 1364 Use --verbose to print out information about whether v1 or v2 merge state
1381 1365 was chosen."""
1382 1366 def _hashornull(h):
1383 1367 if h == nullhex:
1384 1368 return 'null'
1385 1369 else:
1386 1370 return h
1387 1371
1388 1372 def printrecords(version):
1389 1373 ui.write(('* version %s records\n') % version)
1390 1374 if version == 1:
1391 1375 records = v1records
1392 1376 else:
1393 1377 records = v2records
1394 1378
1395 1379 for rtype, record in records:
1396 1380 # pretty print some record types
1397 1381 if rtype == 'L':
1398 1382 ui.write(('local: %s\n') % record)
1399 1383 elif rtype == 'O':
1400 1384 ui.write(('other: %s\n') % record)
1401 1385 elif rtype == 'm':
1402 1386 driver, mdstate = record.split('\0', 1)
1403 1387 ui.write(('merge driver: %s (state "%s")\n')
1404 1388 % (driver, mdstate))
1405 1389 elif rtype in 'FDC':
1406 1390 r = record.split('\0')
1407 1391 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1408 1392 if version == 1:
1409 1393 onode = 'not stored in v1 format'
1410 1394 flags = r[7]
1411 1395 else:
1412 1396 onode, flags = r[7:9]
1413 1397 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1414 1398 % (f, rtype, state, _hashornull(hash)))
1415 1399 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1416 1400 ui.write((' ancestor path: %s (node %s)\n')
1417 1401 % (afile, _hashornull(anode)))
1418 1402 ui.write((' other path: %s (node %s)\n')
1419 1403 % (ofile, _hashornull(onode)))
1420 1404 elif rtype == 'f':
1421 1405 filename, rawextras = record.split('\0', 1)
1422 1406 extras = rawextras.split('\0')
1423 1407 i = 0
1424 1408 extrastrings = []
1425 1409 while i < len(extras):
1426 1410 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1427 1411 i += 2
1428 1412
1429 1413 ui.write(('file extras: %s (%s)\n')
1430 1414 % (filename, ', '.join(extrastrings)))
1431 1415 elif rtype == 'l':
1432 1416 labels = record.split('\0', 2)
1433 1417 labels = [l for l in labels if len(l) > 0]
1434 1418 ui.write(('labels:\n'))
1435 1419 ui.write((' local: %s\n' % labels[0]))
1436 1420 ui.write((' other: %s\n' % labels[1]))
1437 1421 if len(labels) > 2:
1438 1422 ui.write((' base: %s\n' % labels[2]))
1439 1423 else:
1440 1424 ui.write(('unrecognized entry: %s\t%s\n')
1441 1425 % (rtype, record.replace('\0', '\t')))
1442 1426
1443 1427 # Avoid mergestate.read() since it may raise an exception for unsupported
1444 1428 # merge state records. We shouldn't be doing this, but this is OK since this
1445 1429 # command is pretty low-level.
1446 1430 ms = mergemod.mergestate(repo)
1447 1431
1448 1432 # sort so that reasonable information is on top
1449 1433 v1records = ms._readrecordsv1()
1450 1434 v2records = ms._readrecordsv2()
1451 1435 order = 'LOml'
1452 1436 def key(r):
1453 1437 idx = order.find(r[0])
1454 1438 if idx == -1:
1455 1439 return (1, r[1])
1456 1440 else:
1457 1441 return (0, idx)
1458 1442 v1records.sort(key=key)
1459 1443 v2records.sort(key=key)
1460 1444
1461 1445 if not v1records and not v2records:
1462 1446 ui.write(('no merge state found\n'))
1463 1447 elif not v2records:
1464 1448 ui.note(('no version 2 merge state\n'))
1465 1449 printrecords(1)
1466 1450 elif ms._v1v2match(v1records, v2records):
1467 1451 ui.note(('v1 and v2 states match: using v2\n'))
1468 1452 printrecords(2)
1469 1453 else:
1470 1454 ui.note(('v1 and v2 states mismatch: using v1\n'))
1471 1455 printrecords(1)
1472 1456 if ui.verbose:
1473 1457 printrecords(2)
1474 1458
1475 1459 @command('debugnamecomplete', [], _('NAME...'))
1476 1460 def debugnamecomplete(ui, repo, *args):
1477 1461 '''complete "names" - tags, open branch names, bookmark names'''
1478 1462
1479 1463 names = set()
1480 1464 # since we previously only listed open branches, we will handle that
1481 1465 # specially (after this for loop)
1482 1466 for name, ns in repo.names.iteritems():
1483 1467 if name != 'branches':
1484 1468 names.update(ns.listnames(repo))
1485 1469 names.update(tag for (tag, heads, tip, closed)
1486 1470 in repo.branchmap().iterbranches() if not closed)
1487 1471 completions = set()
1488 1472 if not args:
1489 1473 args = ['']
1490 1474 for a in args:
1491 1475 completions.update(n for n in names if n.startswith(a))
1492 1476 ui.write('\n'.join(sorted(completions)))
1493 1477 ui.write('\n')
1494 1478
1495 1479 @command('debugobsolete',
1496 1480 [('', 'flags', 0, _('markers flag')),
1497 1481 ('', 'record-parents', False,
1498 1482 _('record parent information for the precursor')),
1499 1483 ('r', 'rev', [], _('display markers relevant to REV')),
1500 1484 ('', 'exclusive', False, _('restrict display to markers only '
1501 1485 'relevant to REV')),
1502 1486 ('', 'index', False, _('display index of the marker')),
1503 1487 ('', 'delete', [], _('delete markers specified by indices')),
1504 1488 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1505 1489 _('[OBSOLETED [REPLACEMENT ...]]'))
1506 1490 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1507 1491 """create arbitrary obsolete marker
1508 1492
1509 1493 With no arguments, displays the list of obsolescence markers."""
1510 1494
1511 1495 opts = pycompat.byteskwargs(opts)
1512 1496
1513 1497 def parsenodeid(s):
1514 1498 try:
1515 1499 # We do not use revsingle/revrange functions here to accept
1516 1500 # arbitrary node identifiers, possibly not present in the
1517 1501 # local repository.
1518 1502 n = bin(s)
1519 1503 if len(n) != len(nullid):
1520 1504 raise TypeError()
1521 1505 return n
1522 1506 except TypeError:
1523 1507 raise error.Abort('changeset references must be full hexadecimal '
1524 1508 'node identifiers')
1525 1509
1526 1510 if opts.get('delete'):
1527 1511 indices = []
1528 1512 for v in opts.get('delete'):
1529 1513 try:
1530 1514 indices.append(int(v))
1531 1515 except ValueError:
1532 1516 raise error.Abort(_('invalid index value: %r') % v,
1533 1517 hint=_('use integers for indices'))
1534 1518
1535 1519 if repo.currenttransaction():
1536 1520 raise error.Abort(_('cannot delete obsmarkers in the middle '
1537 1521 'of a transaction.'))
1538 1522
1539 1523 with repo.lock():
1540 1524 n = repair.deleteobsmarkers(repo.obsstore, indices)
1541 1525 ui.write(_('deleted %i obsolescence markers\n') % n)
1542 1526
1543 1527 return
1544 1528
1545 1529 if precursor is not None:
1546 1530 if opts['rev']:
1547 1531 raise error.Abort('cannot select revision when creating marker')
1548 1532 metadata = {}
1549 1533 metadata['user'] = opts['user'] or ui.username()
1550 1534 succs = tuple(parsenodeid(succ) for succ in successors)
1551 1535 l = repo.lock()
1552 1536 try:
1553 1537 tr = repo.transaction('debugobsolete')
1554 1538 try:
1555 1539 date = opts.get('date')
1556 1540 if date:
1557 1541 date = util.parsedate(date)
1558 1542 else:
1559 1543 date = None
1560 1544 prec = parsenodeid(precursor)
1561 1545 parents = None
1562 1546 if opts['record_parents']:
1563 1547 if prec not in repo.unfiltered():
1564 1548 raise error.Abort('cannot use --record-parents on '
1565 1549 'unknown changesets')
1566 1550 parents = repo.unfiltered()[prec].parents()
1567 1551 parents = tuple(p.node() for p in parents)
1568 1552 repo.obsstore.create(tr, prec, succs, opts['flags'],
1569 1553 parents=parents, date=date,
1570 1554 metadata=metadata, ui=ui)
1571 1555 tr.close()
1572 1556 except ValueError as exc:
1573 1557 raise error.Abort(_('bad obsmarker input: %s') % exc)
1574 1558 finally:
1575 1559 tr.release()
1576 1560 finally:
1577 1561 l.release()
1578 1562 else:
1579 1563 if opts['rev']:
1580 1564 revs = scmutil.revrange(repo, opts['rev'])
1581 1565 nodes = [repo[r].node() for r in revs]
1582 1566 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1583 1567 exclusive=opts['exclusive']))
1584 1568 markers.sort(key=lambda x: x._data)
1585 1569 else:
1586 1570 markers = obsutil.getmarkers(repo)
1587 1571
1588 1572 markerstoiter = markers
1589 1573 isrelevant = lambda m: True
1590 1574 if opts.get('rev') and opts.get('index'):
1591 1575 markerstoiter = obsutil.getmarkers(repo)
1592 1576 markerset = set(markers)
1593 1577 isrelevant = lambda m: m in markerset
1594 1578
1595 1579 fm = ui.formatter('debugobsolete', opts)
1596 1580 for i, m in enumerate(markerstoiter):
1597 1581 if not isrelevant(m):
1598 1582 # a marker can be irrelevant when we're iterating over a set
1599 1583 # of markers (markerstoiter) which is bigger than the set
1600 1584 # of markers we want to display (markers).
1601 1585 # This can happen if both --index and --rev options are
1602 1586 # provided: we then need to iterate over all of the markers
1603 1587 # to get the correct indices, but only display the ones
1604 1588 # relevant to the --rev value.
1605 1589 continue
1606 1590 fm.startitem()
1607 1591 ind = i if opts.get('index') else None
1608 1592 cmdutil.showmarker(fm, m, index=ind)
1609 1593 fm.end()
1610 1594
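# Illustrative sketch (not part of the original module) of the --index/--rev
# interaction handled in debugobsolete above: enumerate the full sequence so
# indices stay stable, but keep only the entries selected for display. The
# helper name and sample data are hypothetical.
def _demofilteredenumerate(allmarkers, displayed):
    displayedset = set(displayed)
    return [(i, m) for i, m in enumerate(allmarkers) if m in displayedset]

# _demofilteredenumerate(['m0', 'm1', 'm2', 'm3'], ['m1', 'm3'])
# -> [(1, 'm1'), (3, 'm3')]
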
1611 1595 @command('debugpathcomplete',
1612 1596 [('f', 'full', None, _('complete an entire path')),
1613 1597 ('n', 'normal', None, _('show only normal files')),
1614 1598 ('a', 'added', None, _('show only added files')),
1615 1599 ('r', 'removed', None, _('show only removed files'))],
1616 1600 _('FILESPEC...'))
1617 1601 def debugpathcomplete(ui, repo, *specs, **opts):
1618 1602 '''complete part or all of a tracked path
1619 1603
1620 1604 This command supports shells that offer path name completion. It
1621 1605 currently completes only files already known to the dirstate.
1622 1606
1623 1607 Completion extends only to the next path segment unless
1624 1608 --full is specified, in which case entire paths are used.'''
1625 1609
1626 1610 def complete(path, acceptable):
1627 1611 dirstate = repo.dirstate
1628 1612 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1629 1613 rootdir = repo.root + pycompat.ossep
1630 1614 if spec != repo.root and not spec.startswith(rootdir):
1631 1615 return [], []
1632 1616 if os.path.isdir(spec):
1633 1617 spec += '/'
1634 1618 spec = spec[len(rootdir):]
1635 1619 fixpaths = pycompat.ossep != '/'
1636 1620 if fixpaths:
1637 1621 spec = spec.replace(pycompat.ossep, '/')
1638 1622 speclen = len(spec)
1639 1623 fullpaths = opts[r'full']
1640 1624 files, dirs = set(), set()
1641 1625 adddir, addfile = dirs.add, files.add
1642 1626 for f, st in dirstate.iteritems():
1643 1627 if f.startswith(spec) and st[0] in acceptable:
1644 1628 if fixpaths:
1645 1629 f = f.replace('/', pycompat.ossep)
1646 1630 if fullpaths:
1647 1631 addfile(f)
1648 1632 continue
1649 1633 s = f.find(pycompat.ossep, speclen)
1650 1634 if s >= 0:
1651 1635 adddir(f[:s])
1652 1636 else:
1653 1637 addfile(f)
1654 1638 return files, dirs
1655 1639
1656 1640 acceptable = ''
1657 1641 if opts[r'normal']:
1658 1642 acceptable += 'nm'
1659 1643 if opts[r'added']:
1660 1644 acceptable += 'a'
1661 1645 if opts[r'removed']:
1662 1646 acceptable += 'r'
1663 1647 cwd = repo.getcwd()
1664 1648 if not specs:
1665 1649 specs = ['.']
1666 1650
1667 1651 files, dirs = set(), set()
1668 1652 for spec in specs:
1669 1653 f, d = complete(spec, acceptable or 'nmar')
1670 1654 files.update(f)
1671 1655 dirs.update(d)
1672 1656 files.update(dirs)
1673 1657 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1674 1658 ui.write('\n')
1675 1659
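# Illustrative sketch (not part of the original module) of the completion
# rule implemented in debugpathcomplete above: matches are cut at the next
# path segment unless full paths are requested. Uses '/' separators and
# hypothetical file names; the real command also filters on dirstate status.
def _demonextsegment(spec, tracked, full=False):
    files, dirs = set(), set()
    for f in tracked:
        if not f.startswith(spec):
            continue
        if full:
            files.add(f)
            continue
        s = f.find('/', len(spec))
        if s >= 0:
            dirs.add(f[:s])
        else:
            files.add(f)
    return sorted(files | dirs)

# _demonextsegment('te', ['tests/test-a.t', 'tests/test-b.t', 'templates/map'])
# -> ['templates', 'tests']
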
1676 1660 @command('debugpickmergetool',
1677 1661 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1678 1662 ('', 'changedelete', None, _('emulate merging change and delete')),
1679 1663 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1680 1664 _('[PATTERN]...'),
1681 1665 inferrepo=True)
1682 1666 def debugpickmergetool(ui, repo, *pats, **opts):
1683 1667 """examine which merge tool is chosen for the specified file
1684 1668
1685 1669 As described in :hg:`help merge-tools`, Mercurial examines the
1686 1670 configurations below in this order to decide which merge tool is
1687 1671 chosen for the specified file.
1688 1672
1689 1673 1. ``--tool`` option
1690 1674 2. ``HGMERGE`` environment variable
1691 1675 3. configurations in ``merge-patterns`` section
1692 1676 4. configuration of ``ui.merge``
1693 1677 5. configurations in ``merge-tools`` section
1694 1678 6. ``hgmerge`` tool (for historical reasons only)
1695 1679 7. default tool for fallback (``:merge`` or ``:prompt``)
1696 1680
1697 1681 This command writes out the examination result in the style below::
1698 1682
1699 1683 FILE = MERGETOOL
1700 1684
1701 1685 By default, all files known in the first parent context of the
1702 1686 working directory are examined. Use file patterns and/or -I/-X
1703 1687 options to limit target files. -r/--rev is also useful to examine
1704 1688 files in another context without actually updating to it.
1705 1689
1706 1690 With --debug, this command also shows warning messages emitted while
1707 1691 matching against ``merge-patterns`` and so on. It is recommended to
1708 1692 use this option with explicit file patterns and/or -I/-X options,
1709 1693 because this option increases the amount of output per file according
1710 1694 to the configurations in hgrc.
1711 1695
1712 1696 With -v/--verbose, this command first shows the configurations below
1713 1697 (only if they are specified).
1714 1698
1715 1699 - ``--tool`` option
1716 1700 - ``HGMERGE`` environment variable
1717 1701 - configuration of ``ui.merge``
1718 1702
1719 1703 If a merge tool is chosen before matching against
1720 1704 ``merge-patterns``, this command can't show any helpful
1721 1705 information, even with --debug. In such a case, the information above
1722 1706 is useful for understanding why that merge tool was chosen.
1723 1707 """
1724 1708 opts = pycompat.byteskwargs(opts)
1725 1709 overrides = {}
1726 1710 if opts['tool']:
1727 1711 overrides[('ui', 'forcemerge')] = opts['tool']
1728 1712 ui.note(('with --tool %r\n') % (opts['tool']))
1729 1713
1730 1714 with ui.configoverride(overrides, 'debugmergepatterns'):
1731 1715 hgmerge = encoding.environ.get("HGMERGE")
1732 1716 if hgmerge is not None:
1733 1717 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1734 1718 uimerge = ui.config("ui", "merge")
1735 1719 if uimerge:
1736 1720 ui.note(('with ui.merge=%r\n') % (uimerge))
1737 1721
1738 1722 ctx = scmutil.revsingle(repo, opts.get('rev'))
1739 1723 m = scmutil.match(ctx, pats, opts)
1740 1724 changedelete = opts['changedelete']
1741 1725 for path in ctx.walk(m):
1742 1726 fctx = ctx[path]
1743 1727 try:
1744 1728 if not ui.debugflag:
1745 1729 ui.pushbuffer(error=True)
1746 1730 tool, toolpath = filemerge._picktool(repo, ui, path,
1747 1731 fctx.isbinary(),
1748 1732 'l' in fctx.flags(),
1749 1733 changedelete)
1750 1734 finally:
1751 1735 if not ui.debugflag:
1752 1736 ui.popbuffer()
1753 1737 ui.write(('%s = %s\n') % (path, tool))
1754 1738
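# Simplified, hypothetical sketch of the precedence described in the
# debugpickmergetool docstring above: the first configured source wins.
# This deliberately ignores merge-patterns matching, tool validity checks
# and the binary/symlink/change-delete handling done by the real filemerge
# code; names and values below are assumptions for illustration only.
def _demotoolprecedence(forcetool=None, hgmerge=None, uimerge=None,
                        fallback=':merge'):
    for source, value in [('--tool', forcetool),
                          ('HGMERGE', hgmerge),
                          ('ui.merge', uimerge)]:
        if value:
            return source, value
    return 'fallback', fallback

# _demotoolprecedence(hgmerge='vimdiff') -> ('HGMERGE', 'vimdiff')
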
1755 1739 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1756 1740 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1757 1741 '''access the pushkey key/value protocol
1758 1742
1759 1743 With two args, list the keys in the given namespace.
1760 1744
1761 1745 With five args, set a key to new if it currently is set to old.
1762 1746 Reports success or failure.
1763 1747 '''
1764 1748
1765 1749 target = hg.peer(ui, {}, repopath)
1766 1750 if keyinfo:
1767 1751 key, old, new = keyinfo
1768 1752 r = target.pushkey(namespace, key, old, new)
1769 1753 ui.status(str(r) + '\n')
1770 1754 return not r
1771 1755 else:
1772 1756 for k, v in sorted(target.listkeys(namespace).iteritems()):
1773 1757 ui.write("%s\t%s\n" % (util.escapestr(k),
1774 1758 util.escapestr(v)))
1775 1759
1776 1760 @command('debugpvec', [], _('A B'))
1777 1761 def debugpvec(ui, repo, a, b=None):
1778 1762 ca = scmutil.revsingle(repo, a)
1779 1763 cb = scmutil.revsingle(repo, b)
1780 1764 pa = pvec.ctxpvec(ca)
1781 1765 pb = pvec.ctxpvec(cb)
1782 1766 if pa == pb:
1783 1767 rel = "="
1784 1768 elif pa > pb:
1785 1769 rel = ">"
1786 1770 elif pa < pb:
1787 1771 rel = "<"
1788 1772 elif pa | pb:
1789 1773 rel = "|"
1790 1774 ui.write(_("a: %s\n") % pa)
1791 1775 ui.write(_("b: %s\n") % pb)
1792 1776 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1793 1777 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1794 1778 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1795 1779 pa.distance(pb), rel))
1796 1780
1797 1781 @command('debugrebuilddirstate|debugrebuildstate',
1798 1782 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1799 1783 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1800 1784 'the working copy parent')),
1801 1785 ],
1802 1786 _('[-r REV]'))
1803 1787 def debugrebuilddirstate(ui, repo, rev, **opts):
1804 1788 """rebuild the dirstate as it would look like for the given revision
1805 1789
1806 1790 If no revision is specified, the current first parent will be used.
1807 1791
1808 1792 The dirstate will be set to the files of the given revision.
1809 1793 The actual working directory content or existing dirstate
1810 1794 information such as adds or removes is not considered.
1811 1795
1812 1796 ``minimal`` will only rebuild the dirstate status for files that claim to be
1813 1797 tracked but are not in the parent manifest, or that exist in the parent
1814 1798 manifest but are not in the dirstate. It will not change adds, removes, or
1815 1799 modified files that are in the working copy parent.
1816 1800
1817 1801 One use of this command is to make the next :hg:`status` invocation
1818 1802 check the actual file content.
1819 1803 """
1820 1804 ctx = scmutil.revsingle(repo, rev)
1821 1805 with repo.wlock():
1822 1806 dirstate = repo.dirstate
1823 1807 changedfiles = None
1824 1808 # See command doc for what minimal does.
1825 1809 if opts.get(r'minimal'):
1826 1810 manifestfiles = set(ctx.manifest().keys())
1827 1811 dirstatefiles = set(dirstate)
1828 1812 manifestonly = manifestfiles - dirstatefiles
1829 1813 dsonly = dirstatefiles - manifestfiles
1830 1814 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1831 1815 changedfiles = manifestonly | dsnotadded
1832 1816
1833 1817 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1834 1818
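# Illustrative sketch (not part of the original module) of the set
# arithmetic behind --minimal in debugrebuilddirstate above: only files
# present in exactly one of the manifest and the dirstate (and, on the
# dirstate side, not merely added) are rebuilt. Names and states below are
# hypothetical.
def _demominimalchanges(manifestfiles, dirstatestates):
    manifestonly = set(manifestfiles) - set(dirstatestates)
    dsonly = set(dirstatestates) - set(manifestfiles)
    dsnotadded = set(f for f in dsonly if dirstatestates[f] != 'a')
    return manifestonly | dsnotadded

# _demominimalchanges({'a', 'b'}, {'b': 'n', 'c': 'a', 'd': 'r'})
# -> {'a', 'd'}
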
1835 1819 @command('debugrebuildfncache', [], '')
1836 1820 def debugrebuildfncache(ui, repo):
1837 1821 """rebuild the fncache file"""
1838 1822 repair.rebuildfncache(ui, repo)
1839 1823
1840 1824 @command('debugrename',
1841 1825 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1842 1826 _('[-r REV] FILE'))
1843 1827 def debugrename(ui, repo, file1, *pats, **opts):
1844 1828 """dump rename information"""
1845 1829
1846 1830 opts = pycompat.byteskwargs(opts)
1847 1831 ctx = scmutil.revsingle(repo, opts.get('rev'))
1848 1832 m = scmutil.match(ctx, (file1,) + pats, opts)
1849 1833 for abs in ctx.walk(m):
1850 1834 fctx = ctx[abs]
1851 1835 o = fctx.filelog().renamed(fctx.filenode())
1852 1836 rel = m.rel(abs)
1853 1837 if o:
1854 1838 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1855 1839 else:
1856 1840 ui.write(_("%s not renamed\n") % rel)
1857 1841
1858 1842 @command('debugrevlog', cmdutil.debugrevlogopts +
1859 1843 [('d', 'dump', False, _('dump index data'))],
1860 1844 _('-c|-m|FILE'),
1861 1845 optionalrepo=True)
1862 1846 def debugrevlog(ui, repo, file_=None, **opts):
1863 1847 """show data and statistics about a revlog"""
1864 1848 opts = pycompat.byteskwargs(opts)
1865 1849 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1866 1850
1867 1851 if opts.get("dump"):
1868 1852 numrevs = len(r)
1869 1853 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1870 1854 " rawsize totalsize compression heads chainlen\n"))
1871 1855 ts = 0
1872 1856 heads = set()
1873 1857
1874 1858 for rev in xrange(numrevs):
1875 1859 dbase = r.deltaparent(rev)
1876 1860 if dbase == -1:
1877 1861 dbase = rev
1878 1862 cbase = r.chainbase(rev)
1879 1863 clen = r.chainlen(rev)
1880 1864 p1, p2 = r.parentrevs(rev)
1881 1865 rs = r.rawsize(rev)
1882 1866 ts = ts + rs
1883 1867 heads -= set(r.parentrevs(rev))
1884 1868 heads.add(rev)
1885 1869 try:
1886 1870 compression = ts / r.end(rev)
1887 1871 except ZeroDivisionError:
1888 1872 compression = 0
1889 1873 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1890 1874 "%11d %5d %8d\n" %
1891 1875 (rev, p1, p2, r.start(rev), r.end(rev),
1892 1876 r.start(dbase), r.start(cbase),
1893 1877 r.start(p1), r.start(p2),
1894 1878 rs, ts, compression, len(heads), clen))
1895 1879 return 0
1896 1880
1897 1881 v = r.version
1898 1882 format = v & 0xFFFF
1899 1883 flags = []
1900 1884 gdelta = False
1901 1885 if v & revlog.FLAG_INLINE_DATA:
1902 1886 flags.append('inline')
1903 1887 if v & revlog.FLAG_GENERALDELTA:
1904 1888 gdelta = True
1905 1889 flags.append('generaldelta')
1906 1890 if not flags:
1907 1891 flags = ['(none)']
1908 1892
1909 1893 nummerges = 0
1910 1894 numfull = 0
1911 1895 numprev = 0
1912 1896 nump1 = 0
1913 1897 nump2 = 0
1914 1898 numother = 0
1915 1899 nump1prev = 0
1916 1900 nump2prev = 0
1917 1901 chainlengths = []
1918 1902 chainbases = []
1919 1903 chainspans = []
1920 1904
1921 1905 datasize = [None, 0, 0]
1922 1906 fullsize = [None, 0, 0]
1923 1907 deltasize = [None, 0, 0]
1924 1908 chunktypecounts = {}
1925 1909 chunktypesizes = {}
1926 1910
1927 1911 def addsize(size, l):
1928 1912 if l[0] is None or size < l[0]:
1929 1913 l[0] = size
1930 1914 if size > l[1]:
1931 1915 l[1] = size
1932 1916 l[2] += size
1933 1917
1934 1918 numrevs = len(r)
1935 1919 for rev in xrange(numrevs):
1936 1920 p1, p2 = r.parentrevs(rev)
1937 1921 delta = r.deltaparent(rev)
1938 1922 if format > 0:
1939 1923 addsize(r.rawsize(rev), datasize)
1940 1924 if p2 != nullrev:
1941 1925 nummerges += 1
1942 1926 size = r.length(rev)
1943 1927 if delta == nullrev:
1944 1928 chainlengths.append(0)
1945 1929 chainbases.append(r.start(rev))
1946 1930 chainspans.append(size)
1947 1931 numfull += 1
1948 1932 addsize(size, fullsize)
1949 1933 else:
1950 1934 chainlengths.append(chainlengths[delta] + 1)
1951 1935 baseaddr = chainbases[delta]
1952 1936 revaddr = r.start(rev)
1953 1937 chainbases.append(baseaddr)
1954 1938 chainspans.append((revaddr - baseaddr) + size)
1955 1939 addsize(size, deltasize)
1956 1940 if delta == rev - 1:
1957 1941 numprev += 1
1958 1942 if delta == p1:
1959 1943 nump1prev += 1
1960 1944 elif delta == p2:
1961 1945 nump2prev += 1
1962 1946 elif delta == p1:
1963 1947 nump1 += 1
1964 1948 elif delta == p2:
1965 1949 nump2 += 1
1966 1950 elif delta != nullrev:
1967 1951 numother += 1
1968 1952
1969 1953 # Obtain data on the raw chunks in the revlog.
1970 1954 segment = r._getsegmentforrevs(rev, rev)[1]
1971 1955 if segment:
1972 1956 chunktype = bytes(segment[0:1])
1973 1957 else:
1974 1958 chunktype = 'empty'
1975 1959
1976 1960 if chunktype not in chunktypecounts:
1977 1961 chunktypecounts[chunktype] = 0
1978 1962 chunktypesizes[chunktype] = 0
1979 1963
1980 1964 chunktypecounts[chunktype] += 1
1981 1965 chunktypesizes[chunktype] += size
1982 1966
1983 1967 # Adjust size min value for empty cases
1984 1968 for size in (datasize, fullsize, deltasize):
1985 1969 if size[0] is None:
1986 1970 size[0] = 0
1987 1971
1988 1972 numdeltas = numrevs - numfull
1989 1973 numoprev = numprev - nump1prev - nump2prev
1990 1974 totalrawsize = datasize[2]
1991 1975 datasize[2] /= numrevs
1992 1976 fulltotal = fullsize[2]
1993 1977 fullsize[2] /= numfull
1994 1978 deltatotal = deltasize[2]
1995 1979 if numrevs - numfull > 0:
1996 1980 deltasize[2] /= numrevs - numfull
1997 1981 totalsize = fulltotal + deltatotal
1998 1982 avgchainlen = sum(chainlengths) / numrevs
1999 1983 maxchainlen = max(chainlengths)
2000 1984 maxchainspan = max(chainspans)
2001 1985 compratio = 1
2002 1986 if totalsize:
2003 1987 compratio = totalrawsize / totalsize
2004 1988
2005 1989 basedfmtstr = '%%%dd\n'
2006 1990 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2007 1991
2008 1992 def dfmtstr(max):
2009 1993 return basedfmtstr % len(str(max))
2010 1994 def pcfmtstr(max, padding=0):
2011 1995 return basepcfmtstr % (len(str(max)), ' ' * padding)
2012 1996
2013 1997 def pcfmt(value, total):
2014 1998 if total:
2015 1999 return (value, 100 * float(value) / total)
2016 2000 else:
2017 2001 return value, 100.0
2018 2002
2019 2003 ui.write(('format : %d\n') % format)
2020 2004 ui.write(('flags : %s\n') % ', '.join(flags))
2021 2005
2022 2006 ui.write('\n')
2023 2007 fmt = pcfmtstr(totalsize)
2024 2008 fmt2 = dfmtstr(totalsize)
2025 2009 ui.write(('revisions : ') + fmt2 % numrevs)
2026 2010 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2027 2011 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2028 2012 ui.write(('revisions : ') + fmt2 % numrevs)
2029 2013 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2030 2014 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2031 2015 ui.write(('revision size : ') + fmt2 % totalsize)
2032 2016 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2033 2017 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2034 2018
2035 2019 def fmtchunktype(chunktype):
2036 2020 if chunktype == 'empty':
2037 2021 return ' %s : ' % chunktype
2038 2022 elif chunktype in pycompat.bytestr(string.ascii_letters):
2039 2023 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2040 2024 else:
2041 2025 return ' 0x%s : ' % hex(chunktype)
2042 2026
2043 2027 ui.write('\n')
2044 2028 ui.write(('chunks : ') + fmt2 % numrevs)
2045 2029 for chunktype in sorted(chunktypecounts):
2046 2030 ui.write(fmtchunktype(chunktype))
2047 2031 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2048 2032 ui.write(('chunks size : ') + fmt2 % totalsize)
2049 2033 for chunktype in sorted(chunktypecounts):
2050 2034 ui.write(fmtchunktype(chunktype))
2051 2035 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2052 2036
2053 2037 ui.write('\n')
2054 2038 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2055 2039 ui.write(('avg chain length : ') + fmt % avgchainlen)
2056 2040 ui.write(('max chain length : ') + fmt % maxchainlen)
2057 2041 ui.write(('max chain reach : ') + fmt % maxchainspan)
2058 2042 ui.write(('compression ratio : ') + fmt % compratio)
2059 2043
2060 2044 if format > 0:
2061 2045 ui.write('\n')
2062 2046 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2063 2047 % tuple(datasize))
2064 2048 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2065 2049 % tuple(fullsize))
2066 2050 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2067 2051 % tuple(deltasize))
2068 2052
2069 2053 if numdeltas > 0:
2070 2054 ui.write('\n')
2071 2055 fmt = pcfmtstr(numdeltas)
2072 2056 fmt2 = pcfmtstr(numdeltas, 4)
2073 2057 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2074 2058 if numprev > 0:
2075 2059 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2076 2060 numprev))
2077 2061 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2078 2062 numprev))
2079 2063 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2080 2064 numprev))
2081 2065 if gdelta:
2082 2066 ui.write(('deltas against p1 : ')
2083 2067 + fmt % pcfmt(nump1, numdeltas))
2084 2068 ui.write(('deltas against p2 : ')
2085 2069 + fmt % pcfmt(nump2, numdeltas))
2086 2070 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2087 2071 numdeltas))
2088 2072
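# Illustrative sketch (not part of the original module) of the formatting
# helpers in debugrevlog above: pcfmt() pairs a value with its percentage
# of a total, and the computed format string pads the value to the width
# of the largest expected number (padding handling is omitted here).
def _demopcfmt(value, total):
    if total:
        return (value, 100 * float(value) / total)
    return (value, 100.0)

# fmt = '%%%dd (%%5.2f%%%%)\n' % len(str(1000))   # -> '%4d (%5.2f%%)\n'
# fmt % _demopcfmt(250, 1000)                     # -> ' 250 (25.00%)\n'
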
2089 2073 @command('debugrevspec',
2090 2074 [('', 'optimize', None,
2091 2075 _('print parsed tree after optimizing (DEPRECATED)')),
2092 2076 ('', 'show-revs', True, _('print list of result revisions (default)')),
2093 2077 ('s', 'show-set', None, _('print internal representation of result set')),
2094 2078 ('p', 'show-stage', [],
2095 2079 _('print parsed tree at the given stage'), _('NAME')),
2096 2080 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2097 2081 ('', 'verify-optimized', False, _('verify optimized result')),
2098 2082 ],
2099 2083 ('REVSPEC'))
2100 2084 def debugrevspec(ui, repo, expr, **opts):
2101 2085 """parse and apply a revision specification
2102 2086
2103 2087 Use the -p/--show-stage option to print the parsed tree at the given stages.
2104 2088 Use -p all to print the tree at every stage.
2105 2089
2106 2090 Use the --no-show-revs option with -s or -p to print only the set
2107 2091 representation or the parsed tree respectively.
2108 2092
2109 2093 Use --verify-optimized to compare the optimized result with the unoptimized
2110 2094 one. Returns 1 if the optimized result differs.
2111 2095 """
2112 2096 opts = pycompat.byteskwargs(opts)
2113 2097 aliases = ui.configitems('revsetalias')
2114 2098 stages = [
2115 2099 ('parsed', lambda tree: tree),
2116 2100 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2117 2101 ui.warn)),
2118 2102 ('concatenated', revsetlang.foldconcat),
2119 2103 ('analyzed', revsetlang.analyze),
2120 2104 ('optimized', revsetlang.optimize),
2121 2105 ]
2122 2106 if opts['no_optimized']:
2123 2107 stages = stages[:-1]
2124 2108 if opts['verify_optimized'] and opts['no_optimized']:
2125 2109 raise error.Abort(_('cannot use --verify-optimized with '
2126 2110 '--no-optimized'))
2127 2111 stagenames = set(n for n, f in stages)
2128 2112
2129 2113 showalways = set()
2130 2114 showchanged = set()
2131 2115 if ui.verbose and not opts['show_stage']:
2132 2116 # show parsed tree by --verbose (deprecated)
2133 2117 showalways.add('parsed')
2134 2118 showchanged.update(['expanded', 'concatenated'])
2135 2119 if opts['optimize']:
2136 2120 showalways.add('optimized')
2137 2121 if opts['show_stage'] and opts['optimize']:
2138 2122 raise error.Abort(_('cannot use --optimize with --show-stage'))
2139 2123 if opts['show_stage'] == ['all']:
2140 2124 showalways.update(stagenames)
2141 2125 else:
2142 2126 for n in opts['show_stage']:
2143 2127 if n not in stagenames:
2144 2128 raise error.Abort(_('invalid stage name: %s') % n)
2145 2129 showalways.update(opts['show_stage'])
2146 2130
2147 2131 treebystage = {}
2148 2132 printedtree = None
2149 2133 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2150 2134 for n, f in stages:
2151 2135 treebystage[n] = tree = f(tree)
2152 2136 if n in showalways or (n in showchanged and tree != printedtree):
2153 2137 if opts['show_stage'] or n != 'parsed':
2154 2138 ui.write(("* %s:\n") % n)
2155 2139 ui.write(revsetlang.prettyformat(tree), "\n")
2156 2140 printedtree = tree
2157 2141
2158 2142 if opts['verify_optimized']:
2159 2143 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2160 2144 brevs = revset.makematcher(treebystage['optimized'])(repo)
2161 2145 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2162 2146 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2163 2147 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2164 2148 arevs = list(arevs)
2165 2149 brevs = list(brevs)
2166 2150 if arevs == brevs:
2167 2151 return 0
2168 2152 ui.write(('--- analyzed\n'), label='diff.file_a')
2169 2153 ui.write(('+++ optimized\n'), label='diff.file_b')
2170 2154 sm = difflib.SequenceMatcher(None, arevs, brevs)
2171 2155 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2172 2156 if tag in ('delete', 'replace'):
2173 2157 for c in arevs[alo:ahi]:
2174 2158 ui.write('-%s\n' % c, label='diff.deleted')
2175 2159 if tag in ('insert', 'replace'):
2176 2160 for c in brevs[blo:bhi]:
2177 2161 ui.write('+%s\n' % c, label='diff.inserted')
2178 2162 if tag == 'equal':
2179 2163 for c in arevs[alo:ahi]:
2180 2164 ui.write(' %s\n' % c)
2181 2165 return 1
2182 2166
2183 2167 func = revset.makematcher(tree)
2184 2168 revs = func(repo)
2185 2169 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2186 2170 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2187 2171 if not opts['show_revs']:
2188 2172 return
2189 2173 for c in revs:
2190 2174 ui.write("%s\n" % c)
2191 2175
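# Illustrative sketch (not part of the original module) of the staged
# processing loop in debugrevspec above: each stage rewrites the tree, and a
# stage is printed when it is explicitly requested or when it changed the
# result. Stage names and functions here are hypothetical stand-ins.
def _demostages(value, stages, showalways=(), showchanged=()):
    shown = []
    printedtree = None
    for name, f in stages:
        value = f(value)
        if name in showalways or (name in showchanged and value != printedtree):
            shown.append((name, value))
            printedtree = value
    return value, shown

# _demostages('x', [('parsed', str.strip), ('optimized', str.upper)],
#             showalways={'optimized'})
# -> ('X', [('optimized', 'X')])
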
2192 2176 @command('debugsetparents', [], _('REV1 [REV2]'))
2193 2177 def debugsetparents(ui, repo, rev1, rev2=None):
2194 2178 """manually set the parents of the current working directory
2195 2179
2196 2180 This is useful for writing repository conversion tools, but should
2197 2181 be used with care. For example, neither the working directory nor the
2198 2182 dirstate is updated, so file status may be incorrect after running this
2199 2183 command.
2200 2184
2201 2185 Returns 0 on success.
2202 2186 """
2203 2187
2204 2188 r1 = scmutil.revsingle(repo, rev1).node()
2205 2189 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2206 2190
2207 2191 with repo.wlock():
2208 2192 repo.setparents(r1, r2)
2209 2193
2210 2194 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2211 2195 def debugssl(ui, repo, source=None, **opts):
2212 2196 '''test a secure connection to a server
2213 2197
2214 2198 This builds the certificate chain for the server on Windows, installing the
2215 2199 missing intermediates and trusted root via Windows Update if necessary. It
2216 2200 does nothing on other platforms.
2217 2201
2218 2202 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2219 2203 that server is used. See :hg:`help urls` for more information.
2220 2204
2221 2205 If the update succeeds, retry the original operation. Otherwise, the cause
2222 2206 of the SSL error is likely another issue.
2223 2207 '''
2224 2208 if not pycompat.iswindows:
2225 2209 raise error.Abort(_('certificate chain building is only possible on '
2226 2210 'Windows'))
2227 2211
2228 2212 if not source:
2229 2213 if not repo:
2230 2214 raise error.Abort(_("there is no Mercurial repository here, and no "
2231 2215 "server specified"))
2232 2216 source = "default"
2233 2217
2234 2218 source, branches = hg.parseurl(ui.expandpath(source))
2235 2219 url = util.url(source)
2236 2220 addr = None
2237 2221
2238 2222 if url.scheme == 'https':
2239 2223 addr = (url.host, url.port or 443)
2240 2224 elif url.scheme == 'ssh':
2241 2225 addr = (url.host, url.port or 22)
2242 2226 else:
2243 2227 raise error.Abort(_("only https and ssh connections are supported"))
2244 2228
2245 2229 from . import win32
2246 2230
2247 2231 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2248 2232 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2249 2233
2250 2234 try:
2251 2235 s.connect(addr)
2252 2236 cert = s.getpeercert(True)
2253 2237
2254 2238 ui.status(_('checking the certificate chain for %s\n') % url.host)
2255 2239
2256 2240 complete = win32.checkcertificatechain(cert, build=False)
2257 2241
2258 2242 if not complete:
2259 2243 ui.status(_('certificate chain is incomplete, updating... '))
2260 2244
2261 2245 if not win32.checkcertificatechain(cert):
2262 2246 ui.status(_('failed.\n'))
2263 2247 else:
2264 2248 ui.status(_('done.\n'))
2265 2249 else:
2266 2250 ui.status(_('full certificate chain is available\n'))
2267 2251 finally:
2268 2252 s.close()
2269 2253
2270 2254 @command('debugsub',
2271 2255 [('r', 'rev', '',
2272 2256 _('revision to check'), _('REV'))],
2273 2257 _('[-r REV] [REV]'))
2274 2258 def debugsub(ui, repo, rev=None):
2275 2259 ctx = scmutil.revsingle(repo, rev, None)
2276 2260 for k, v in sorted(ctx.substate.items()):
2277 2261 ui.write(('path %s\n') % k)
2278 2262 ui.write((' source %s\n') % v[0])
2279 2263 ui.write((' revision %s\n') % v[1])
2280 2264
2281 2265 @command('debugsuccessorssets',
2282 2266 [('', 'closest', False, _('return closest successors sets only'))],
2283 2267 _('[REV]'))
2284 2268 def debugsuccessorssets(ui, repo, *revs, **opts):
2285 2269 """show set of successors for revision
2286 2270
2287 2271 A successors set of changeset A is a consistent group of revisions that
2288 2272 succeed A. It contains non-obsolete changesets only unless the closest
2289 2273 successors sets are requested (--closest).
2290 2274
2291 2275 In most cases a changeset A has a single successors set containing a single
2292 2276 successor (changeset A replaced by A').
2293 2277
2294 2278 A changeset that is made obsolete with no successors is called "pruned".
2295 2279 Such changesets have no successors sets at all.
2296 2280
2297 2281 A changeset that has been "split" will have a successors set containing
2298 2282 more than one successor.
2299 2283
2300 2284 A changeset that has been rewritten in multiple different ways is called
2301 2285 "divergent". Such changesets have multiple successor sets (each of which
2302 2286 may also be split, i.e. have multiple successors).
2303 2287
2304 2288 Results are displayed as follows::
2305 2289
2306 2290 <rev1>
2307 2291 <successors-1A>
2308 2292 <rev2>
2309 2293 <successors-2A>
2310 2294 <successors-2B1> <successors-2B2> <successors-2B3>
2311 2295
2312 2296 Here rev2 has two possible (i.e. divergent) successors sets. The first
2313 2297 holds one element, whereas the second holds three (i.e. the changeset has
2314 2298 been split).
2315 2299 """
2316 2300 # passed to successorssets caching computation from one call to another
2317 2301 cache = {}
2318 2302 ctx2str = str
2319 2303 node2str = short
2320 2304 if ui.debug():
2321 2305 def ctx2str(ctx):
2322 2306 return ctx.hex()
2323 2307 node2str = hex
2324 2308 for rev in scmutil.revrange(repo, revs):
2325 2309 ctx = repo[rev]
2326 2310 ui.write('%s\n'% ctx2str(ctx))
2327 2311 for succsset in obsutil.successorssets(repo, ctx.node(),
2328 2312 closest=opts[r'closest'],
2329 2313 cache=cache):
2330 2314 if succsset:
2331 2315 ui.write(' ')
2332 2316 ui.write(node2str(succsset[0]))
2333 2317 for node in succsset[1:]:
2334 2318 ui.write(' ')
2335 2319 ui.write(node2str(node))
2336 2320 ui.write('\n')
2337 2321
2338 2322 @command('debugtemplate',
2339 2323 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2340 2324 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2341 2325 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2342 2326 optionalrepo=True)
2343 2327 def debugtemplate(ui, repo, tmpl, **opts):
2344 2328 """parse and apply a template
2345 2329
2346 2330 If -r/--rev is given, the template is processed as a log template and
2347 2331 applied to the given changesets. Otherwise, it is processed as a generic
2348 2332 template.
2349 2333
2350 2334 Use --verbose to print the parsed tree.
2351 2335 """
2352 2336 revs = None
2353 2337 if opts[r'rev']:
2354 2338 if repo is None:
2355 2339 raise error.RepoError(_('there is no Mercurial repository here '
2356 2340 '(.hg not found)'))
2357 2341 revs = scmutil.revrange(repo, opts[r'rev'])
2358 2342
2359 2343 props = {}
2360 2344 for d in opts[r'define']:
2361 2345 try:
2362 2346 k, v = (e.strip() for e in d.split('=', 1))
2363 2347 if not k or k == 'ui':
2364 2348 raise ValueError
2365 2349 props[k] = v
2366 2350 except ValueError:
2367 2351 raise error.Abort(_('malformed keyword definition: %s') % d)
2368 2352
2369 2353 if ui.verbose:
2370 2354 aliases = ui.configitems('templatealias')
2371 2355 tree = templater.parse(tmpl)
2372 2356 ui.note(templater.prettyformat(tree), '\n')
2373 2357 newtree = templater.expandaliases(tree, aliases)
2374 2358 if newtree != tree:
2375 2359 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2376 2360
2377 2361 if revs is None:
2378 2362 t = formatter.maketemplater(ui, tmpl)
2379 2363 props['ui'] = ui
2380 2364 ui.write(t.render(props))
2381 2365 else:
2382 2366 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2383 2367 for r in revs:
2384 2368 displayer.show(repo[r], **pycompat.strkwargs(props))
2385 2369 displayer.close()
2386 2370
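# Illustrative sketch (not part of the original module) of how -D/--define
# arguments are parsed by debugtemplate above: each definition must look
# like KEY=VALUE, and an empty key or the reserved key 'ui' is rejected.
# The helper name and error type here are assumptions for illustration.
def _demoparsedefines(defines):
    props = {}
    for d in defines:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise ValueError('malformed keyword definition: %s' % d)
    return props

# _demoparsedefines(['author=alice', 'rev=42'])
# -> {'author': 'alice', 'rev': '42'}
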
2387 2371 @command('debugupdatecaches', [])
2388 2372 def debugupdatecaches(ui, repo, *pats, **opts):
2389 2373 """warm all known caches in the repository"""
2390 2374 with repo.wlock(), repo.lock():
2391 2375 repo.updatecaches()
2392 2376
2393 2377 @command('debugupgraderepo', [
2394 2378 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2395 2379 ('', 'run', False, _('performs an upgrade')),
2396 2380 ])
2397 2381 def debugupgraderepo(ui, repo, run=False, optimize=None):
2398 2382 """upgrade a repository to use different features
2399 2383
2400 2384 If no arguments are specified, the repository is evaluated for upgrade
2401 2385 and a list of problems and potential optimizations is printed.
2402 2386
2403 2387 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2404 2388 can be influenced via additional arguments. More details will be provided
2405 2389 by the command output when run without ``--run``.
2406 2390
2407 2391 During the upgrade, the repository will be locked and no writes will be
2408 2392 allowed.
2409 2393
2410 2394 At the end of the upgrade, the repository may not be readable while new
2411 2395 repository data is swapped in. This window will be as long as it takes to
2412 2396 rename some directories inside the ``.hg`` directory. On most machines, this
2413 2397 should complete almost instantaneously and the chances of a consumer being
2414 2398 unable to access the repository should be low.
2415 2399 """
2416 2400 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2417 2401
2418 2402 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2419 2403 inferrepo=True)
2420 2404 def debugwalk(ui, repo, *pats, **opts):
2421 2405 """show how files match on given patterns"""
2422 2406 opts = pycompat.byteskwargs(opts)
2423 2407 m = scmutil.match(repo[None], pats, opts)
2424 2408 ui.write(('matcher: %r\n' % m))
2425 2409 items = list(repo[None].walk(m))
2426 2410 if not items:
2427 2411 return
2428 2412 f = lambda fn: fn
2429 2413 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2430 2414 f = lambda fn: util.normpath(fn)
2431 2415 fmt = 'f %%-%ds %%-%ds %%s' % (
2432 2416 max([len(abs) for abs in items]),
2433 2417 max([len(m.rel(abs)) for abs in items]))
2434 2418 for abs in items:
2435 2419 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2436 2420 ui.write("%s\n" % line.rstrip())
2437 2421
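# Illustrative sketch (not part of the original module) of the column
# alignment used in debugwalk above: the format string is built from the
# longest absolute and relative paths so the output lines up. Sample paths
# are hypothetical.
def _demowalkcolumns(rows):
    fmt = 'f %%-%ds %%-%ds %%s' % (max(len(a) for a, r, e in rows),
                                   max(len(r) for a, r, e in rows))
    return [(fmt % row).rstrip() for row in rows]

# _demowalkcolumns([('dir/file.txt', 'file.txt', 'exact'),
#                   ('other.py', '../other.py', '')])
# -> ['f dir/file.txt file.txt    exact', 'f other.py     ../other.py']
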
2438 2422 @command('debugwireargs',
2439 2423 [('', 'three', '', 'three'),
2440 2424 ('', 'four', '', 'four'),
2441 2425 ('', 'five', '', 'five'),
2442 2426 ] + cmdutil.remoteopts,
2443 2427 _('REPO [OPTIONS]... [ONE [TWO]]'),
2444 2428 norepo=True)
2445 2429 def debugwireargs(ui, repopath, *vals, **opts):
2446 2430 opts = pycompat.byteskwargs(opts)
2447 2431 repo = hg.peer(ui, opts, repopath)
2448 2432 for opt in cmdutil.remoteopts:
2449 2433 del opts[opt[1]]
2450 2434 args = {}
2451 2435 for k, v in opts.iteritems():
2452 2436 if v:
2453 2437 args[k] = v
2454 2438 args = pycompat.strkwargs(args)
2455 2439 # run twice to check that we don't mess up the stream for the next command
2456 2440 res1 = repo.debugwireargs(*vals, **args)
2457 2441 res2 = repo.debugwireargs(*vals, **args)
2458 2442 ui.write("%s\n" % res1)
2459 2443 if res1 != res2:
2460 2444 ui.warn("%s\n" % res2)