debugfs: display the tested path and mount point of the filesystem, if known...
Matt Harbison
r35532:58803186 default
@@ -1,2446 +1,2448 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
80 80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 81 def debugancestor(ui, repo, *args):
82 82 """find the ancestor revision of two revisions in a given index"""
83 83 if len(args) == 3:
84 84 index, rev1, rev2 = args
85 85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 86 lookup = r.lookup
87 87 elif len(args) == 2:
88 88 if not repo:
89 89 raise error.Abort(_('there is no Mercurial repository here '
90 90 '(.hg not found)'))
91 91 rev1, rev2 = args
92 92 r = repo.changelog
93 93 lookup = repo.lookup
94 94 else:
95 95 raise error.Abort(_('either two or three arguments required'))
96 96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98 98
99 99 @command('debugapplystreamclonebundle', [], 'FILE')
100 100 def debugapplystreamclonebundle(ui, repo, fname):
101 101 """apply a stream clone bundle file"""
102 102 f = hg.openpath(ui, fname)
103 103 gen = exchange.readbundle(ui, f, fname)
104 104 gen.apply(repo)
105 105
106 106 @command('debugbuilddag',
107 107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 109 ('n', 'new-file', None, _('add new file at each rev'))],
110 110 _('[OPTION]... [TEXT]'))
111 111 def debugbuilddag(ui, repo, text=None,
112 112 mergeable_file=False,
113 113 overwritten_file=False,
114 114 new_file=False):
115 115 """builds a repo with a given DAG from scratch in the current empty repo
116 116
117 117 The description of the DAG is read from stdin if not given on the
118 118 command line.
119 119
120 120 Elements:
121 121
122 122 - "+n" is a linear run of n nodes based on the current default parent
123 123 - "." is a single node based on the current default parent
124 124 - "$" resets the default parent to null (implied at the start);
125 125 otherwise the default parent is always the last node created
126 126 - "<p" sets the default parent to the backref p
127 127 - "*p" is a fork at parent p, which is a backref
128 128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 129 - "/p2" is a merge of the preceding node and p2
130 130 - ":tag" defines a local tag for the preceding node
131 131 - "@branch" sets the named branch for subsequent nodes
132 132 - "#...\\n" is a comment up to the end of the line
133 133
134 134 Whitespace between the above elements is ignored.
135 135
136 136 A backref is either
137 137
138 138 - a number n, which references the node curr-n, where curr is the current
139 139 node, or
140 140 - the name of a local tag you placed earlier using ":tag", or
141 141 - empty to denote the default parent.
142 142
143 143 All string valued-elements are either strictly alphanumeric, or must
144 144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 145 """
146 146
147 147 if text is None:
148 148 ui.status(_("reading DAG from stdin\n"))
149 149 text = ui.fin.read()
150 150
151 151 cl = repo.changelog
152 152 if len(cl) > 0:
153 153 raise error.Abort(_('repository is not empty'))
154 154
155 155 # determine number of revs in DAG
156 156 total = 0
157 157 for type, data in dagparser.parsedag(text):
158 158 if type == 'n':
159 159 total += 1
160 160
161 161 if mergeable_file:
162 162 linesperrev = 2
163 163 # make a file with k lines per rev
164 164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 165 initialmergedlines.append("")
166 166
167 167 tags = []
168 168
169 169 wlock = lock = tr = None
170 170 try:
171 171 wlock = repo.wlock()
172 172 lock = repo.lock()
173 173 tr = repo.transaction("builddag")
174 174
175 175 at = -1
176 176 atbranch = 'default'
177 177 nodeids = []
178 178 id = 0
179 179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 180 for type, data in dagparser.parsedag(text):
181 181 if type == 'n':
182 182 ui.note(('node %s\n' % str(data)))
183 183 id, ps = data
184 184
185 185 files = []
186 186 filecontent = {}
187 187
188 188 p2 = None
189 189 if mergeable_file:
190 190 fn = "mf"
191 191 p1 = repo[ps[0]]
192 192 if len(ps) > 1:
193 193 p2 = repo[ps[1]]
194 194 pa = p1.ancestor(p2)
195 195 base, local, other = [x[fn].data() for x in (pa, p1,
196 196 p2)]
197 197 m3 = simplemerge.Merge3Text(base, local, other)
198 198 ml = [l.strip() for l in m3.merge_lines()]
199 199 ml.append("")
200 200 elif at > 0:
201 201 ml = p1[fn].data().split("\n")
202 202 else:
203 203 ml = initialmergedlines
204 204 ml[id * linesperrev] += " r%i" % id
205 205 mergedtext = "\n".join(ml)
206 206 files.append(fn)
207 207 filecontent[fn] = mergedtext
208 208
209 209 if overwritten_file:
210 210 fn = "of"
211 211 files.append(fn)
212 212 filecontent[fn] = "r%i\n" % id
213 213
214 214 if new_file:
215 215 fn = "nf%i" % id
216 216 files.append(fn)
217 217 filecontent[fn] = "r%i\n" % id
218 218 if len(ps) > 1:
219 219 if not p2:
220 220 p2 = repo[ps[1]]
221 221 for fn in p2:
222 222 if fn.startswith("nf"):
223 223 files.append(fn)
224 224 filecontent[fn] = p2[fn].data()
225 225
226 226 def fctxfn(repo, cx, path):
227 227 if path in filecontent:
228 228 return context.memfilectx(repo, cx, path,
229 229 filecontent[path])
230 230 return None
231 231
232 232 if len(ps) == 0 or ps[0] < 0:
233 233 pars = [None, None]
234 234 elif len(ps) == 1:
235 235 pars = [nodeids[ps[0]], None]
236 236 else:
237 237 pars = [nodeids[p] for p in ps]
238 238 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
239 239 date=(id, 0),
240 240 user="debugbuilddag",
241 241 extra={'branch': atbranch})
242 242 nodeid = repo.commitctx(cx)
243 243 nodeids.append(nodeid)
244 244 at = id
245 245 elif type == 'l':
246 246 id, name = data
247 247 ui.note(('tag %s\n' % name))
248 248 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
249 249 elif type == 'a':
250 250 ui.note(('branch %s\n' % data))
251 251 atbranch = data
252 252 ui.progress(_('building'), id, unit=_('revisions'), total=total)
253 253 tr.close()
254 254
255 255 if tags:
256 256 repo.vfs.write("localtags", "".join(tags))
257 257 finally:
258 258 ui.progress(_('building'), None)
259 259 release(tr, lock, wlock)
260 260
261 261 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
262 262 indent_string = ' ' * indent
263 263 if all:
264 264 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
265 265 % indent_string)
266 266
267 267 def showchunks(named):
268 268 ui.write("\n%s%s\n" % (indent_string, named))
269 269 for deltadata in gen.deltaiter():
270 270 node, p1, p2, cs, deltabase, delta, flags = deltadata
271 271 ui.write("%s%s %s %s %s %s %s\n" %
272 272 (indent_string, hex(node), hex(p1), hex(p2),
273 273 hex(cs), hex(deltabase), len(delta)))
274 274
275 275 chunkdata = gen.changelogheader()
276 276 showchunks("changelog")
277 277 chunkdata = gen.manifestheader()
278 278 showchunks("manifest")
279 279 for chunkdata in iter(gen.filelogheader, {}):
280 280 fname = chunkdata['filename']
281 281 showchunks(fname)
282 282 else:
283 283 if isinstance(gen, bundle2.unbundle20):
284 284 raise error.Abort(_('use debugbundle2 for this file'))
285 285 chunkdata = gen.changelogheader()
286 286 for deltadata in gen.deltaiter():
287 287 node, p1, p2, cs, deltabase, delta, flags = deltadata
288 288 ui.write("%s%s\n" % (indent_string, hex(node)))
289 289
290 290 def _debugobsmarkers(ui, part, indent=0, **opts):
291 291 """display version and markers contained in 'data'"""
292 292 opts = pycompat.byteskwargs(opts)
293 293 data = part.read()
294 294 indent_string = ' ' * indent
295 295 try:
296 296 version, markers = obsolete._readmarkers(data)
297 297 except error.UnknownVersion as exc:
298 298 msg = "%sunsupported version: %s (%d bytes)\n"
299 299 msg %= indent_string, exc.version, len(data)
300 300 ui.write(msg)
301 301 else:
302 302 msg = "%sversion: %d (%d bytes)\n"
303 303 msg %= indent_string, version, len(data)
304 304 ui.write(msg)
305 305 fm = ui.formatter('debugobsolete', opts)
306 306 for rawmarker in sorted(markers):
307 307 m = obsutil.marker(None, rawmarker)
308 308 fm.startitem()
309 309 fm.plain(indent_string)
310 310 cmdutil.showmarker(fm, m)
311 311 fm.end()
312 312
313 313 def _debugphaseheads(ui, data, indent=0):
314 314 """display version and markers contained in 'data'"""
315 315 indent_string = ' ' * indent
316 316 headsbyphase = phases.binarydecode(data)
317 317 for phase in phases.allphases:
318 318 for head in headsbyphase[phase]:
319 319 ui.write(indent_string)
320 320 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
321 321
322 322 def _quasirepr(thing):
323 323 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
324 324 return '{%s}' % (
325 325 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
326 326 return pycompat.bytestr(repr(thing))
327 327
328 328 def _debugbundle2(ui, gen, all=None, **opts):
329 329 """lists the contents of a bundle2"""
330 330 if not isinstance(gen, bundle2.unbundle20):
331 331 raise error.Abort(_('not a bundle2 file'))
332 332 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
333 333 parttypes = opts.get(r'part_type', [])
334 334 for part in gen.iterparts():
335 335 if parttypes and part.type not in parttypes:
336 336 continue
337 337 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
338 338 if part.type == 'changegroup':
339 339 version = part.params.get('version', '01')
340 340 cg = changegroup.getunbundler(version, part, 'UN')
341 341 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
342 342 if part.type == 'obsmarkers':
343 343 _debugobsmarkers(ui, part, indent=4, **opts)
344 344 if part.type == 'phase-heads':
345 345 _debugphaseheads(ui, part, indent=4)
346 346
347 347 @command('debugbundle',
348 348 [('a', 'all', None, _('show all details')),
349 349 ('', 'part-type', [], _('show only the named part type')),
350 350 ('', 'spec', None, _('print the bundlespec of the bundle'))],
351 351 _('FILE'),
352 352 norepo=True)
353 353 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
354 354 """lists the contents of a bundle"""
355 355 with hg.openpath(ui, bundlepath) as f:
356 356 if spec:
357 357 spec = exchange.getbundlespec(ui, f)
358 358 ui.write('%s\n' % spec)
359 359 return
360 360
361 361 gen = exchange.readbundle(ui, f, bundlepath)
362 362 if isinstance(gen, bundle2.unbundle20):
363 363 return _debugbundle2(ui, gen, all=all, **opts)
364 364 _debugchangegroup(ui, gen, all=all, **opts)
365 365
366 366 @command('debugcapabilities',
367 367 [], _('PATH'),
368 368 norepo=True)
369 369 def debugcapabilities(ui, path, **opts):
370 370 """lists the capabilities of a remote peer"""
371 371 opts = pycompat.byteskwargs(opts)
372 372 peer = hg.peer(ui, opts, path)
373 373 caps = peer.capabilities()
374 374 ui.write(('Main capabilities:\n'))
375 375 for c in sorted(caps):
376 376 ui.write((' %s\n') % c)
377 377 b2caps = bundle2.bundle2caps(peer)
378 378 if b2caps:
379 379 ui.write(('Bundle2 capabilities:\n'))
380 380 for key, values in sorted(b2caps.iteritems()):
381 381 ui.write((' %s\n') % key)
382 382 for v in values:
383 383 ui.write((' %s\n') % v)
384 384
385 385 @command('debugcheckstate', [], '')
386 386 def debugcheckstate(ui, repo):
387 387 """validate the correctness of the current dirstate"""
388 388 parent1, parent2 = repo.dirstate.parents()
389 389 m1 = repo[parent1].manifest()
390 390 m2 = repo[parent2].manifest()
391 391 errors = 0
392 392 for f in repo.dirstate:
393 393 state = repo.dirstate[f]
394 394 if state in "nr" and f not in m1:
395 395 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
396 396 errors += 1
397 397 if state in "a" and f in m1:
398 398 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
399 399 errors += 1
400 400 if state in "m" and f not in m1 and f not in m2:
401 401 ui.warn(_("%s in state %s, but not in either manifest\n") %
402 402 (f, state))
403 403 errors += 1
404 404 for f in m1:
405 405 state = repo.dirstate[f]
406 406 if state not in "nrm":
407 407 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
408 408 errors += 1
409 409 if errors:
410 410 error = _(".hg/dirstate inconsistent with current parent's manifest")
411 411 raise error.Abort(error)
412 412
413 413 @command('debugcolor',
414 414 [('', 'style', None, _('show all configured styles'))],
415 415 'hg debugcolor')
416 416 def debugcolor(ui, repo, **opts):
417 417 """show available color, effects or style"""
418 418 ui.write(('color mode: %s\n') % ui._colormode)
419 419 if opts.get(r'style'):
420 420 return _debugdisplaystyle(ui)
421 421 else:
422 422 return _debugdisplaycolor(ui)
423 423
424 424 def _debugdisplaycolor(ui):
425 425 ui = ui.copy()
426 426 ui._styles.clear()
427 427 for effect in color._activeeffects(ui).keys():
428 428 ui._styles[effect] = effect
429 429 if ui._terminfoparams:
430 430 for k, v in ui.configitems('color'):
431 431 if k.startswith('color.'):
432 432 ui._styles[k] = k[6:]
433 433 elif k.startswith('terminfo.'):
434 434 ui._styles[k] = k[9:]
435 435 ui.write(_('available colors:\n'))
436 436 # sort label with a '_' after the other to group '_background' entry.
437 437 items = sorted(ui._styles.items(),
438 438 key=lambda i: ('_' in i[0], i[0], i[1]))
439 439 for colorname, label in items:
440 440 ui.write(('%s\n') % colorname, label=label)
441 441
442 442 def _debugdisplaystyle(ui):
443 443 ui.write(_('available style:\n'))
444 444 width = max(len(s) for s in ui._styles)
445 445 for label, effects in sorted(ui._styles.items()):
446 446 ui.write('%s' % label, label=label)
447 447 if effects:
448 448 # 50
449 449 ui.write(': ')
450 450 ui.write(' ' * (max(0, width - len(label))))
451 451 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
452 452 ui.write('\n')
453 453
454 454 @command('debugcreatestreamclonebundle', [], 'FILE')
455 455 def debugcreatestreamclonebundle(ui, repo, fname):
456 456 """create a stream clone bundle file
457 457
458 458 Stream bundles are special bundles that are essentially archives of
459 459 revlog files. They are commonly used for cloning very quickly.
460 460 """
461 461 # TODO we may want to turn this into an abort when this functionality
462 462 # is moved into `hg bundle`.
463 463 if phases.hassecret(repo):
464 464 ui.warn(_('(warning: stream clone bundle will contain secret '
465 465 'revisions)\n'))
466 466
467 467 requirements, gen = streamclone.generatebundlev1(repo)
468 468 changegroup.writechunks(ui, gen, fname)
469 469
470 470 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
471 471
472 472 @command('debugdag',
473 473 [('t', 'tags', None, _('use tags as labels')),
474 474 ('b', 'branches', None, _('annotate with branch names')),
475 475 ('', 'dots', None, _('use dots for runs')),
476 476 ('s', 'spaces', None, _('separate elements by spaces'))],
477 477 _('[OPTION]... [FILE [REV]...]'),
478 478 optionalrepo=True)
479 479 def debugdag(ui, repo, file_=None, *revs, **opts):
480 480 """format the changelog or an index DAG as a concise textual description
481 481
482 482 If you pass a revlog index, the revlog's DAG is emitted. If you list
483 483 revision numbers, they get labeled in the output as rN.
484 484
485 485 Otherwise, the changelog DAG of the current repo is emitted.
486 486 """
487 487 spaces = opts.get(r'spaces')
488 488 dots = opts.get(r'dots')
489 489 if file_:
490 490 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
491 491 file_)
492 492 revs = set((int(r) for r in revs))
493 493 def events():
494 494 for r in rlog:
495 495 yield 'n', (r, list(p for p in rlog.parentrevs(r)
496 496 if p != -1))
497 497 if r in revs:
498 498 yield 'l', (r, "r%i" % r)
499 499 elif repo:
500 500 cl = repo.changelog
501 501 tags = opts.get(r'tags')
502 502 branches = opts.get(r'branches')
503 503 if tags:
504 504 labels = {}
505 505 for l, n in repo.tags().items():
506 506 labels.setdefault(cl.rev(n), []).append(l)
507 507 def events():
508 508 b = "default"
509 509 for r in cl:
510 510 if branches:
511 511 newb = cl.read(cl.node(r))[5]['branch']
512 512 if newb != b:
513 513 yield 'a', newb
514 514 b = newb
515 515 yield 'n', (r, list(p for p in cl.parentrevs(r)
516 516 if p != -1))
517 517 if tags:
518 518 ls = labels.get(r)
519 519 if ls:
520 520 for l in ls:
521 521 yield 'l', (r, l)
522 522 else:
523 523 raise error.Abort(_('need repo for changelog dag'))
524 524
525 525 for line in dagparser.dagtextlines(events(),
526 526 addspaces=spaces,
527 527 wraplabels=True,
528 528 wrapannotations=True,
529 529 wrapnonlinear=dots,
530 530 usedots=dots,
531 531 maxlinewidth=70):
532 532 ui.write(line)
533 533 ui.write("\n")
534 534
535 535 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
536 536 def debugdata(ui, repo, file_, rev=None, **opts):
537 537 """dump the contents of a data file revision"""
538 538 opts = pycompat.byteskwargs(opts)
539 539 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
540 540 if rev is not None:
541 541 raise error.CommandError('debugdata', _('invalid arguments'))
542 542 file_, rev = None, file_
543 543 elif rev is None:
544 544 raise error.CommandError('debugdata', _('invalid arguments'))
545 545 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
546 546 try:
547 547 ui.write(r.revision(r.lookup(rev), raw=True))
548 548 except KeyError:
549 549 raise error.Abort(_('invalid revision identifier %s') % rev)
550 550
551 551 @command('debugdate',
552 552 [('e', 'extended', None, _('try extended date formats'))],
553 553 _('[-e] DATE [RANGE]'),
554 554 norepo=True, optionalrepo=True)
555 555 def debugdate(ui, date, range=None, **opts):
556 556 """parse and display a date"""
557 557 if opts[r"extended"]:
558 558 d = util.parsedate(date, util.extendeddateformats)
559 559 else:
560 560 d = util.parsedate(date)
561 561 ui.write(("internal: %s %s\n") % d)
562 562 ui.write(("standard: %s\n") % util.datestr(d))
563 563 if range:
564 564 m = util.matchdate(range)
565 565 ui.write(("match: %s\n") % m(d[0]))
566 566
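
Editor's aside: debugdate is a thin wrapper over the date helpers in util, so the same parsing and range matching can be reproduced directly. A rough sketch with made-up values, assuming the util functions called by the command above:

    from mercurial import util

    d = util.parsedate('2017-12-25 14:00')   # -> (unixtime, tz offset), the 'internal' form
    print(util.datestr(d))                   # the 'standard' form printed by the command
    m = util.matchdate('>2017-01-01')        # matcher for the optional RANGE argument
    print(m(d[0]))                           # True if the parsed date falls in the range
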
567 567 @command('debugdeltachain',
568 568 cmdutil.debugrevlogopts + cmdutil.formatteropts,
569 569 _('-c|-m|FILE'),
570 570 optionalrepo=True)
571 571 def debugdeltachain(ui, repo, file_=None, **opts):
572 572 """dump information about delta chains in a revlog
573 573
574 574 Output can be templatized. Available template keywords are:
575 575
576 576 :``rev``: revision number
577 577 :``chainid``: delta chain identifier (numbered by unique base)
578 578 :``chainlen``: delta chain length to this revision
579 579 :``prevrev``: previous revision in delta chain
580 580 :``deltatype``: role of delta / how it was computed
581 581 :``compsize``: compressed size of revision
582 582 :``uncompsize``: uncompressed size of revision
583 583 :``chainsize``: total size of compressed revisions in chain
584 584 :``chainratio``: total chain size divided by uncompressed revision size
585 585 (new delta chains typically start at ratio 2.00)
586 586 :``lindist``: linear distance from base revision in delta chain to end
587 587 of this revision
588 588 :``extradist``: total size of revisions not part of this delta chain from
589 589 base of delta chain to end of this revision; a measurement
590 590 of how much extra data we need to read/seek across to read
591 591 the delta chain for this revision
592 592 :``extraratio``: extradist divided by chainsize; another representation of
593 593 how much unrelated data is needed to load this delta chain
594 594
595 595 If the repository is configured to use the sparse read, additional keywords
596 596 are available:
597 597
598 598 :``readsize``: total size of data read from the disk for a revision
599 599 (sum of the sizes of all the blocks)
600 600 :``largestblock``: size of the largest block of data read from the disk
601 601 :``readdensity``: density of useful bytes in the data read from the disk
602 602
603 603 The sparse read can be enabled with experimental.sparse-read = True
604 604 """
605 605 opts = pycompat.byteskwargs(opts)
606 606 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
607 607 index = r.index
608 608 generaldelta = r.version & revlog.FLAG_GENERALDELTA
609 609 withsparseread = getattr(r, '_withsparseread', False)
610 610
611 611 def revinfo(rev):
612 612 e = index[rev]
613 613 compsize = e[1]
614 614 uncompsize = e[2]
615 615 chainsize = 0
616 616
617 617 if generaldelta:
618 618 if e[3] == e[5]:
619 619 deltatype = 'p1'
620 620 elif e[3] == e[6]:
621 621 deltatype = 'p2'
622 622 elif e[3] == rev - 1:
623 623 deltatype = 'prev'
624 624 elif e[3] == rev:
625 625 deltatype = 'base'
626 626 else:
627 627 deltatype = 'other'
628 628 else:
629 629 if e[3] == rev:
630 630 deltatype = 'base'
631 631 else:
632 632 deltatype = 'prev'
633 633
634 634 chain = r._deltachain(rev)[0]
635 635 for iterrev in chain:
636 636 e = index[iterrev]
637 637 chainsize += e[1]
638 638
639 639 return compsize, uncompsize, deltatype, chain, chainsize
640 640
641 641 fm = ui.formatter('debugdeltachain', opts)
642 642
643 643 fm.plain(' rev chain# chainlen prev delta '
644 644 'size rawsize chainsize ratio lindist extradist '
645 645 'extraratio')
646 646 if withsparseread:
647 647 fm.plain(' readsize largestblk rddensity')
648 648 fm.plain('\n')
649 649
650 650 chainbases = {}
651 651 for rev in r:
652 652 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
653 653 chainbase = chain[0]
654 654 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
655 655 start = r.start
656 656 length = r.length
657 657 basestart = start(chainbase)
658 658 revstart = start(rev)
659 659 lineardist = revstart + comp - basestart
660 660 extradist = lineardist - chainsize
661 661 try:
662 662 prevrev = chain[-2]
663 663 except IndexError:
664 664 prevrev = -1
665 665
666 666 chainratio = float(chainsize) / float(uncomp)
667 667 extraratio = float(extradist) / float(chainsize)
668 668
669 669 fm.startitem()
670 670 fm.write('rev chainid chainlen prevrev deltatype compsize '
671 671 'uncompsize chainsize chainratio lindist extradist '
672 672 'extraratio',
673 673 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
674 674 rev, chainid, len(chain), prevrev, deltatype, comp,
675 675 uncomp, chainsize, chainratio, lineardist, extradist,
676 676 extraratio,
677 677 rev=rev, chainid=chainid, chainlen=len(chain),
678 678 prevrev=prevrev, deltatype=deltatype, compsize=comp,
679 679 uncompsize=uncomp, chainsize=chainsize,
680 680 chainratio=chainratio, lindist=lineardist,
681 681 extradist=extradist, extraratio=extraratio)
682 682 if withsparseread:
683 683 readsize = 0
684 684 largestblock = 0
685 685 for revschunk in revlog._slicechunk(r, chain):
686 686 blkend = start(revschunk[-1]) + length(revschunk[-1])
687 687 blksize = blkend - start(revschunk[0])
688 688
689 689 readsize += blksize
690 690 if largestblock < blksize:
691 691 largestblock = blksize
692 692
693 693 readdensity = float(chainsize) / float(readsize)
694 694
695 695 fm.write('readsize largestblock readdensity',
696 696 ' %10d %10d %9.5f',
697 697 readsize, largestblock, readdensity,
698 698 readsize=readsize, largestblock=largestblock,
699 699 readdensity=readdensity)
700 700
701 701 fm.plain('\n')
702 702
703 703 fm.end()
704 704
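
Editor's aside: the size-related keywords documented in debugdeltachain's docstring are tied together by the arithmetic in the loop above. A worked example with hypothetical numbers:

    chainsize = 1200                    # compressed bytes of all revisions in the chain
    uncompsize = 5000                   # uncompressed size of this revision
    lineardist = 1500                   # bytes from the chain base to the end of this revision
    extradist = lineardist - chainsize  # 300 bytes of unrelated data to seek across
    chainratio = float(chainsize) / float(uncompsize)  # 0.24
    extraratio = float(extradist) / float(chainsize)   # 0.25
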
705 705 @command('debugdirstate|debugstate',
706 706 [('', 'nodates', None, _('do not display the saved mtime')),
707 707 ('', 'datesort', None, _('sort by saved mtime'))],
708 708 _('[OPTION]...'))
709 709 def debugstate(ui, repo, **opts):
710 710 """show the contents of the current dirstate"""
711 711
712 712 nodates = opts.get(r'nodates')
713 713 datesort = opts.get(r'datesort')
714 714
715 715 timestr = ""
716 716 if datesort:
717 717 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
718 718 else:
719 719 keyfunc = None # sort by filename
720 720 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
721 721 if ent[3] == -1:
722 722 timestr = 'unset '
723 723 elif nodates:
724 724 timestr = 'set '
725 725 else:
726 726 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
727 727 time.localtime(ent[3]))
728 728 timestr = encoding.strtolocal(timestr)
729 729 if ent[1] & 0o20000:
730 730 mode = 'lnk'
731 731 else:
732 732 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
733 733 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
734 734 for f in repo.dirstate.copies():
735 735 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
736 736
737 737 @command('debugdiscovery',
738 738 [('', 'old', None, _('use old-style discovery')),
739 739 ('', 'nonheads', None,
740 740 _('use old-style discovery with non-heads included')),
741 741 ('', 'rev', [], 'restrict discovery to this set of revs'),
742 742 ] + cmdutil.remoteopts,
743 743 _('[--rev REV] [OTHER]'))
744 744 def debugdiscovery(ui, repo, remoteurl="default", **opts):
745 745 """runs the changeset discovery protocol in isolation"""
746 746 opts = pycompat.byteskwargs(opts)
747 747 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
748 748 remote = hg.peer(repo, opts, remoteurl)
749 749 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
750 750
751 751 # make sure tests are repeatable
752 752 random.seed(12323)
753 753
754 754 def doit(pushedrevs, remoteheads, remote=remote):
755 755 if opts.get('old'):
756 756 if not util.safehasattr(remote, 'branches'):
757 757 # enable in-client legacy support
758 758 remote = localrepo.locallegacypeer(remote.local())
759 759 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
760 760 force=True)
761 761 common = set(common)
762 762 if not opts.get('nonheads'):
763 763 ui.write(("unpruned common: %s\n") %
764 764 " ".join(sorted(short(n) for n in common)))
765 765 dag = dagutil.revlogdag(repo.changelog)
766 766 all = dag.ancestorset(dag.internalizeall(common))
767 767 common = dag.externalizeall(dag.headsetofconnecteds(all))
768 768 else:
769 769 nodes = None
770 770 if pushedrevs:
771 771 revs = scmutil.revrange(repo, pushedrevs)
772 772 nodes = [repo[r].node() for r in revs]
773 773 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
774 774 ancestorsof=nodes)
775 775 common = set(common)
776 776 rheads = set(hds)
777 777 lheads = set(repo.heads())
778 778 ui.write(("common heads: %s\n") %
779 779 " ".join(sorted(short(n) for n in common)))
780 780 if lheads <= common:
781 781 ui.write(("local is subset\n"))
782 782 elif rheads <= common:
783 783 ui.write(("remote is subset\n"))
784 784
785 785 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
786 786 localrevs = opts['rev']
787 787 doit(localrevs, remoterevs)
788 788
789 789 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
790 790 def debugextensions(ui, **opts):
791 791 '''show information about active extensions'''
792 792 opts = pycompat.byteskwargs(opts)
793 793 exts = extensions.extensions(ui)
794 794 hgver = util.version()
795 795 fm = ui.formatter('debugextensions', opts)
796 796 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
797 797 isinternal = extensions.ismoduleinternal(extmod)
798 798 extsource = pycompat.fsencode(extmod.__file__)
799 799 if isinternal:
800 800 exttestedwith = [] # never expose magic string to users
801 801 else:
802 802 exttestedwith = getattr(extmod, 'testedwith', '').split()
803 803 extbuglink = getattr(extmod, 'buglink', None)
804 804
805 805 fm.startitem()
806 806
807 807 if ui.quiet or ui.verbose:
808 808 fm.write('name', '%s\n', extname)
809 809 else:
810 810 fm.write('name', '%s', extname)
811 811 if isinternal or hgver in exttestedwith:
812 812 fm.plain('\n')
813 813 elif not exttestedwith:
814 814 fm.plain(_(' (untested!)\n'))
815 815 else:
816 816 lasttestedversion = exttestedwith[-1]
817 817 fm.plain(' (%s!)\n' % lasttestedversion)
818 818
819 819 fm.condwrite(ui.verbose and extsource, 'source',
820 820 _(' location: %s\n'), extsource or "")
821 821
822 822 if ui.verbose:
823 823 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
824 824 fm.data(bundled=isinternal)
825 825
826 826 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
827 827 _(' tested with: %s\n'),
828 828 fm.formatlist(exttestedwith, name='ver'))
829 829
830 830 fm.condwrite(ui.verbose and extbuglink, 'buglink',
831 831 _(' bug reporting: %s\n'), extbuglink or "")
832 832
833 833 fm.end()
834 834
835 835 @command('debugfileset',
836 836 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
837 837 _('[-r REV] FILESPEC'))
838 838 def debugfileset(ui, repo, expr, **opts):
839 839 '''parse and apply a fileset specification'''
840 840 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
841 841 if ui.verbose:
842 842 tree = fileset.parse(expr)
843 843 ui.note(fileset.prettyformat(tree), "\n")
844 844
845 845 for f in ctx.getfileset(expr):
846 846 ui.write("%s\n" % f)
847 847
848 848 @command('debugformat',
849 849 [] + cmdutil.formatteropts,
850 850 _(''))
851 851 def debugformat(ui, repo, **opts):
852 852 """display format information about the current repository
853 853
854 854 Use --verbose to get extra information about current config value and
855 855 Mercurial default."""
856 856 opts = pycompat.byteskwargs(opts)
857 857 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
858 858 maxvariantlength = max(len('format-variant'), maxvariantlength)
859 859
860 860 def makeformatname(name):
861 861 return '%s:' + (' ' * (maxvariantlength - len(name)))
862 862
863 863 fm = ui.formatter('debugformat', opts)
864 864 if fm.isplain():
865 865 def formatvalue(value):
866 866 if util.safehasattr(value, 'startswith'):
867 867 return value
868 868 if value:
869 869 return 'yes'
870 870 else:
871 871 return 'no'
872 872 else:
873 873 formatvalue = pycompat.identity
874 874
875 875 fm.plain('format-variant')
876 876 fm.plain(' ' * (maxvariantlength - len('format-variant')))
877 877 fm.plain(' repo')
878 878 if ui.verbose:
879 879 fm.plain(' config default')
880 880 fm.plain('\n')
881 881 for fv in upgrade.allformatvariant:
882 882 fm.startitem()
883 883 repovalue = fv.fromrepo(repo)
884 884 configvalue = fv.fromconfig(repo)
885 885
886 886 if repovalue != configvalue:
887 887 namelabel = 'formatvariant.name.mismatchconfig'
888 888 repolabel = 'formatvariant.repo.mismatchconfig'
889 889 elif repovalue != fv.default:
890 890 namelabel = 'formatvariant.name.mismatchdefault'
891 891 repolabel = 'formatvariant.repo.mismatchdefault'
892 892 else:
893 893 namelabel = 'formatvariant.name.uptodate'
894 894 repolabel = 'formatvariant.repo.uptodate'
895 895
896 896 fm.write('name', makeformatname(fv.name), fv.name,
897 897 label=namelabel)
898 898 fm.write('repo', ' %3s', formatvalue(repovalue),
899 899 label=repolabel)
900 900 if fv.default != configvalue:
901 901 configlabel = 'formatvariant.config.special'
902 902 else:
903 903 configlabel = 'formatvariant.config.default'
904 904 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
905 905 label=configlabel)
906 906 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
907 907 label='formatvariant.default')
908 908 fm.plain('\n')
909 909 fm.end()
910 910
911 911 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
912 912 def debugfsinfo(ui, path="."):
913 913 """show information detected about current filesystem"""
914 ui.write(('path: %s\n') % path)
915 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
914 916 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
915 917 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
916 918 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
917 919 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
918 920 casesensitive = '(unknown)'
919 921 try:
920 922 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
921 923 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
922 924 except OSError:
923 925 pass
924 926 ui.write(('case-sensitive: %s\n') % casesensitive)
925 927
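
Editor's aside: the two ui.write calls this changeset adds to debugfsinfo report the probed path and, when it can be determined, the mount point of the filesystem containing it. The helpers behind those lines can be exercised on their own; a minimal sketch, assuming the util.getfstype and util.getfsmountpoint helpers used above:

    from mercurial import util

    path = '.'
    print(util.getfstype(path) or '(unknown)')        # e.g. 'ext4' where detection is supported
    print(util.getfsmountpoint(path) or '(unknown)')  # '(unknown)' when the helper returns None
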
926 928 @command('debuggetbundle',
927 929 [('H', 'head', [], _('id of head node'), _('ID')),
928 930 ('C', 'common', [], _('id of common node'), _('ID')),
929 931 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
930 932 _('REPO FILE [-H|-C ID]...'),
931 933 norepo=True)
932 934 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
933 935 """retrieves a bundle from a repo
934 936
935 937 Every ID must be a full-length hex node id string. Saves the bundle to the
936 938 given file.
937 939 """
938 940 opts = pycompat.byteskwargs(opts)
939 941 repo = hg.peer(ui, opts, repopath)
940 942 if not repo.capable('getbundle'):
941 943 raise error.Abort("getbundle() not supported by target repository")
942 944 args = {}
943 945 if common:
944 946 args[r'common'] = [bin(s) for s in common]
945 947 if head:
946 948 args[r'heads'] = [bin(s) for s in head]
947 949 # TODO: get desired bundlecaps from command line.
948 950 args[r'bundlecaps'] = None
949 951 bundle = repo.getbundle('debug', **args)
950 952
951 953 bundletype = opts.get('type', 'bzip2').lower()
952 954 btypes = {'none': 'HG10UN',
953 955 'bzip2': 'HG10BZ',
954 956 'gzip': 'HG10GZ',
955 957 'bundle2': 'HG20'}
956 958 bundletype = btypes.get(bundletype)
957 959 if bundletype not in bundle2.bundletypes:
958 960 raise error.Abort(_('unknown bundle type specified with --type'))
959 961 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
960 962
961 963 @command('debugignore', [], '[FILE]')
962 964 def debugignore(ui, repo, *files, **opts):
963 965 """display the combined ignore pattern and information about ignored files
964 966
965 967 With no argument display the combined ignore pattern.
966 968
967 969 Given space separated file names, shows if the given file is ignored and
968 970 if so, show the ignore rule (file and line number) that matched it.
969 971 """
970 972 ignore = repo.dirstate._ignore
971 973 if not files:
972 974 # Show all the patterns
973 975 ui.write("%s\n" % repr(ignore))
974 976 else:
975 977 m = scmutil.match(repo[None], pats=files)
976 978 for f in m.files():
977 979 nf = util.normpath(f)
978 980 ignored = None
979 981 ignoredata = None
980 982 if nf != '.':
981 983 if ignore(nf):
982 984 ignored = nf
983 985 ignoredata = repo.dirstate._ignorefileandline(nf)
984 986 else:
985 987 for p in util.finddirs(nf):
986 988 if ignore(p):
987 989 ignored = p
988 990 ignoredata = repo.dirstate._ignorefileandline(p)
989 991 break
990 992 if ignored:
991 993 if ignored == nf:
992 994 ui.write(_("%s is ignored\n") % m.uipath(f))
993 995 else:
994 996 ui.write(_("%s is ignored because of "
995 997 "containing folder %s\n")
996 998 % (m.uipath(f), ignored))
997 999 ignorefile, lineno, line = ignoredata
998 1000 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
999 1001 % (ignorefile, lineno, line))
1000 1002 else:
1001 1003 ui.write(_("%s is not ignored\n") % m.uipath(f))
1002 1004
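
Editor's aside: debugignore resolves each name through the dirstate's ignore matcher and, failing that, walks the containing folders, as in the loop above. A small sketch of the same lookup (hypothetical path, assuming an existing repo object and the private matcher used by the command):

    ignore = repo.dirstate._ignore           # matcher built from the ignore files
    nf = util.normpath('build/output.o')
    print(ignore(nf))                        # True if the file itself matches an ignore rule
    for p in util.finddirs(nf):              # containing folders, deepest first
        print(p, ignore(p))                  # True if a parent folder is ignored
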
1003 1005 @command('debugindex', cmdutil.debugrevlogopts +
1004 1006 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1005 1007 _('[-f FORMAT] -c|-m|FILE'),
1006 1008 optionalrepo=True)
1007 1009 def debugindex(ui, repo, file_=None, **opts):
1008 1010 """dump the contents of an index file"""
1009 1011 opts = pycompat.byteskwargs(opts)
1010 1012 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1011 1013 format = opts.get('format', 0)
1012 1014 if format not in (0, 1):
1013 1015 raise error.Abort(_("unknown format %d") % format)
1014 1016
1015 1017 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1016 1018 if generaldelta:
1017 1019 basehdr = ' delta'
1018 1020 else:
1019 1021 basehdr = ' base'
1020 1022
1021 1023 if ui.debugflag:
1022 1024 shortfn = hex
1023 1025 else:
1024 1026 shortfn = short
1025 1027
1026 1028 # There might not be anything in r, so have a sane default
1027 1029 idlen = 12
1028 1030 for i in r:
1029 1031 idlen = len(shortfn(r.node(i)))
1030 1032 break
1031 1033
1032 1034 if format == 0:
1033 1035 ui.write((" rev offset length " + basehdr + " linkrev"
1034 1036 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1035 1037 elif format == 1:
1036 1038 ui.write((" rev flag offset length"
1037 1039 " size " + basehdr + " link p1 p2"
1038 1040 " %s\n") % "nodeid".rjust(idlen))
1039 1041
1040 1042 for i in r:
1041 1043 node = r.node(i)
1042 1044 if generaldelta:
1043 1045 base = r.deltaparent(i)
1044 1046 else:
1045 1047 base = r.chainbase(i)
1046 1048 if format == 0:
1047 1049 try:
1048 1050 pp = r.parents(node)
1049 1051 except Exception:
1050 1052 pp = [nullid, nullid]
1051 1053 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1052 1054 i, r.start(i), r.length(i), base, r.linkrev(i),
1053 1055 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1054 1056 elif format == 1:
1055 1057 pr = r.parentrevs(i)
1056 1058 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1057 1059 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1058 1060 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1059 1061
1060 1062 @command('debugindexdot', cmdutil.debugrevlogopts,
1061 1063 _('-c|-m|FILE'), optionalrepo=True)
1062 1064 def debugindexdot(ui, repo, file_=None, **opts):
1063 1065 """dump an index DAG as a graphviz dot file"""
1064 1066 opts = pycompat.byteskwargs(opts)
1065 1067 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1066 1068 ui.write(("digraph G {\n"))
1067 1069 for i in r:
1068 1070 node = r.node(i)
1069 1071 pp = r.parents(node)
1070 1072 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1071 1073 if pp[1] != nullid:
1072 1074 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1073 1075 ui.write("}\n")
1074 1076
1075 1077 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1076 1078 def debuginstall(ui, **opts):
1077 1079 '''test Mercurial installation
1078 1080
1079 1081 Returns 0 on success.
1080 1082 '''
1081 1083 opts = pycompat.byteskwargs(opts)
1082 1084
1083 1085 def writetemp(contents):
1084 1086 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1085 1087 f = os.fdopen(fd, pycompat.sysstr("wb"))
1086 1088 f.write(contents)
1087 1089 f.close()
1088 1090 return name
1089 1091
1090 1092 problems = 0
1091 1093
1092 1094 fm = ui.formatter('debuginstall', opts)
1093 1095 fm.startitem()
1094 1096
1095 1097 # encoding
1096 1098 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1097 1099 err = None
1098 1100 try:
1099 1101 codecs.lookup(pycompat.sysstr(encoding.encoding))
1100 1102 except LookupError as inst:
1101 1103 err = util.forcebytestr(inst)
1102 1104 problems += 1
1103 1105 fm.condwrite(err, 'encodingerror', _(" %s\n"
1104 1106 " (check that your locale is properly set)\n"), err)
1105 1107
1106 1108 # Python
1107 1109 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1108 1110 pycompat.sysexecutable)
1109 1111 fm.write('pythonver', _("checking Python version (%s)\n"),
1110 1112 ("%d.%d.%d" % sys.version_info[:3]))
1111 1113 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1112 1114 os.path.dirname(pycompat.fsencode(os.__file__)))
1113 1115
1114 1116 security = set(sslutil.supportedprotocols)
1115 1117 if sslutil.hassni:
1116 1118 security.add('sni')
1117 1119
1118 1120 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1119 1121 fm.formatlist(sorted(security), name='protocol',
1120 1122 fmt='%s', sep=','))
1121 1123
1122 1124 # These are warnings, not errors. So don't increment problem count. This
1123 1125 # may change in the future.
1124 1126 if 'tls1.2' not in security:
1125 1127 fm.plain(_(' TLS 1.2 not supported by Python install; '
1126 1128 'network connections lack modern security\n'))
1127 1129 if 'sni' not in security:
1128 1130 fm.plain(_(' SNI not supported by Python install; may have '
1129 1131 'connectivity issues with some servers\n'))
1130 1132
1131 1133 # TODO print CA cert info
1132 1134
1133 1135 # hg version
1134 1136 hgver = util.version()
1135 1137 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1136 1138 hgver.split('+')[0])
1137 1139 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1138 1140 '+'.join(hgver.split('+')[1:]))
1139 1141
1140 1142 # compiled modules
1141 1143 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1142 1144 policy.policy)
1143 1145 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1144 1146 os.path.dirname(pycompat.fsencode(__file__)))
1145 1147
1146 1148 if policy.policy in ('c', 'allow'):
1147 1149 err = None
1148 1150 try:
1149 1151 from .cext import (
1150 1152 base85,
1151 1153 bdiff,
1152 1154 mpatch,
1153 1155 osutil,
1154 1156 )
1155 1157 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1156 1158 except Exception as inst:
1157 1159 err = util.forcebytestr(inst)
1158 1160 problems += 1
1159 1161 fm.condwrite(err, 'extensionserror', " %s\n", err)
1160 1162
1161 1163 compengines = util.compengines._engines.values()
1162 1164 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1163 1165 fm.formatlist(sorted(e.name() for e in compengines),
1164 1166 name='compengine', fmt='%s', sep=', '))
1165 1167 fm.write('compenginesavail', _('checking available compression engines '
1166 1168 '(%s)\n'),
1167 1169 fm.formatlist(sorted(e.name() for e in compengines
1168 1170 if e.available()),
1169 1171 name='compengine', fmt='%s', sep=', '))
1170 1172 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1171 1173 fm.write('compenginesserver', _('checking available compression engines '
1172 1174 'for wire protocol (%s)\n'),
1173 1175 fm.formatlist([e.name() for e in wirecompengines
1174 1176 if e.wireprotosupport()],
1175 1177 name='compengine', fmt='%s', sep=', '))
1176 1178 re2 = 'missing'
1177 1179 if util._re2:
1178 1180 re2 = 'available'
1179 1181 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1180 1182 fm.data(re2=bool(util._re2))
1181 1183
1182 1184 # templates
1183 1185 p = templater.templatepaths()
1184 1186 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1185 1187 fm.condwrite(not p, '', _(" no template directories found\n"))
1186 1188 if p:
1187 1189 m = templater.templatepath("map-cmdline.default")
1188 1190 if m:
1189 1191 # template found, check if it is working
1190 1192 err = None
1191 1193 try:
1192 1194 templater.templater.frommapfile(m)
1193 1195 except Exception as inst:
1194 1196 err = util.forcebytestr(inst)
1195 1197 p = None
1196 1198 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1197 1199 else:
1198 1200 p = None
1199 1201 fm.condwrite(p, 'defaulttemplate',
1200 1202 _("checking default template (%s)\n"), m)
1201 1203 fm.condwrite(not m, 'defaulttemplatenotfound',
1202 1204 _(" template '%s' not found\n"), "default")
1203 1205 if not p:
1204 1206 problems += 1
1205 1207 fm.condwrite(not p, '',
1206 1208 _(" (templates seem to have been installed incorrectly)\n"))
1207 1209
1208 1210 # editor
1209 1211 editor = ui.geteditor()
1210 1212 editor = util.expandpath(editor)
1211 1213 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1212 1214 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1213 1215 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1214 1216 _(" No commit editor set and can't find %s in PATH\n"
1215 1217 " (specify a commit editor in your configuration"
1216 1218 " file)\n"), not cmdpath and editor == 'vi' and editor)
1217 1219 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1218 1220 _(" Can't find editor '%s' in PATH\n"
1219 1221 " (specify a commit editor in your configuration"
1220 1222 " file)\n"), not cmdpath and editor)
1221 1223 if not cmdpath and editor != 'vi':
1222 1224 problems += 1
1223 1225
1224 1226 # check username
1225 1227 username = None
1226 1228 err = None
1227 1229 try:
1228 1230 username = ui.username()
1229 1231 except error.Abort as e:
1230 1232 err = util.forcebytestr(e)
1231 1233 problems += 1
1232 1234
1233 1235 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1234 1236 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1235 1237 " (specify a username in your configuration file)\n"), err)
1236 1238
1237 1239 fm.condwrite(not problems, '',
1238 1240 _("no problems detected\n"))
1239 1241 if not problems:
1240 1242 fm.data(problems=problems)
1241 1243 fm.condwrite(problems, 'problems',
1242 1244 _("%d problems detected,"
1243 1245 " please check your install!\n"), problems)
1244 1246 fm.end()
1245 1247
1246 1248 return problems
1247 1249
1248 1250 @command('debugknown', [], _('REPO ID...'), norepo=True)
1249 1251 def debugknown(ui, repopath, *ids, **opts):
1250 1252 """test whether node ids are known to a repo
1251 1253
1252 1254 Every ID must be a full-length hex node id string. Returns a list of 0s
1253 1255 and 1s indicating unknown/known.
1254 1256 """
1255 1257 opts = pycompat.byteskwargs(opts)
1256 1258 repo = hg.peer(ui, opts, repopath)
1257 1259 if not repo.capable('known'):
1258 1260 raise error.Abort("known() not supported by target repository")
1259 1261 flags = repo.known([bin(s) for s in ids])
1260 1262 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1261 1263
1262 1264 @command('debuglabelcomplete', [], _('LABEL...'))
1263 1265 def debuglabelcomplete(ui, repo, *args):
1264 1266 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1265 1267 debugnamecomplete(ui, repo, *args)
1266 1268
1267 1269 @command('debuglocks',
1268 1270 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1269 1271 ('W', 'force-wlock', None,
1270 1272 _('free the working state lock (DANGEROUS)')),
1271 1273 ('s', 'set-lock', None, _('set the store lock until stopped')),
1272 1274 ('S', 'set-wlock', None,
1273 1275 _('set the working state lock until stopped'))],
1274 1276 _('[OPTION]...'))
1275 1277 def debuglocks(ui, repo, **opts):
1276 1278 """show or modify state of locks
1277 1279
1278 1280 By default, this command will show which locks are held. This
1279 1281 includes the user and process holding the lock, the amount of time
1280 1282 the lock has been held, and the machine name where the process is
1281 1283 running if it's not local.
1282 1284
1283 1285 Locks protect the integrity of Mercurial's data, so should be
1284 1286 treated with care. System crashes or other interruptions may cause
1285 1287 locks to not be properly released, though Mercurial will usually
1286 1288 detect and remove such stale locks automatically.
1287 1289
1288 1290 However, detecting stale locks may not always be possible (for
1289 1291 instance, on a shared filesystem). Removing locks may also be
1290 1292 blocked by filesystem permissions.
1291 1293
1292 1294 Setting a lock will prevent other commands from changing the data.
1293 1295 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1294 1296 The set locks are removed when the command exits.
1295 1297
1296 1298 Returns 0 if no locks are held.
1297 1299
1298 1300 """
1299 1301
1300 1302 if opts.get(r'force_lock'):
1301 1303 repo.svfs.unlink('lock')
1302 1304 if opts.get(r'force_wlock'):
1303 1305 repo.vfs.unlink('wlock')
1304 1306 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1305 1307 return 0
1306 1308
1307 1309 locks = []
1308 1310 try:
1309 1311 if opts.get(r'set_wlock'):
1310 1312 try:
1311 1313 locks.append(repo.wlock(False))
1312 1314 except error.LockHeld:
1313 1315 raise error.Abort(_('wlock is already held'))
1314 1316 if opts.get(r'set_lock'):
1315 1317 try:
1316 1318 locks.append(repo.lock(False))
1317 1319 except error.LockHeld:
1318 1320 raise error.Abort(_('lock is already held'))
1319 1321 if len(locks):
1320 1322 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1321 1323 return 0
1322 1324 finally:
1323 1325 release(*locks)
1324 1326
1325 1327 now = time.time()
1326 1328 held = 0
1327 1329
1328 1330 def report(vfs, name, method):
1329 1331 # this causes stale locks to get reaped for more accurate reporting
1330 1332 try:
1331 1333 l = method(False)
1332 1334 except error.LockHeld:
1333 1335 l = None
1334 1336
1335 1337 if l:
1336 1338 l.release()
1337 1339 else:
1338 1340 try:
1339 1341 stat = vfs.lstat(name)
1340 1342 age = now - stat.st_mtime
1341 1343 user = util.username(stat.st_uid)
1342 1344 locker = vfs.readlock(name)
1343 1345 if ":" in locker:
1344 1346 host, pid = locker.split(':')
1345 1347 if host == socket.gethostname():
1346 1348 locker = 'user %s, process %s' % (user, pid)
1347 1349 else:
1348 1350 locker = 'user %s, process %s, host %s' \
1349 1351 % (user, pid, host)
1350 1352 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1351 1353 return 1
1352 1354 except OSError as e:
1353 1355 if e.errno != errno.ENOENT:
1354 1356 raise
1355 1357
1356 1358 ui.write(("%-6s free\n") % (name + ":"))
1357 1359 return 0
1358 1360
1359 1361 held += report(repo.svfs, "lock", repo.lock)
1360 1362 held += report(repo.vfs, "wlock", repo.wlock)
1361 1363
1362 1364 return held
1363 1365
1364 1366 @command('debugmergestate', [], '')
1365 1367 def debugmergestate(ui, repo, *args):
1366 1368 """print merge state
1367 1369
1368 1370 Use --verbose to print out information about whether v1 or v2 merge state
1369 1371 was chosen."""
1370 1372 def _hashornull(h):
1371 1373 if h == nullhex:
1372 1374 return 'null'
1373 1375 else:
1374 1376 return h
1375 1377
1376 1378 def printrecords(version):
1377 1379 ui.write(('* version %s records\n') % version)
1378 1380 if version == 1:
1379 1381 records = v1records
1380 1382 else:
1381 1383 records = v2records
1382 1384
1383 1385 for rtype, record in records:
1384 1386 # pretty print some record types
1385 1387 if rtype == 'L':
1386 1388 ui.write(('local: %s\n') % record)
1387 1389 elif rtype == 'O':
1388 1390 ui.write(('other: %s\n') % record)
1389 1391 elif rtype == 'm':
1390 1392 driver, mdstate = record.split('\0', 1)
1391 1393 ui.write(('merge driver: %s (state "%s")\n')
1392 1394 % (driver, mdstate))
1393 1395 elif rtype in 'FDC':
1394 1396 r = record.split('\0')
1395 1397 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1396 1398 if version == 1:
1397 1399 onode = 'not stored in v1 format'
1398 1400 flags = r[7]
1399 1401 else:
1400 1402 onode, flags = r[7:9]
1401 1403 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1402 1404 % (f, rtype, state, _hashornull(hash)))
1403 1405 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1404 1406 ui.write((' ancestor path: %s (node %s)\n')
1405 1407 % (afile, _hashornull(anode)))
1406 1408 ui.write((' other path: %s (node %s)\n')
1407 1409 % (ofile, _hashornull(onode)))
1408 1410 elif rtype == 'f':
1409 1411 filename, rawextras = record.split('\0', 1)
1410 1412 extras = rawextras.split('\0')
1411 1413 i = 0
1412 1414 extrastrings = []
1413 1415 while i < len(extras):
1414 1416 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1415 1417 i += 2
1416 1418
1417 1419 ui.write(('file extras: %s (%s)\n')
1418 1420 % (filename, ', '.join(extrastrings)))
1419 1421 elif rtype == 'l':
1420 1422 labels = record.split('\0', 2)
1421 1423 labels = [l for l in labels if len(l) > 0]
1422 1424 ui.write(('labels:\n'))
1423 1425 ui.write((' local: %s\n' % labels[0]))
1424 1426 ui.write((' other: %s\n' % labels[1]))
1425 1427 if len(labels) > 2:
1426 1428 ui.write((' base: %s\n' % labels[2]))
1427 1429 else:
1428 1430 ui.write(('unrecognized entry: %s\t%s\n')
1429 1431 % (rtype, record.replace('\0', '\t')))
1430 1432
1431 1433 # Avoid mergestate.read() since it may raise an exception for unsupported
1432 1434 # merge state records. We shouldn't be doing this, but this is OK since this
1433 1435 # command is pretty low-level.
1434 1436 ms = mergemod.mergestate(repo)
1435 1437
1436 1438 # sort so that reasonable information is on top
1437 1439 v1records = ms._readrecordsv1()
1438 1440 v2records = ms._readrecordsv2()
1439 1441 order = 'LOml'
1440 1442 def key(r):
1441 1443 idx = order.find(r[0])
1442 1444 if idx == -1:
1443 1445 return (1, r[1])
1444 1446 else:
1445 1447 return (0, idx)
1446 1448 v1records.sort(key=key)
1447 1449 v2records.sort(key=key)
1448 1450
1449 1451 if not v1records and not v2records:
1450 1452 ui.write(('no merge state found\n'))
1451 1453 elif not v2records:
1452 1454 ui.note(('no version 2 merge state\n'))
1453 1455 printrecords(1)
1454 1456 elif ms._v1v2match(v1records, v2records):
1455 1457 ui.note(('v1 and v2 states match: using v2\n'))
1456 1458 printrecords(2)
1457 1459 else:
1458 1460 ui.note(('v1 and v2 states mismatch: using v1\n'))
1459 1461 printrecords(1)
1460 1462 if ui.verbose:
1461 1463 printrecords(2)
1462 1464
1463 1465 @command('debugnamecomplete', [], _('NAME...'))
1464 1466 def debugnamecomplete(ui, repo, *args):
1465 1467 '''complete "names" - tags, open branch names, bookmark names'''
1466 1468
1467 1469 names = set()
1468 1470 # since we previously only listed open branches, we will handle that
1469 1471 # specially (after this for loop)
1470 1472 for name, ns in repo.names.iteritems():
1471 1473 if name != 'branches':
1472 1474 names.update(ns.listnames(repo))
1473 1475 names.update(tag for (tag, heads, tip, closed)
1474 1476 in repo.branchmap().iterbranches() if not closed)
1475 1477 completions = set()
1476 1478 if not args:
1477 1479 args = ['']
1478 1480 for a in args:
1479 1481 completions.update(n for n in names if n.startswith(a))
1480 1482 ui.write('\n'.join(sorted(completions)))
1481 1483 ui.write('\n')
1482 1484
1483 1485 @command('debugobsolete',
1484 1486 [('', 'flags', 0, _('markers flag')),
1485 1487 ('', 'record-parents', False,
1486 1488 _('record parent information for the precursor')),
1487 1489 ('r', 'rev', [], _('display markers relevant to REV')),
1488 1490 ('', 'exclusive', False, _('restrict display to markers only '
1489 1491 'relevant to REV')),
1490 1492 ('', 'index', False, _('display index of the marker')),
1491 1493 ('', 'delete', [], _('delete markers specified by indices')),
1492 1494 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1493 1495 _('[OBSOLETED [REPLACEMENT ...]]'))
1494 1496 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1495 1497 """create arbitrary obsolete marker
1496 1498
1497 1499 With no arguments, displays the list of obsolescence markers."""
1498 1500
1499 1501 opts = pycompat.byteskwargs(opts)
1500 1502
1501 1503 def parsenodeid(s):
1502 1504 try:
1503 1505 # We do not use revsingle/revrange functions here to accept
1504 1506 # arbitrary node identifiers, possibly not present in the
1505 1507 # local repository.
1506 1508 n = bin(s)
1507 1509 if len(n) != len(nullid):
1508 1510 raise TypeError()
1509 1511 return n
1510 1512 except TypeError:
1511 1513 raise error.Abort('changeset references must be full hexadecimal '
1512 1514 'node identifiers')
1513 1515
1514 1516 if opts.get('delete'):
1515 1517 indices = []
1516 1518 for v in opts.get('delete'):
1517 1519 try:
1518 1520 indices.append(int(v))
1519 1521 except ValueError:
1520 1522 raise error.Abort(_('invalid index value: %r') % v,
1521 1523 hint=_('use integers for indices'))
1522 1524
1523 1525 if repo.currenttransaction():
1524 1526 raise error.Abort(_('cannot delete obsmarkers in the middle '
1525 1527 'of a transaction.'))
1526 1528
1527 1529 with repo.lock():
1528 1530 n = repair.deleteobsmarkers(repo.obsstore, indices)
1529 1531 ui.write(_('deleted %i obsolescence markers\n') % n)
1530 1532
1531 1533 return
1532 1534
1533 1535 if precursor is not None:
1534 1536 if opts['rev']:
1535 1537 raise error.Abort('cannot select revision when creating marker')
1536 1538 metadata = {}
1537 1539 metadata['user'] = opts['user'] or ui.username()
1538 1540 succs = tuple(parsenodeid(succ) for succ in successors)
1539 1541 l = repo.lock()
1540 1542 try:
1541 1543 tr = repo.transaction('debugobsolete')
1542 1544 try:
1543 1545 date = opts.get('date')
1544 1546 if date:
1545 1547 date = util.parsedate(date)
1546 1548 else:
1547 1549 date = None
1548 1550 prec = parsenodeid(precursor)
1549 1551 parents = None
1550 1552 if opts['record_parents']:
1551 1553 if prec not in repo.unfiltered():
1552 1554 raise error.Abort('cannot use --record-parents on '
1553 1555 'unknown changesets')
1554 1556 parents = repo.unfiltered()[prec].parents()
1555 1557 parents = tuple(p.node() for p in parents)
1556 1558 repo.obsstore.create(tr, prec, succs, opts['flags'],
1557 1559 parents=parents, date=date,
1558 1560 metadata=metadata, ui=ui)
1559 1561 tr.close()
1560 1562 except ValueError as exc:
1561 1563 raise error.Abort(_('bad obsmarker input: %s') % exc)
1562 1564 finally:
1563 1565 tr.release()
1564 1566 finally:
1565 1567 l.release()
1566 1568 else:
1567 1569 if opts['rev']:
1568 1570 revs = scmutil.revrange(repo, opts['rev'])
1569 1571 nodes = [repo[r].node() for r in revs]
1570 1572 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1571 1573 exclusive=opts['exclusive']))
1572 1574 markers.sort(key=lambda x: x._data)
1573 1575 else:
1574 1576 markers = obsutil.getmarkers(repo)
1575 1577
1576 1578 markerstoiter = markers
1577 1579 isrelevant = lambda m: True
1578 1580 if opts.get('rev') and opts.get('index'):
1579 1581 markerstoiter = obsutil.getmarkers(repo)
1580 1582 markerset = set(markers)
1581 1583 isrelevant = lambda m: m in markerset
1582 1584
1583 1585 fm = ui.formatter('debugobsolete', opts)
1584 1586 for i, m in enumerate(markerstoiter):
1585 1587 if not isrelevant(m):
1586 1588 # A marker can be irrelevant when we're iterating over a set
1587 1589 # of markers (markerstoiter) that is bigger than the set of
1588 1590 # markers we want to display (markers). This can happen if
1589 1591 # both --index and --rev are provided: we need to iterate
1590 1592 # over all of the markers to get the correct indices, but
1591 1593 # only display the ones that are relevant to the --rev value.
1593 1595 continue
1594 1596 fm.startitem()
1595 1597 ind = i if opts.get('index') else None
1596 1598 cmdutil.showmarker(fm, m, index=ind)
1597 1599 fm.end()
1598 1600
1599 1601 @command('debugpathcomplete',
1600 1602 [('f', 'full', None, _('complete an entire path')),
1601 1603 ('n', 'normal', None, _('show only normal files')),
1602 1604 ('a', 'added', None, _('show only added files')),
1603 1605 ('r', 'removed', None, _('show only removed files'))],
1604 1606 _('FILESPEC...'))
1605 1607 def debugpathcomplete(ui, repo, *specs, **opts):
1606 1608 '''complete part or all of a tracked path
1607 1609
1608 1610 This command supports shells that offer path name completion. It
1609 1611 currently completes only files already known to the dirstate.
1610 1612
1611 1613 Completion extends only to the next path segment unless
1612 1614 --full is specified, in which case entire paths are used.'''
1613 1615
1614 1616 def complete(path, acceptable):
1615 1617 dirstate = repo.dirstate
1616 1618 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1617 1619 rootdir = repo.root + pycompat.ossep
1618 1620 if spec != repo.root and not spec.startswith(rootdir):
1619 1621 return [], []
1620 1622 if os.path.isdir(spec):
1621 1623 spec += '/'
1622 1624 spec = spec[len(rootdir):]
1623 1625 fixpaths = pycompat.ossep != '/'
1624 1626 if fixpaths:
1625 1627 spec = spec.replace(pycompat.ossep, '/')
1626 1628 speclen = len(spec)
1627 1629 fullpaths = opts[r'full']
1628 1630 files, dirs = set(), set()
1629 1631 adddir, addfile = dirs.add, files.add
1630 1632 for f, st in dirstate.iteritems():
1631 1633 if f.startswith(spec) and st[0] in acceptable:
1632 1634 if fixpaths:
1633 1635 f = f.replace('/', pycompat.ossep)
1634 1636 if fullpaths:
1635 1637 addfile(f)
1636 1638 continue
1637 1639 s = f.find(pycompat.ossep, speclen)
1638 1640 if s >= 0:
1639 1641 adddir(f[:s])
1640 1642 else:
1641 1643 addfile(f)
1642 1644 return files, dirs
1643 1645
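# dirstate status codes used below: 'n' normal, 'm' merged, 'a' added,
# 'r' removed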
1644 1646 acceptable = ''
1645 1647 if opts[r'normal']:
1646 1648 acceptable += 'nm'
1647 1649 if opts[r'added']:
1648 1650 acceptable += 'a'
1649 1651 if opts[r'removed']:
1650 1652 acceptable += 'r'
1651 1653 cwd = repo.getcwd()
1652 1654 if not specs:
1653 1655 specs = ['.']
1654 1656
1655 1657 files, dirs = set(), set()
1656 1658 for spec in specs:
1657 1659 f, d = complete(spec, acceptable or 'nmar')
1658 1660 files.update(f)
1659 1661 dirs.update(d)
1660 1662 files.update(dirs)
1661 1663 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1662 1664 ui.write('\n')
1663 1665
1664 1666 @command('debugpickmergetool',
1665 1667 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1666 1668 ('', 'changedelete', None, _('emulate merging change and delete')),
1667 1669 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1668 1670 _('[PATTERN]...'),
1669 1671 inferrepo=True)
1670 1672 def debugpickmergetool(ui, repo, *pats, **opts):
1671 1673 """examine which merge tool is chosen for specified file
1672 1674
1673 1675 As described in :hg:`help merge-tools`, Mercurial examines the
1674 1676 configurations below, in this order, to decide which merge tool
1675 1677 is chosen for the specified file.
1676 1678
1677 1679 1. ``--tool`` option
1678 1680 2. ``HGMERGE`` environment variable
1679 1681 3. configurations in ``merge-patterns`` section
1680 1682 4. configuration of ``ui.merge``
1681 1683 5. configurations in ``merge-tools`` section
1682 1684 6. ``hgmerge`` tool (for historical reasons only)
1683 1685 7. default tool for fallback (``:merge`` or ``:prompt``)
1684 1686
1685 1687 This command writes out the examination result in the style below::
1686 1688
1687 1689 FILE = MERGETOOL
1688 1690
1689 1691 By default, all files known in the first parent context of the
1690 1692 working directory are examined. Use file patterns and/or -I/-X
1691 1693 options to limit target files. -r/--rev is also useful to examine
1692 1694 files in another context without actually updating to it.
1693 1695
1694 1696 With --debug, this command also shows warning messages while
1695 1697 matching against ``merge-patterns`` and so on. It is recommended
1696 1698 to use this option with explicit file patterns and/or -I/-X
1697 1699 options, because this option increases the amount of output per
1698 1700 file according to the configurations in hgrc.
1699 1701
1700 1702 With -v/--verbose, this command first shows the configurations
1701 1703 below (only if specified).
1702 1704
1703 1705 - ``--tool`` option
1704 1706 - ``HGMERGE`` environment variable
1705 1707 - configuration of ``ui.merge``
1706 1708
1707 1709 If a merge tool is chosen before matching against
1708 1710 ``merge-patterns``, this command can't show any helpful
1709 1711 information, even with --debug. In such a case, the information
1710 1712 above is useful for understanding why that merge tool was chosen.
1711 1713 """
1712 1714 opts = pycompat.byteskwargs(opts)
1713 1715 overrides = {}
1714 1716 if opts['tool']:
1715 1717 overrides[('ui', 'forcemerge')] = opts['tool']
1716 1718 ui.note(('with --tool %r\n') % (opts['tool']))
1717 1719
1718 1720 with ui.configoverride(overrides, 'debugmergepatterns'):
1719 1721 hgmerge = encoding.environ.get("HGMERGE")
1720 1722 if hgmerge is not None:
1721 1723 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1722 1724 uimerge = ui.config("ui", "merge")
1723 1725 if uimerge:
1724 1726 ui.note(('with ui.merge=%r\n') % (uimerge))
1725 1727
1726 1728 ctx = scmutil.revsingle(repo, opts.get('rev'))
1727 1729 m = scmutil.match(ctx, pats, opts)
1728 1730 changedelete = opts['changedelete']
1729 1731 for path in ctx.walk(m):
1730 1732 fctx = ctx[path]
1731 1733 try:
1732 1734 if not ui.debugflag:
1733 1735 ui.pushbuffer(error=True)
1734 1736 tool, toolpath = filemerge._picktool(repo, ui, path,
1735 1737 fctx.isbinary(),
1736 1738 'l' in fctx.flags(),
1737 1739 changedelete)
1738 1740 finally:
1739 1741 if not ui.debugflag:
1740 1742 ui.popbuffer()
1741 1743 ui.write(('%s = %s\n') % (path, tool))
1742 1744
1743 1745 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1744 1746 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1745 1747 '''access the pushkey key/value protocol
1746 1748
1747 1749 With two args, list the keys in the given namespace.
1748 1750
1749 1751 With five args, set a key to new if it currently is set to old.
1750 1752 Reports success or failure.
1751 1753 '''
1752 1754
1753 1755 target = hg.peer(ui, {}, repopath)
1754 1756 if keyinfo:
1755 1757 key, old, new = keyinfo
1756 1758 r = target.pushkey(namespace, key, old, new)
1757 1759 ui.status(str(r) + '\n')
1758 1760 return not r
1759 1761 else:
1760 1762 for k, v in sorted(target.listkeys(namespace).iteritems()):
1761 1763 ui.write("%s\t%s\n" % (util.escapestr(k),
1762 1764 util.escapestr(v)))
1763 1765
1764 1766 @command('debugpvec', [], _('A B'))
1765 1767 def debugpvec(ui, repo, a, b=None):
1766 1768 ca = scmutil.revsingle(repo, a)
1767 1769 cb = scmutil.revsingle(repo, b)
1768 1770 pa = pvec.ctxpvec(ca)
1769 1771 pb = pvec.ctxpvec(cb)
1770 1772 if pa == pb:
1771 1773 rel = "="
1772 1774 elif pa > pb:
1773 1775 rel = ">"
1774 1776 elif pa < pb:
1775 1777 rel = "<"
1776 1778 elif pa | pb:
1777 1779 rel = "|"
1778 1780 ui.write(_("a: %s\n") % pa)
1779 1781 ui.write(_("b: %s\n") % pb)
1780 1782 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1781 1783 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1782 1784 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1783 1785 pa.distance(pb), rel))
1784 1786
1785 1787 @command('debugrebuilddirstate|debugrebuildstate',
1786 1788 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1787 1789 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1788 1790 'the working copy parent')),
1789 1791 ],
1790 1792 _('[-r REV]'))
1791 1793 def debugrebuilddirstate(ui, repo, rev, **opts):
1792 1794 """rebuild the dirstate as it would look like for the given revision
1793 1795
1794 1796 If no revision is specified, the first current parent will be used.
1795 1797
1796 1798 The dirstate will be set to the files of the given revision.
1797 1799 The actual working directory content or existing dirstate
1798 1800 information such as adds or removes is not considered.
1799 1801
1800 1802 ``minimal`` will only rebuild the dirstate status for files that claim to be
1801 1803 tracked but are not in the parent manifest, or that exist in the parent
1802 1804 manifest but are not in the dirstate. It will not change adds, removes, or
1803 1805 modified files that are in the working copy parent.
1804 1806
1805 1807 One use of this command is to make the next :hg:`status` invocation
1806 1808 check the actual file content.
1807 1809 """
1808 1810 ctx = scmutil.revsingle(repo, rev)
1809 1811 with repo.wlock():
1810 1812 dirstate = repo.dirstate
1811 1813 changedfiles = None
1812 1814 # See command doc for what minimal does.
1813 1815 if opts.get(r'minimal'):
1814 1816 manifestfiles = set(ctx.manifest().keys())
1815 1817 dirstatefiles = set(dirstate)
1816 1818 manifestonly = manifestfiles - dirstatefiles
1817 1819 dsonly = dirstatefiles - manifestfiles
1818 1820 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1819 1821 changedfiles = manifestonly | dsnotadded
1820 1822
1821 1823 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1822 1824
1823 1825 @command('debugrebuildfncache', [], '')
1824 1826 def debugrebuildfncache(ui, repo):
1825 1827 """rebuild the fncache file"""
1826 1828 repair.rebuildfncache(ui, repo)
1827 1829
1828 1830 @command('debugrename',
1829 1831 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1830 1832 _('[-r REV] FILE'))
1831 1833 def debugrename(ui, repo, file1, *pats, **opts):
1832 1834 """dump rename information"""
1833 1835
1834 1836 opts = pycompat.byteskwargs(opts)
1835 1837 ctx = scmutil.revsingle(repo, opts.get('rev'))
1836 1838 m = scmutil.match(ctx, (file1,) + pats, opts)
1837 1839 for abs in ctx.walk(m):
1838 1840 fctx = ctx[abs]
1839 1841 o = fctx.filelog().renamed(fctx.filenode())
1840 1842 rel = m.rel(abs)
1841 1843 if o:
1842 1844 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1843 1845 else:
1844 1846 ui.write(_("%s not renamed\n") % rel)
1845 1847
1846 1848 @command('debugrevlog', cmdutil.debugrevlogopts +
1847 1849 [('d', 'dump', False, _('dump index data'))],
1848 1850 _('-c|-m|FILE'),
1849 1851 optionalrepo=True)
1850 1852 def debugrevlog(ui, repo, file_=None, **opts):
1851 1853 """show data and statistics about a revlog"""
1852 1854 opts = pycompat.byteskwargs(opts)
1853 1855 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1854 1856
1855 1857 if opts.get("dump"):
1856 1858 numrevs = len(r)
1857 1859 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1858 1860 " rawsize totalsize compression heads chainlen\n"))
1859 1861 ts = 0
1860 1862 heads = set()
1861 1863
1862 1864 for rev in xrange(numrevs):
1863 1865 dbase = r.deltaparent(rev)
1864 1866 if dbase == -1:
1865 1867 dbase = rev
1866 1868 cbase = r.chainbase(rev)
1867 1869 clen = r.chainlen(rev)
1868 1870 p1, p2 = r.parentrevs(rev)
1869 1871 rs = r.rawsize(rev)
1870 1872 ts = ts + rs
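# running head count: a revision's parents stop being heads as soon as
# the revision itself is added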
1871 1873 heads -= set(r.parentrevs(rev))
1872 1874 heads.add(rev)
1873 1875 try:
1874 1876 compression = ts / r.end(rev)
1875 1877 except ZeroDivisionError:
1876 1878 compression = 0
1877 1879 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1878 1880 "%11d %5d %8d\n" %
1879 1881 (rev, p1, p2, r.start(rev), r.end(rev),
1880 1882 r.start(dbase), r.start(cbase),
1881 1883 r.start(p1), r.start(p2),
1882 1884 rs, ts, compression, len(heads), clen))
1883 1885 return 0
1884 1886
1885 1887 v = r.version
1886 1888 format = v & 0xFFFF
1887 1889 flags = []
1888 1890 gdelta = False
1889 1891 if v & revlog.FLAG_INLINE_DATA:
1890 1892 flags.append('inline')
1891 1893 if v & revlog.FLAG_GENERALDELTA:
1892 1894 gdelta = True
1893 1895 flags.append('generaldelta')
1894 1896 if not flags:
1895 1897 flags = ['(none)']
1896 1898
1897 1899 nummerges = 0
1898 1900 numfull = 0
1899 1901 numprev = 0
1900 1902 nump1 = 0
1901 1903 nump2 = 0
1902 1904 numother = 0
1903 1905 nump1prev = 0
1904 1906 nump2prev = 0
1905 1907 chainlengths = []
1906 1908 chainbases = []
1907 1909 chainspans = []
1908 1910
1909 1911 datasize = [None, 0, 0]
1910 1912 fullsize = [None, 0, 0]
1911 1913 deltasize = [None, 0, 0]
1912 1914 chunktypecounts = {}
1913 1915 chunktypesizes = {}
1914 1916
1915 1917 def addsize(size, l):
1916 1918 if l[0] is None or size < l[0]:
1917 1919 l[0] = size
1918 1920 if size > l[1]:
1919 1921 l[1] = size
1920 1922 l[2] += size
1921 1923
1922 1924 numrevs = len(r)
1923 1925 for rev in xrange(numrevs):
1924 1926 p1, p2 = r.parentrevs(rev)
1925 1927 delta = r.deltaparent(rev)
1926 1928 if format > 0:
1927 1929 addsize(r.rawsize(rev), datasize)
1928 1930 if p2 != nullrev:
1929 1931 nummerges += 1
1930 1932 size = r.length(rev)
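# a revision whose delta parent is nullrev stores a full snapshot and
# starts a new delta chain; otherwise it extends its delta parent's
# chain, and the chain span records the on-disk distance from the chain
# base to the end of this revision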
1931 1933 if delta == nullrev:
1932 1934 chainlengths.append(0)
1933 1935 chainbases.append(r.start(rev))
1934 1936 chainspans.append(size)
1935 1937 numfull += 1
1936 1938 addsize(size, fullsize)
1937 1939 else:
1938 1940 chainlengths.append(chainlengths[delta] + 1)
1939 1941 baseaddr = chainbases[delta]
1940 1942 revaddr = r.start(rev)
1941 1943 chainbases.append(baseaddr)
1942 1944 chainspans.append((revaddr - baseaddr) + size)
1943 1945 addsize(size, deltasize)
1944 1946 if delta == rev - 1:
1945 1947 numprev += 1
1946 1948 if delta == p1:
1947 1949 nump1prev += 1
1948 1950 elif delta == p2:
1949 1951 nump2prev += 1
1950 1952 elif delta == p1:
1951 1953 nump1 += 1
1952 1954 elif delta == p2:
1953 1955 nump2 += 1
1954 1956 elif delta != nullrev:
1955 1957 numother += 1
1956 1958
1957 1959 # Obtain data on the raw chunks in the revlog.
1958 1960 segment = r._getsegmentforrevs(rev, rev)[1]
1959 1961 if segment:
1960 1962 chunktype = bytes(segment[0:1])
1961 1963 else:
1962 1964 chunktype = 'empty'
1963 1965
1964 1966 if chunktype not in chunktypecounts:
1965 1967 chunktypecounts[chunktype] = 0
1966 1968 chunktypesizes[chunktype] = 0
1967 1969
1968 1970 chunktypecounts[chunktype] += 1
1969 1971 chunktypesizes[chunktype] += size
1970 1972
1971 1973 # Adjust size min value for empty cases
1972 1974 for size in (datasize, fullsize, deltasize):
1973 1975 if size[0] is None:
1974 1976 size[0] = 0
1975 1977
1976 1978 numdeltas = numrevs - numfull
1977 1979 numoprev = numprev - nump1prev - nump2prev
1978 1980 totalrawsize = datasize[2]
1979 1981 datasize[2] /= numrevs
1980 1982 fulltotal = fullsize[2]
1981 1983 fullsize[2] /= numfull
1982 1984 deltatotal = deltasize[2]
1983 1985 if numrevs - numfull > 0:
1984 1986 deltasize[2] /= numrevs - numfull
1985 1987 totalsize = fulltotal + deltatotal
1986 1988 avgchainlen = sum(chainlengths) / numrevs
1987 1989 maxchainlen = max(chainlengths)
1988 1990 maxchainspan = max(chainspans)
1989 1991 compratio = 1
1990 1992 if totalsize:
1991 1993 compratio = totalrawsize / totalsize
1992 1994
1993 1995 basedfmtstr = '%%%dd\n'
1994 1996 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1995 1997
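# dfmtstr() right-aligns integers to the width of the largest expected
# value; pcfmtstr() additionally leaves room for a '(xx.xx%)' share
# computed by pcfmt() below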
1996 1998 def dfmtstr(max):
1997 1999 return basedfmtstr % len(str(max))
1998 2000 def pcfmtstr(max, padding=0):
1999 2001 return basepcfmtstr % (len(str(max)), ' ' * padding)
2000 2002
2001 2003 def pcfmt(value, total):
2002 2004 if total:
2003 2005 return (value, 100 * float(value) / total)
2004 2006 else:
2005 2007 return value, 100.0
2006 2008
2007 2009 ui.write(('format : %d\n') % format)
2008 2010 ui.write(('flags : %s\n') % ', '.join(flags))
2009 2011
2010 2012 ui.write('\n')
2011 2013 fmt = pcfmtstr(totalsize)
2012 2014 fmt2 = dfmtstr(totalsize)
2013 2015 ui.write(('revisions : ') + fmt2 % numrevs)
2014 2016 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2015 2017 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2016 2018 ui.write(('revisions : ') + fmt2 % numrevs)
2017 2019 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2018 2020 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2019 2021 ui.write(('revision size : ') + fmt2 % totalsize)
2020 2022 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2021 2023 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2022 2024
2023 2025 def fmtchunktype(chunktype):
2024 2026 if chunktype == 'empty':
2025 2027 return ' %s : ' % chunktype
2026 2028 elif chunktype in pycompat.bytestr(string.ascii_letters):
2027 2029 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2028 2030 else:
2029 2031 return ' 0x%s : ' % hex(chunktype)
2030 2032
2031 2033 ui.write('\n')
2032 2034 ui.write(('chunks : ') + fmt2 % numrevs)
2033 2035 for chunktype in sorted(chunktypecounts):
2034 2036 ui.write(fmtchunktype(chunktype))
2035 2037 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2036 2038 ui.write(('chunks size : ') + fmt2 % totalsize)
2037 2039 for chunktype in sorted(chunktypecounts):
2038 2040 ui.write(fmtchunktype(chunktype))
2039 2041 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2040 2042
2041 2043 ui.write('\n')
2042 2044 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2043 2045 ui.write(('avg chain length : ') + fmt % avgchainlen)
2044 2046 ui.write(('max chain length : ') + fmt % maxchainlen)
2045 2047 ui.write(('max chain reach : ') + fmt % maxchainspan)
2046 2048 ui.write(('compression ratio : ') + fmt % compratio)
2047 2049
2048 2050 if format > 0:
2049 2051 ui.write('\n')
2050 2052 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2051 2053 % tuple(datasize))
2052 2054 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2053 2055 % tuple(fullsize))
2054 2056 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2055 2057 % tuple(deltasize))
2056 2058
2057 2059 if numdeltas > 0:
2058 2060 ui.write('\n')
2059 2061 fmt = pcfmtstr(numdeltas)
2060 2062 fmt2 = pcfmtstr(numdeltas, 4)
2061 2063 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2062 2064 if numprev > 0:
2063 2065 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2064 2066 numprev))
2065 2067 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2066 2068 numprev))
2067 2069 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2068 2070 numprev))
2069 2071 if gdelta:
2070 2072 ui.write(('deltas against p1 : ')
2071 2073 + fmt % pcfmt(nump1, numdeltas))
2072 2074 ui.write(('deltas against p2 : ')
2073 2075 + fmt % pcfmt(nump2, numdeltas))
2074 2076 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2075 2077 numdeltas))
2076 2078
2077 2079 @command('debugrevspec',
2078 2080 [('', 'optimize', None,
2079 2081 _('print parsed tree after optimizing (DEPRECATED)')),
2080 2082 ('', 'show-revs', True, _('print list of result revisions (default)')),
2081 2083 ('s', 'show-set', None, _('print internal representation of result set')),
2082 2084 ('p', 'show-stage', [],
2083 2085 _('print parsed tree at the given stage'), _('NAME')),
2084 2086 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2085 2087 ('', 'verify-optimized', False, _('verify optimized result')),
2086 2088 ],
2087 2089 ('REVSPEC'))
2088 2090 def debugrevspec(ui, repo, expr, **opts):
2089 2091 """parse and apply a revision specification
2090 2092
2091 2093 Use the -p/--show-stage option to print the parsed tree at the given
2092 2094 stages. Use -p all to print the tree at every stage.
2093 2095
2094 2096 Use the --no-show-revs option with -s or -p to print only the set
2095 2097 representation or the parsed tree, respectively.
2096 2098
2097 2099 Use --verify-optimized to compare the optimized result with the unoptimized
2098 2100 one. Returns 1 if the optimized result differs.
2099 2101 """
2100 2102 opts = pycompat.byteskwargs(opts)
2101 2103 aliases = ui.configitems('revsetalias')
2102 2104 stages = [
2103 2105 ('parsed', lambda tree: tree),
2104 2106 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2105 2107 ui.warn)),
2106 2108 ('concatenated', revsetlang.foldconcat),
2107 2109 ('analyzed', revsetlang.analyze),
2108 2110 ('optimized', revsetlang.optimize),
2109 2111 ]
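# each stage transforms the tree produced by the previous one; the stage
# names above are the values accepted by -p/--show-stage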
2110 2112 if opts['no_optimized']:
2111 2113 stages = stages[:-1]
2112 2114 if opts['verify_optimized'] and opts['no_optimized']:
2113 2115 raise error.Abort(_('cannot use --verify-optimized with '
2114 2116 '--no-optimized'))
2115 2117 stagenames = set(n for n, f in stages)
2116 2118
2117 2119 showalways = set()
2118 2120 showchanged = set()
2119 2121 if ui.verbose and not opts['show_stage']:
2120 2122 # show parsed tree by --verbose (deprecated)
2121 2123 showalways.add('parsed')
2122 2124 showchanged.update(['expanded', 'concatenated'])
2123 2125 if opts['optimize']:
2124 2126 showalways.add('optimized')
2125 2127 if opts['show_stage'] and opts['optimize']:
2126 2128 raise error.Abort(_('cannot use --optimize with --show-stage'))
2127 2129 if opts['show_stage'] == ['all']:
2128 2130 showalways.update(stagenames)
2129 2131 else:
2130 2132 for n in opts['show_stage']:
2131 2133 if n not in stagenames:
2132 2134 raise error.Abort(_('invalid stage name: %s') % n)
2133 2135 showalways.update(opts['show_stage'])
2134 2136
2135 2137 treebystage = {}
2136 2138 printedtree = None
2137 2139 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2138 2140 for n, f in stages:
2139 2141 treebystage[n] = tree = f(tree)
2140 2142 if n in showalways or (n in showchanged and tree != printedtree):
2141 2143 if opts['show_stage'] or n != 'parsed':
2142 2144 ui.write(("* %s:\n") % n)
2143 2145 ui.write(revsetlang.prettyformat(tree), "\n")
2144 2146 printedtree = tree
2145 2147
2146 2148 if opts['verify_optimized']:
2147 2149 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2148 2150 brevs = revset.makematcher(treebystage['optimized'])(repo)
2149 2151 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2150 2152 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2151 2153 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2152 2154 arevs = list(arevs)
2153 2155 brevs = list(brevs)
2154 2156 if arevs == brevs:
2155 2157 return 0
2156 2158 ui.write(('--- analyzed\n'), label='diff.file_a')
2157 2159 ui.write(('+++ optimized\n'), label='diff.file_b')
2158 2160 sm = difflib.SequenceMatcher(None, arevs, brevs)
2159 2161 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2160 2162 if tag in ('delete', 'replace'):
2161 2163 for c in arevs[alo:ahi]:
2162 2164 ui.write('-%s\n' % c, label='diff.deleted')
2163 2165 if tag in ('insert', 'replace'):
2164 2166 for c in brevs[blo:bhi]:
2165 2167 ui.write('+%s\n' % c, label='diff.inserted')
2166 2168 if tag == 'equal':
2167 2169 for c in arevs[alo:ahi]:
2168 2170 ui.write(' %s\n' % c)
2169 2171 return 1
2170 2172
2171 2173 func = revset.makematcher(tree)
2172 2174 revs = func(repo)
2173 2175 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2174 2176 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2175 2177 if not opts['show_revs']:
2176 2178 return
2177 2179 for c in revs:
2178 2180 ui.write("%s\n" % c)
2179 2181
2180 2182 @command('debugsetparents', [], _('REV1 [REV2]'))
2181 2183 def debugsetparents(ui, repo, rev1, rev2=None):
2182 2184 """manually set the parents of the current working directory
2183 2185
2184 2186 This is useful for writing repository conversion tools, but should
2185 2187 be used with care. For example, neither the working directory nor the
2186 2188 dirstate is updated, so file status may be incorrect after running this
2187 2189 command.
2188 2190
2189 2191 Returns 0 on success.
2190 2192 """
2191 2193
2192 2194 r1 = scmutil.revsingle(repo, rev1).node()
2193 2195 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2194 2196
2195 2197 with repo.wlock():
2196 2198 repo.setparents(r1, r2)
2197 2199
2198 2200 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2199 2201 def debugssl(ui, repo, source=None, **opts):
2200 2202 '''test a secure connection to a server
2201 2203
2202 2204 This builds the certificate chain for the server on Windows, installing the
2203 2205 missing intermediates and trusted root via Windows Update if necessary. It
2204 2206 does nothing on other platforms.
2205 2207
2206 2208 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2207 2209 that server is used. See :hg:`help urls` for more information.
2208 2210
2209 2211 If the update succeeds, retry the original operation. Otherwise, the cause
2210 2212 of the SSL error is likely another issue.
2211 2213 '''
2212 2214 if not pycompat.iswindows:
2213 2215 raise error.Abort(_('certificate chain building is only possible on '
2214 2216 'Windows'))
2215 2217
2216 2218 if not source:
2217 2219 if not repo:
2218 2220 raise error.Abort(_("there is no Mercurial repository here, and no "
2219 2221 "server specified"))
2220 2222 source = "default"
2221 2223
2222 2224 source, branches = hg.parseurl(ui.expandpath(source))
2223 2225 url = util.url(source)
2224 2226 addr = None
2225 2227
2226 2228 defaultport = {'https': 443, 'ssh': 22}
2227 2229 if url.scheme in defaultport:
2228 2230 try:
2229 2231 addr = (url.host, int(url.port or defaultport[url.scheme]))
2230 2232 except ValueError:
2231 2233 raise error.Abort(_("malformed port number in URL"))
2232 2234 else:
2233 2235 raise error.Abort(_("only https and ssh connections are supported"))
2234 2236
2235 2237 from . import win32
2236 2238
2237 2239 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2238 2240 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2239 2241
2240 2242 try:
2241 2243 s.connect(addr)
2242 2244 cert = s.getpeercert(True)
2243 2245
2244 2246 ui.status(_('checking the certificate chain for %s\n') % url.host)
2245 2247
2246 2248 complete = win32.checkcertificatechain(cert, build=False)
2247 2249
2248 2250 if not complete:
2249 2251 ui.status(_('certificate chain is incomplete, updating... '))
2250 2252
2251 2253 if not win32.checkcertificatechain(cert):
2252 2254 ui.status(_('failed.\n'))
2253 2255 else:
2254 2256 ui.status(_('done.\n'))
2255 2257 else:
2256 2258 ui.status(_('full certificate chain is available\n'))
2257 2259 finally:
2258 2260 s.close()
2259 2261
2260 2262 @command('debugsub',
2261 2263 [('r', 'rev', '',
2262 2264 _('revision to check'), _('REV'))],
2263 2265 _('[-r REV] [REV]'))
2264 2266 def debugsub(ui, repo, rev=None):
2265 2267 ctx = scmutil.revsingle(repo, rev, None)
2266 2268 for k, v in sorted(ctx.substate.items()):
2267 2269 ui.write(('path %s\n') % k)
2268 2270 ui.write((' source %s\n') % v[0])
2269 2271 ui.write((' revision %s\n') % v[1])
2270 2272
2271 2273 @command('debugsuccessorssets',
2272 2274 [('', 'closest', False, _('return closest successors sets only'))],
2273 2275 _('[REV]'))
2274 2276 def debugsuccessorssets(ui, repo, *revs, **opts):
2275 2277 """show set of successors for revision
2276 2278
2277 2279 A successors set of changeset A is a consistent group of revisions that
2278 2280 succeed A. It contains non-obsolete changesets only, unless the
2279 2281 --closest option is set.
2280 2282
2281 2283 In most cases a changeset A has a single successors set containing a single
2282 2284 successor (changeset A replaced by A').
2283 2285
2284 2286 A changeset that is made obsolete with no successors is called "pruned".
2285 2287 Such changesets have no successors sets at all.
2286 2288
2287 2289 A changeset that has been "split" will have a successors set containing
2288 2290 more than one successor.
2289 2291
2290 2292 A changeset that has been rewritten in multiple different ways is called
2291 2293 "divergent". Such changesets have multiple successor sets (each of which
2292 2294 may also be split, i.e. have multiple successors).
2293 2295
2294 2296 Results are displayed as follows::
2295 2297
2296 2298 <rev1>
2297 2299 <successors-1A>
2298 2300 <rev2>
2299 2301 <successors-2A>
2300 2302 <successors-2B1> <successors-2B2> <successors-2B3>
2301 2303
2302 2304 Here rev2 has two possible (i.e. divergent) successors sets. The first
2303 2305 holds one element, whereas the second holds three (i.e. the changeset has
2304 2306 been split).
2305 2307 """
2306 2308 # passed to successorssets caching computation from one call to another
2307 2309 cache = {}
2308 2310 ctx2str = str
2309 2311 node2str = short
2310 2312 for rev in scmutil.revrange(repo, revs):
2311 2313 ctx = repo[rev]
2312 2314 ui.write('%s\n'% ctx2str(ctx))
2313 2315 for succsset in obsutil.successorssets(repo, ctx.node(),
2314 2316 closest=opts[r'closest'],
2315 2317 cache=cache):
2316 2318 if succsset:
2317 2319 ui.write(' ')
2318 2320 ui.write(node2str(succsset[0]))
2319 2321 for node in succsset[1:]:
2320 2322 ui.write(' ')
2321 2323 ui.write(node2str(node))
2322 2324 ui.write('\n')
2323 2325
2324 2326 @command('debugtemplate',
2325 2327 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2326 2328 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2327 2329 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2328 2330 optionalrepo=True)
2329 2331 def debugtemplate(ui, repo, tmpl, **opts):
2330 2332 """parse and apply a template
2331 2333
2332 2334 If -r/--rev is given, the template is processed as a log template and
2333 2335 applied to the given changesets. Otherwise, it is processed as a generic
2334 2336 template.
2335 2337
2336 2338 Use --verbose to print the parsed tree.
2337 2339 """
2338 2340 revs = None
2339 2341 if opts[r'rev']:
2340 2342 if repo is None:
2341 2343 raise error.RepoError(_('there is no Mercurial repository here '
2342 2344 '(.hg not found)'))
2343 2345 revs = scmutil.revrange(repo, opts[r'rev'])
2344 2346
2345 2347 props = {}
2346 2348 for d in opts[r'define']:
2347 2349 try:
2348 2350 k, v = (e.strip() for e in d.split('=', 1))
2349 2351 if not k or k == 'ui':
2350 2352 raise ValueError
2351 2353 props[k] = v
2352 2354 except ValueError:
2353 2355 raise error.Abort(_('malformed keyword definition: %s') % d)
2354 2356
2355 2357 if ui.verbose:
2356 2358 aliases = ui.configitems('templatealias')
2357 2359 tree = templater.parse(tmpl)
2358 2360 ui.note(templater.prettyformat(tree), '\n')
2359 2361 newtree = templater.expandaliases(tree, aliases)
2360 2362 if newtree != tree:
2361 2363 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2362 2364
2363 2365 if revs is None:
2364 2366 tres = formatter.templateresources(ui, repo)
2365 2367 t = formatter.maketemplater(ui, tmpl, resources=tres)
2366 2368 ui.write(t.render(props))
2367 2369 else:
2368 2370 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2369 2371 for r in revs:
2370 2372 displayer.show(repo[r], **pycompat.strkwargs(props))
2371 2373 displayer.close()
2372 2374
2373 2375 @command('debugupdatecaches', [])
2374 2376 def debugupdatecaches(ui, repo, *pats, **opts):
2375 2377 """warm all known caches in the repository"""
2376 2378 with repo.wlock(), repo.lock():
2377 2379 repo.updatecaches()
2378 2380
2379 2381 @command('debugupgraderepo', [
2380 2382 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2381 2383 ('', 'run', False, _('performs an upgrade')),
2382 2384 ])
2383 2385 def debugupgraderepo(ui, repo, run=False, optimize=None):
2384 2386 """upgrade a repository to use different features
2385 2387
2386 2388 If no arguments are specified, the repository is evaluated for upgrade
2387 2389 and a list of problems and potential optimizations is printed.
2388 2390
2389 2391 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2390 2392 can be influenced via additional arguments. More details will be provided
2391 2393 by the command output when run without ``--run``.
2392 2394
2393 2395 During the upgrade, the repository will be locked and no writes will be
2394 2396 allowed.
2395 2397
2396 2398 At the end of the upgrade, the repository may not be readable while new
2397 2399 repository data is swapped in. This window will be as long as it takes to
2398 2400 rename some directories inside the ``.hg`` directory. On most machines, this
2399 2401 should complete almost instantaneously and the chances of a consumer being
2400 2402 unable to access the repository should be low.
2401 2403 """
2402 2404 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2403 2405
2404 2406 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2405 2407 inferrepo=True)
2406 2408 def debugwalk(ui, repo, *pats, **opts):
2407 2409 """show how files match on given patterns"""
2408 2410 opts = pycompat.byteskwargs(opts)
2409 2411 m = scmutil.match(repo[None], pats, opts)
2410 2412 ui.write(('matcher: %r\n' % m))
2411 2413 items = list(repo[None].walk(m))
2412 2414 if not items:
2413 2415 return
2414 2416 f = lambda fn: fn
2415 2417 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2416 2418 f = lambda fn: util.normpath(fn)
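# column widths are sized to the longest absolute and relative paths so
# the output lines up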
2417 2419 fmt = 'f %%-%ds %%-%ds %%s' % (
2418 2420 max([len(abs) for abs in items]),
2419 2421 max([len(m.rel(abs)) for abs in items]))
2420 2422 for abs in items:
2421 2423 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2422 2424 ui.write("%s\n" % line.rstrip())
2423 2425
2424 2426 @command('debugwireargs',
2425 2427 [('', 'three', '', 'three'),
2426 2428 ('', 'four', '', 'four'),
2427 2429 ('', 'five', '', 'five'),
2428 2430 ] + cmdutil.remoteopts,
2429 2431 _('REPO [OPTIONS]... [ONE [TWO]]'),
2430 2432 norepo=True)
2431 2433 def debugwireargs(ui, repopath, *vals, **opts):
2432 2434 opts = pycompat.byteskwargs(opts)
2433 2435 repo = hg.peer(ui, opts, repopath)
2434 2436 for opt in cmdutil.remoteopts:
2435 2437 del opts[opt[1]]
2436 2438 args = {}
2437 2439 for k, v in opts.iteritems():
2438 2440 if v:
2439 2441 args[k] = v
2440 2442 args = pycompat.strkwargs(args)
2441 2443 # run twice to check that we don't mess up the stream for the next command
2442 2444 res1 = repo.debugwireargs(*vals, **args)
2443 2445 res2 = repo.debugwireargs(*vals, **args)
2444 2446 ui.write("%s\n" % res1)
2445 2447 if res1 != res2:
2446 2448 ui.warn("%s\n" % res2)