debuglocks: add tests (and fix typo in early return)
Paul Morelle
r35395:a43b2dd9 default
@@ -1,2430 +1,2430 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
80 80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 81 def debugancestor(ui, repo, *args):
82 82 """find the ancestor revision of two revisions in a given index"""
83 83 if len(args) == 3:
84 84 index, rev1, rev2 = args
85 85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 86 lookup = r.lookup
87 87 elif len(args) == 2:
88 88 if not repo:
89 89 raise error.Abort(_('there is no Mercurial repository here '
90 90 '(.hg not found)'))
91 91 rev1, rev2 = args
92 92 r = repo.changelog
93 93 lookup = repo.lookup
94 94 else:
95 95 raise error.Abort(_('either two or three arguments required'))
96 96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98 98
99 99 @command('debugapplystreamclonebundle', [], 'FILE')
100 100 def debugapplystreamclonebundle(ui, repo, fname):
101 101 """apply a stream clone bundle file"""
102 102 f = hg.openpath(ui, fname)
103 103 gen = exchange.readbundle(ui, f, fname)
104 104 gen.apply(repo)
105 105
106 106 @command('debugbuilddag',
107 107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 109 ('n', 'new-file', None, _('add new file at each rev'))],
110 110 _('[OPTION]... [TEXT]'))
111 111 def debugbuilddag(ui, repo, text=None,
112 112 mergeable_file=False,
113 113 overwritten_file=False,
114 114 new_file=False):
115 115 """builds a repo with a given DAG from scratch in the current empty repo
116 116
117 117 The description of the DAG is read from stdin if not given on the
118 118 command line.
119 119
120 120 Elements:
121 121
122 122 - "+n" is a linear run of n nodes based on the current default parent
123 123 - "." is a single node based on the current default parent
124 124 - "$" resets the default parent to null (implied at the start);
125 125 otherwise the default parent is always the last node created
126 126 - "<p" sets the default parent to the backref p
127 127 - "*p" is a fork at parent p, which is a backref
128 128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 129 - "/p2" is a merge of the preceding node and p2
130 130 - ":tag" defines a local tag for the preceding node
131 131 - "@branch" sets the named branch for subsequent nodes
132 132 - "#...\\n" is a comment up to the end of the line
133 133
134 134 Whitespace between the above elements is ignored.
135 135
136 136 A backref is either
137 137
138 138 - a number n, which references the node curr-n, where curr is the current
139 139 node, or
140 140 - the name of a local tag you placed earlier using ":tag", or
141 141 - empty to denote the default parent.
142 142
143 143 All string-valued elements are either strictly alphanumeric, or must
144 144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 145 """
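    # Illustrative example (the tag name "base" is an arbitrary choice, a
    # sketch of the syntax documented above): running
    #   hg debugbuilddag '+2:base *base +3 /base'
    # in an empty repository creates a two-node run, tags the second node
    # "base", starts a fork at that tag, extends the fork with three more
    # nodes, and finally merges the fork tip back with the tagged node.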
146 146
147 147 if text is None:
148 148 ui.status(_("reading DAG from stdin\n"))
149 149 text = ui.fin.read()
150 150
151 151 cl = repo.changelog
152 152 if len(cl) > 0:
153 153 raise error.Abort(_('repository is not empty'))
154 154
155 155 # determine number of revs in DAG
156 156 total = 0
157 157 for type, data in dagparser.parsedag(text):
158 158 if type == 'n':
159 159 total += 1
160 160
161 161 if mergeable_file:
162 162 linesperrev = 2
163 163 # make a file with k lines per rev
164 164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 165 initialmergedlines.append("")
166 166
167 167 tags = []
168 168
169 169 wlock = lock = tr = None
170 170 try:
171 171 wlock = repo.wlock()
172 172 lock = repo.lock()
173 173 tr = repo.transaction("builddag")
174 174
175 175 at = -1
176 176 atbranch = 'default'
177 177 nodeids = []
178 178 id = 0
179 179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 180 for type, data in dagparser.parsedag(text):
181 181 if type == 'n':
182 182 ui.note(('node %s\n' % str(data)))
183 183 id, ps = data
184 184
185 185 files = []
186 186 fctxs = {}
187 187
188 188 p2 = None
189 189 if mergeable_file:
190 190 fn = "mf"
191 191 p1 = repo[ps[0]]
192 192 if len(ps) > 1:
193 193 p2 = repo[ps[1]]
194 194 pa = p1.ancestor(p2)
195 195 base, local, other = [x[fn].data() for x in (pa, p1,
196 196 p2)]
197 197 m3 = simplemerge.Merge3Text(base, local, other)
198 198 ml = [l.strip() for l in m3.merge_lines()]
199 199 ml.append("")
200 200 elif at > 0:
201 201 ml = p1[fn].data().split("\n")
202 202 else:
203 203 ml = initialmergedlines
204 204 ml[id * linesperrev] += " r%i" % id
205 205 mergedtext = "\n".join(ml)
206 206 files.append(fn)
207 207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208 208
209 209 if overwritten_file:
210 210 fn = "of"
211 211 files.append(fn)
212 212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 213
214 214 if new_file:
215 215 fn = "nf%i" % id
216 216 files.append(fn)
217 217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 218 if len(ps) > 1:
219 219 if not p2:
220 220 p2 = repo[ps[1]]
221 221 for fn in p2:
222 222 if fn.startswith("nf"):
223 223 files.append(fn)
224 224 fctxs[fn] = p2[fn]
225 225
226 226 def fctxfn(repo, cx, path):
227 227 return fctxs.get(path)
228 228
229 229 if len(ps) == 0 or ps[0] < 0:
230 230 pars = [None, None]
231 231 elif len(ps) == 1:
232 232 pars = [nodeids[ps[0]], None]
233 233 else:
234 234 pars = [nodeids[p] for p in ps]
235 235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 236 date=(id, 0),
237 237 user="debugbuilddag",
238 238 extra={'branch': atbranch})
239 239 nodeid = repo.commitctx(cx)
240 240 nodeids.append(nodeid)
241 241 at = id
242 242 elif type == 'l':
243 243 id, name = data
244 244 ui.note(('tag %s\n' % name))
245 245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 246 elif type == 'a':
247 247 ui.note(('branch %s\n' % data))
248 248 atbranch = data
249 249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 250 tr.close()
251 251
252 252 if tags:
253 253 repo.vfs.write("localtags", "".join(tags))
254 254 finally:
255 255 ui.progress(_('building'), None)
256 256 release(tr, lock, wlock)
257 257
258 258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 259 indent_string = ' ' * indent
260 260 if all:
261 261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 262 % indent_string)
263 263
264 264 def showchunks(named):
265 265 ui.write("\n%s%s\n" % (indent_string, named))
266 266 for deltadata in gen.deltaiter():
267 267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 268 ui.write("%s%s %s %s %s %s %s\n" %
269 269 (indent_string, hex(node), hex(p1), hex(p2),
270 270 hex(cs), hex(deltabase), len(delta)))
271 271
272 272 chunkdata = gen.changelogheader()
273 273 showchunks("changelog")
274 274 chunkdata = gen.manifestheader()
275 275 showchunks("manifest")
276 276 for chunkdata in iter(gen.filelogheader, {}):
277 277 fname = chunkdata['filename']
278 278 showchunks(fname)
279 279 else:
280 280 if isinstance(gen, bundle2.unbundle20):
281 281 raise error.Abort(_('use debugbundle2 for this file'))
282 282 chunkdata = gen.changelogheader()
283 283 for deltadata in gen.deltaiter():
284 284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 285 ui.write("%s%s\n" % (indent_string, hex(node)))
286 286
287 287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 288 """display version and markers contained in 'data'"""
289 289 opts = pycompat.byteskwargs(opts)
290 290 data = part.read()
291 291 indent_string = ' ' * indent
292 292 try:
293 293 version, markers = obsolete._readmarkers(data)
294 294 except error.UnknownVersion as exc:
295 295 msg = "%sunsupported version: %s (%d bytes)\n"
296 296 msg %= indent_string, exc.version, len(data)
297 297 ui.write(msg)
298 298 else:
299 299 msg = "%sversion: %d (%d bytes)\n"
300 300 msg %= indent_string, version, len(data)
301 301 ui.write(msg)
302 302 fm = ui.formatter('debugobsolete', opts)
303 303 for rawmarker in sorted(markers):
304 304 m = obsutil.marker(None, rawmarker)
305 305 fm.startitem()
306 306 fm.plain(indent_string)
307 307 cmdutil.showmarker(fm, m)
308 308 fm.end()
309 309
310 310 def _debugphaseheads(ui, data, indent=0):
311 311 """display phase heads contained in 'data'"""
312 312 indent_string = ' ' * indent
313 313 headsbyphase = phases.binarydecode(data)
314 314 for phase in phases.allphases:
315 315 for head in headsbyphase[phase]:
316 316 ui.write(indent_string)
317 317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318 318
319 319 def _quasirepr(thing):
320 320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
321 321 return '{%s}' % (
322 322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
323 323 return pycompat.bytestr(repr(thing))
324 324
325 325 def _debugbundle2(ui, gen, all=None, **opts):
326 326 """lists the contents of a bundle2"""
327 327 if not isinstance(gen, bundle2.unbundle20):
328 328 raise error.Abort(_('not a bundle2 file'))
329 329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
330 330 parttypes = opts.get(r'part_type', [])
331 331 for part in gen.iterparts():
332 332 if parttypes and part.type not in parttypes:
333 333 continue
334 334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
335 335 if part.type == 'changegroup':
336 336 version = part.params.get('version', '01')
337 337 cg = changegroup.getunbundler(version, part, 'UN')
338 338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 339 if part.type == 'obsmarkers':
340 340 _debugobsmarkers(ui, part, indent=4, **opts)
341 341 if part.type == 'phase-heads':
342 342 _debugphaseheads(ui, part, indent=4)
343 343
344 344 @command('debugbundle',
345 345 [('a', 'all', None, _('show all details')),
346 346 ('', 'part-type', [], _('show only the named part type')),
347 347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 348 _('FILE'),
349 349 norepo=True)
350 350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 351 """lists the contents of a bundle"""
352 352 with hg.openpath(ui, bundlepath) as f:
353 353 if spec:
354 354 spec = exchange.getbundlespec(ui, f)
355 355 ui.write('%s\n' % spec)
356 356 return
357 357
358 358 gen = exchange.readbundle(ui, f, bundlepath)
359 359 if isinstance(gen, bundle2.unbundle20):
360 360 return _debugbundle2(ui, gen, all=all, **opts)
361 361 _debugchangegroup(ui, gen, all=all, **opts)
362 362
363 363 @command('debugcapabilities',
364 364 [], _('PATH'),
365 365 norepo=True)
366 366 def debugcapabilities(ui, path, **opts):
367 367 """lists the capabilities of a remote peer"""
368 368 peer = hg.peer(ui, opts, path)
369 369 caps = peer.capabilities()
370 370 ui.write(('Main capabilities:\n'))
371 371 for c in sorted(caps):
372 372 ui.write((' %s\n') % c)
373 373 b2caps = bundle2.bundle2caps(peer)
374 374 if b2caps:
375 375 ui.write(('Bundle2 capabilities:\n'))
376 376 for key, values in sorted(b2caps.iteritems()):
377 377 ui.write((' %s\n') % key)
378 378 for v in values:
379 379 ui.write((' %s\n') % v)
380 380
381 381 @command('debugcheckstate', [], '')
382 382 def debugcheckstate(ui, repo):
383 383 """validate the correctness of the current dirstate"""
384 384 parent1, parent2 = repo.dirstate.parents()
385 385 m1 = repo[parent1].manifest()
386 386 m2 = repo[parent2].manifest()
387 387 errors = 0
388 388 for f in repo.dirstate:
389 389 state = repo.dirstate[f]
390 390 if state in "nr" and f not in m1:
391 391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
392 392 errors += 1
393 393 if state in "a" and f in m1:
394 394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
395 395 errors += 1
396 396 if state in "m" and f not in m1 and f not in m2:
397 397 ui.warn(_("%s in state %s, but not in either manifest\n") %
398 398 (f, state))
399 399 errors += 1
400 400 for f in m1:
401 401 state = repo.dirstate[f]
402 402 if state not in "nrm":
403 403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
404 404 errors += 1
405 405 if errors:
406 406 error = _(".hg/dirstate inconsistent with current parent's manifest")
407 407 raise error.Abort(error)
408 408
409 409 @command('debugcolor',
410 410 [('', 'style', None, _('show all configured styles'))],
411 411 'hg debugcolor')
412 412 def debugcolor(ui, repo, **opts):
413 413 """show available colors, effects or styles"""
414 414 ui.write(('color mode: %s\n') % ui._colormode)
415 415 if opts.get(r'style'):
416 416 return _debugdisplaystyle(ui)
417 417 else:
418 418 return _debugdisplaycolor(ui)
419 419
420 420 def _debugdisplaycolor(ui):
421 421 ui = ui.copy()
422 422 ui._styles.clear()
423 423 for effect in color._activeeffects(ui).keys():
424 424 ui._styles[effect] = effect
425 425 if ui._terminfoparams:
426 426 for k, v in ui.configitems('color'):
427 427 if k.startswith('color.'):
428 428 ui._styles[k] = k[6:]
429 429 elif k.startswith('terminfo.'):
430 430 ui._styles[k] = k[9:]
431 431 ui.write(_('available colors:\n'))
432 432 # sort label with a '_' after the other to group '_background' entry.
433 433 items = sorted(ui._styles.items(),
434 434 key=lambda i: ('_' in i[0], i[0], i[1]))
435 435 for colorname, label in items:
436 436 ui.write(('%s\n') % colorname, label=label)
437 437
438 438 def _debugdisplaystyle(ui):
439 439 ui.write(_('available style:\n'))
440 440 width = max(len(s) for s in ui._styles)
441 441 for label, effects in sorted(ui._styles.items()):
442 442 ui.write('%s' % label, label=label)
443 443 if effects:
444 444 # 50
445 445 ui.write(': ')
446 446 ui.write(' ' * (max(0, width - len(label))))
447 447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
448 448 ui.write('\n')
449 449
450 450 @command('debugcreatestreamclonebundle', [], 'FILE')
451 451 def debugcreatestreamclonebundle(ui, repo, fname):
452 452 """create a stream clone bundle file
453 453
454 454 Stream bundles are special bundles that are essentially archives of
455 455 revlog files. They are commonly used for cloning very quickly.
456 456 """
457 457 # TODO we may want to turn this into an abort when this functionality
458 458 # is moved into `hg bundle`.
459 459 if phases.hassecret(repo):
460 460 ui.warn(_('(warning: stream clone bundle will contain secret '
461 461 'revisions)\n'))
462 462
463 463 requirements, gen = streamclone.generatebundlev1(repo)
464 464 changegroup.writechunks(ui, gen, fname)
465 465
466 466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
467 467
468 468 @command('debugdag',
469 469 [('t', 'tags', None, _('use tags as labels')),
470 470 ('b', 'branches', None, _('annotate with branch names')),
471 471 ('', 'dots', None, _('use dots for runs')),
472 472 ('s', 'spaces', None, _('separate elements by spaces'))],
473 473 _('[OPTION]... [FILE [REV]...]'),
474 474 optionalrepo=True)
475 475 def debugdag(ui, repo, file_=None, *revs, **opts):
476 476 """format the changelog or an index DAG as a concise textual description
477 477
478 478 If you pass a revlog index, the revlog's DAG is emitted. If you list
479 479 revision numbers, they get labeled in the output as rN.
480 480
481 481 Otherwise, the changelog DAG of the current repo is emitted.
482 482 """
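    # Illustrative usage (the index path and revision numbers are arbitrary
    # examples): "hg debugdag -t -b" emits the current changelog DAG with tag
    # and branch annotations, while "hg debugdag .hg/store/00changelog.i 0 5"
    # dumps that index's DAG and labels revisions 0 and 5 as r0 and r5.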
483 483 spaces = opts.get(r'spaces')
484 484 dots = opts.get(r'dots')
485 485 if file_:
486 486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
487 487 file_)
488 488 revs = set((int(r) for r in revs))
489 489 def events():
490 490 for r in rlog:
491 491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
492 492 if p != -1))
493 493 if r in revs:
494 494 yield 'l', (r, "r%i" % r)
495 495 elif repo:
496 496 cl = repo.changelog
497 497 tags = opts.get(r'tags')
498 498 branches = opts.get(r'branches')
499 499 if tags:
500 500 labels = {}
501 501 for l, n in repo.tags().items():
502 502 labels.setdefault(cl.rev(n), []).append(l)
503 503 def events():
504 504 b = "default"
505 505 for r in cl:
506 506 if branches:
507 507 newb = cl.read(cl.node(r))[5]['branch']
508 508 if newb != b:
509 509 yield 'a', newb
510 510 b = newb
511 511 yield 'n', (r, list(p for p in cl.parentrevs(r)
512 512 if p != -1))
513 513 if tags:
514 514 ls = labels.get(r)
515 515 if ls:
516 516 for l in ls:
517 517 yield 'l', (r, l)
518 518 else:
519 519 raise error.Abort(_('need repo for changelog dag'))
520 520
521 521 for line in dagparser.dagtextlines(events(),
522 522 addspaces=spaces,
523 523 wraplabels=True,
524 524 wrapannotations=True,
525 525 wrapnonlinear=dots,
526 526 usedots=dots,
527 527 maxlinewidth=70):
528 528 ui.write(line)
529 529 ui.write("\n")
530 530
531 531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
532 532 def debugdata(ui, repo, file_, rev=None, **opts):
533 533 """dump the contents of a data file revision"""
534 534 opts = pycompat.byteskwargs(opts)
535 535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
536 536 if rev is not None:
537 537 raise error.CommandError('debugdata', _('invalid arguments'))
538 538 file_, rev = None, file_
539 539 elif rev is None:
540 540 raise error.CommandError('debugdata', _('invalid arguments'))
541 541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
542 542 try:
543 543 ui.write(r.revision(r.lookup(rev), raw=True))
544 544 except KeyError:
545 545 raise error.Abort(_('invalid revision identifier %s') % rev)
546 546
547 547 @command('debugdate',
548 548 [('e', 'extended', None, _('try extended date formats'))],
549 549 _('[-e] DATE [RANGE]'),
550 550 norepo=True, optionalrepo=True)
551 551 def debugdate(ui, date, range=None, **opts):
552 552 """parse and display a date"""
553 553 if opts[r"extended"]:
554 554 d = util.parsedate(date, util.extendeddateformats)
555 555 else:
556 556 d = util.parsedate(date)
557 557 ui.write(("internal: %s %s\n") % d)
558 558 ui.write(("standard: %s\n") % util.datestr(d))
559 559 if range:
560 560 m = util.matchdate(range)
561 561 ui.write(("match: %s\n") % m(d[0]))
562 562
563 563 @command('debugdeltachain',
564 564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
565 565 _('-c|-m|FILE'),
566 566 optionalrepo=True)
567 567 def debugdeltachain(ui, repo, file_=None, **opts):
568 568 """dump information about delta chains in a revlog
569 569
570 570 Output can be templatized. Available template keywords are:
571 571
572 572 :``rev``: revision number
573 573 :``chainid``: delta chain identifier (numbered by unique base)
574 574 :``chainlen``: delta chain length to this revision
575 575 :``prevrev``: previous revision in delta chain
576 576 :``deltatype``: role of delta / how it was computed
577 577 :``compsize``: compressed size of revision
578 578 :``uncompsize``: uncompressed size of revision
579 579 :``chainsize``: total size of compressed revisions in chain
580 580 :``chainratio``: total chain size divided by uncompressed revision size
581 581 (new delta chains typically start at ratio 2.00)
582 582 :``lindist``: linear distance from base revision in delta chain to end
583 583 of this revision
584 584 :``extradist``: total size of revisions not part of this delta chain from
585 585 base of delta chain to end of this revision; a measurement
586 586 of how much extra data we need to read/seek across to read
587 587 the delta chain for this revision
588 588 :``extraratio``: extradist divided by chainsize; another representation of
589 589 how much unrelated data is needed to load this delta chain
590 590
591 591 If the repository is configured to use the sparse read, additional keywords
592 592 are available:
593 593
594 594 :``readsize``: total size of data read from the disk for a revision
595 595 (sum of the sizes of all the blocks)
596 596 :``largestblock``: size of the largest block of data read from the disk
597 597 :``readdensity``: density of useful bytes in the data read from the disk
598 598
599 599 The sparse read can be enabled with experimental.sparse-read = True
600 600 """
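    # Illustrative invocation (the field selection is arbitrary): with the
    # formatter options, something like
    #   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
    # prints one line per manifest revision using the template keywords
    # documented above; without -T the tabular output produced below is used.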
601 601 opts = pycompat.byteskwargs(opts)
602 602 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
603 603 index = r.index
604 604 generaldelta = r.version & revlog.FLAG_GENERALDELTA
605 605 withsparseread = getattr(r, '_withsparseread', False)
606 606
607 607 def revinfo(rev):
608 608 e = index[rev]
609 609 compsize = e[1]
610 610 uncompsize = e[2]
611 611 chainsize = 0
612 612
613 613 if generaldelta:
614 614 if e[3] == e[5]:
615 615 deltatype = 'p1'
616 616 elif e[3] == e[6]:
617 617 deltatype = 'p2'
618 618 elif e[3] == rev - 1:
619 619 deltatype = 'prev'
620 620 elif e[3] == rev:
621 621 deltatype = 'base'
622 622 else:
623 623 deltatype = 'other'
624 624 else:
625 625 if e[3] == rev:
626 626 deltatype = 'base'
627 627 else:
628 628 deltatype = 'prev'
629 629
630 630 chain = r._deltachain(rev)[0]
631 631 for iterrev in chain:
632 632 e = index[iterrev]
633 633 chainsize += e[1]
634 634
635 635 return compsize, uncompsize, deltatype, chain, chainsize
636 636
637 637 fm = ui.formatter('debugdeltachain', opts)
638 638
639 639 fm.plain(' rev chain# chainlen prev delta '
640 640 'size rawsize chainsize ratio lindist extradist '
641 641 'extraratio')
642 642 if withsparseread:
643 643 fm.plain(' readsize largestblk rddensity')
644 644 fm.plain('\n')
645 645
646 646 chainbases = {}
647 647 for rev in r:
648 648 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
649 649 chainbase = chain[0]
650 650 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
651 651 start = r.start
652 652 length = r.length
653 653 basestart = start(chainbase)
654 654 revstart = start(rev)
655 655 lineardist = revstart + comp - basestart
656 656 extradist = lineardist - chainsize
657 657 try:
658 658 prevrev = chain[-2]
659 659 except IndexError:
660 660 prevrev = -1
661 661
662 662 chainratio = float(chainsize) / float(uncomp)
663 663 extraratio = float(extradist) / float(chainsize)
664 664
665 665 fm.startitem()
666 666 fm.write('rev chainid chainlen prevrev deltatype compsize '
667 667 'uncompsize chainsize chainratio lindist extradist '
668 668 'extraratio',
669 669 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
670 670 rev, chainid, len(chain), prevrev, deltatype, comp,
671 671 uncomp, chainsize, chainratio, lineardist, extradist,
672 672 extraratio,
673 673 rev=rev, chainid=chainid, chainlen=len(chain),
674 674 prevrev=prevrev, deltatype=deltatype, compsize=comp,
675 675 uncompsize=uncomp, chainsize=chainsize,
676 676 chainratio=chainratio, lindist=lineardist,
677 677 extradist=extradist, extraratio=extraratio)
678 678 if withsparseread:
679 679 readsize = 0
680 680 largestblock = 0
681 681 for revschunk in revlog._slicechunk(r, chain):
682 682 blkend = start(revschunk[-1]) + length(revschunk[-1])
683 683 blksize = blkend - start(revschunk[0])
684 684
685 685 readsize += blksize
686 686 if largestblock < blksize:
687 687 largestblock = blksize
688 688
689 689 readdensity = float(chainsize) / float(readsize)
690 690
691 691 fm.write('readsize largestblock readdensity',
692 692 ' %10d %10d %9.5f',
693 693 readsize, largestblock, readdensity,
694 694 readsize=readsize, largestblock=largestblock,
695 695 readdensity=readdensity)
696 696
697 697 fm.plain('\n')
698 698
699 699 fm.end()
700 700
701 701 @command('debugdirstate|debugstate',
702 702 [('', 'nodates', None, _('do not display the saved mtime')),
703 703 ('', 'datesort', None, _('sort by saved mtime'))],
704 704 _('[OPTION]...'))
705 705 def debugstate(ui, repo, **opts):
706 706 """show the contents of the current dirstate"""
707 707
708 708 nodates = opts.get(r'nodates')
709 709 datesort = opts.get(r'datesort')
710 710
711 711 timestr = ""
712 712 if datesort:
713 713 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
714 714 else:
715 715 keyfunc = None # sort by filename
716 716 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
717 717 if ent[3] == -1:
718 718 timestr = 'unset '
719 719 elif nodates:
720 720 timestr = 'set '
721 721 else:
722 722 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
723 723 time.localtime(ent[3]))
724 724 timestr = encoding.strtolocal(timestr)
725 725 if ent[1] & 0o20000:
726 726 mode = 'lnk'
727 727 else:
728 728 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
729 729 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
730 730 for f in repo.dirstate.copies():
731 731 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
732 732
733 733 @command('debugdiscovery',
734 734 [('', 'old', None, _('use old-style discovery')),
735 735 ('', 'nonheads', None,
736 736 _('use old-style discovery with non-heads included')),
737 737 ('', 'rev', [], 'restrict discovery to this set of revs'),
738 738 ] + cmdutil.remoteopts,
739 739 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
740 740 def debugdiscovery(ui, repo, remoteurl="default", **opts):
741 741 """runs the changeset discovery protocol in isolation"""
742 742 opts = pycompat.byteskwargs(opts)
743 743 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
744 744 opts.get('branch'))
745 745 remote = hg.peer(repo, opts, remoteurl)
746 746 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
747 747
748 748 # make sure tests are repeatable
749 749 random.seed(12323)
750 750
751 751 def doit(pushedrevs, remoteheads, remote=remote):
752 752 if opts.get('old'):
753 753 if not util.safehasattr(remote, 'branches'):
754 754 # enable in-client legacy support
755 755 remote = localrepo.locallegacypeer(remote.local())
756 756 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
757 757 force=True)
758 758 common = set(common)
759 759 if not opts.get('nonheads'):
760 760 ui.write(("unpruned common: %s\n") %
761 761 " ".join(sorted(short(n) for n in common)))
762 762 dag = dagutil.revlogdag(repo.changelog)
763 763 all = dag.ancestorset(dag.internalizeall(common))
764 764 common = dag.externalizeall(dag.headsetofconnecteds(all))
765 765 else:
766 766 nodes = None
767 767 if pushedrevs:
768 768 revs = scmutil.revrange(repo, pushedrevs)
769 769 nodes = [repo[r].node() for r in revs]
770 770 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
771 771 ancestorsof=nodes)
772 772 common = set(common)
773 773 rheads = set(hds)
774 774 lheads = set(repo.heads())
775 775 ui.write(("common heads: %s\n") %
776 776 " ".join(sorted(short(n) for n in common)))
777 777 if lheads <= common:
778 778 ui.write(("local is subset\n"))
779 779 elif rheads <= common:
780 780 ui.write(("remote is subset\n"))
781 781
782 782 serverlogs = opts.get('serverlog')
783 783 if serverlogs:
784 784 for filename in serverlogs:
785 785 with open(filename, 'r') as logfile:
786 786 line = logfile.readline()
787 787 while line:
788 788 parts = line.strip().split(';')
789 789 op = parts[1]
790 790 if op == 'cg':
791 791 pass
792 792 elif op == 'cgss':
793 793 doit(parts[2].split(' '), parts[3].split(' '))
794 794 elif op == 'unb':
795 795 doit(parts[3].split(' '), parts[2].split(' '))
796 796 line = logfile.readline()
797 797 else:
798 798 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
799 799 opts.get('remote_head'))
800 800 localrevs = opts.get('rev')
801 801 doit(localrevs, remoterevs)
802 802
803 803 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
804 804 def debugextensions(ui, **opts):
805 805 '''show information about active extensions'''
806 806 opts = pycompat.byteskwargs(opts)
807 807 exts = extensions.extensions(ui)
808 808 hgver = util.version()
809 809 fm = ui.formatter('debugextensions', opts)
810 810 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
811 811 isinternal = extensions.ismoduleinternal(extmod)
812 812 extsource = pycompat.fsencode(extmod.__file__)
813 813 if isinternal:
814 814 exttestedwith = [] # never expose magic string to users
815 815 else:
816 816 exttestedwith = getattr(extmod, 'testedwith', '').split()
817 817 extbuglink = getattr(extmod, 'buglink', None)
818 818
819 819 fm.startitem()
820 820
821 821 if ui.quiet or ui.verbose:
822 822 fm.write('name', '%s\n', extname)
823 823 else:
824 824 fm.write('name', '%s', extname)
825 825 if isinternal or hgver in exttestedwith:
826 826 fm.plain('\n')
827 827 elif not exttestedwith:
828 828 fm.plain(_(' (untested!)\n'))
829 829 else:
830 830 lasttestedversion = exttestedwith[-1]
831 831 fm.plain(' (%s!)\n' % lasttestedversion)
832 832
833 833 fm.condwrite(ui.verbose and extsource, 'source',
834 834 _(' location: %s\n'), extsource or "")
835 835
836 836 if ui.verbose:
837 837 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
838 838 fm.data(bundled=isinternal)
839 839
840 840 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
841 841 _(' tested with: %s\n'),
842 842 fm.formatlist(exttestedwith, name='ver'))
843 843
844 844 fm.condwrite(ui.verbose and extbuglink, 'buglink',
845 845 _(' bug reporting: %s\n'), extbuglink or "")
846 846
847 847 fm.end()
848 848
849 849 @command('debugfileset',
850 850 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
851 851 _('[-r REV] FILESPEC'))
852 852 def debugfileset(ui, repo, expr, **opts):
853 853 '''parse and apply a fileset specification'''
854 854 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
855 855 if ui.verbose:
856 856 tree = fileset.parse(expr)
857 857 ui.note(fileset.prettyformat(tree), "\n")
858 858
859 859 for f in ctx.getfileset(expr):
860 860 ui.write("%s\n" % f)
861 861
862 862 @command('debugformat',
863 863 [] + cmdutil.formatteropts,
864 864 _(''))
865 865 def debugformat(ui, repo, **opts):
866 866 """display format information about the current repository
867 867
868 868 Use --verbose to get extra information about current config value and
869 869 Mercurial default."""
870 870 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
871 871 maxvariantlength = max(len('format-variant'), maxvariantlength)
872 872
873 873 def makeformatname(name):
874 874 return '%s:' + (' ' * (maxvariantlength - len(name)))
875 875
876 876 fm = ui.formatter('debugformat', opts)
877 877 if fm.isplain():
878 878 def formatvalue(value):
879 879 if util.safehasattr(value, 'startswith'):
880 880 return value
881 881 if value:
882 882 return 'yes'
883 883 else:
884 884 return 'no'
885 885 else:
886 886 formatvalue = pycompat.identity
887 887
888 888 fm.plain('format-variant')
889 889 fm.plain(' ' * (maxvariantlength - len('format-variant')))
890 890 fm.plain(' repo')
891 891 if ui.verbose:
892 892 fm.plain(' config default')
893 893 fm.plain('\n')
894 894 for fv in upgrade.allformatvariant:
895 895 fm.startitem()
896 896 repovalue = fv.fromrepo(repo)
897 897 configvalue = fv.fromconfig(repo)
898 898
899 899 if repovalue != configvalue:
900 900 namelabel = 'formatvariant.name.mismatchconfig'
901 901 repolabel = 'formatvariant.repo.mismatchconfig'
902 902 elif repovalue != fv.default:
903 903 namelabel = 'formatvariant.name.mismatchdefault'
904 904 repolabel = 'formatvariant.repo.mismatchdefault'
905 905 else:
906 906 namelabel = 'formatvariant.name.uptodate'
907 907 repolabel = 'formatvariant.repo.uptodate'
908 908
909 909 fm.write('name', makeformatname(fv.name), fv.name,
910 910 label=namelabel)
911 911 fm.write('repo', ' %3s', formatvalue(repovalue),
912 912 label=repolabel)
913 913 if fv.default != configvalue:
914 914 configlabel = 'formatvariant.config.special'
915 915 else:
916 916 configlabel = 'formatvariant.config.default'
917 917 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
918 918 label=configlabel)
919 919 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
920 920 label='formatvariant.default')
921 921 fm.plain('\n')
922 922 fm.end()
923 923
924 924 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
925 925 def debugfsinfo(ui, path="."):
926 926 """show information detected about current filesystem"""
927 927 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
928 928 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
929 929 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
930 930 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
931 931 casesensitive = '(unknown)'
932 932 try:
933 933 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
934 934 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
935 935 except OSError:
936 936 pass
937 937 ui.write(('case-sensitive: %s\n') % casesensitive)
938 938
939 939 @command('debuggetbundle',
940 940 [('H', 'head', [], _('id of head node'), _('ID')),
941 941 ('C', 'common', [], _('id of common node'), _('ID')),
942 942 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
943 943 _('REPO FILE [-H|-C ID]...'),
944 944 norepo=True)
945 945 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
946 946 """retrieves a bundle from a repo
947 947
948 948 Every ID must be a full-length hex node id string. Saves the bundle to the
949 949 given file.
950 950 """
951 951 opts = pycompat.byteskwargs(opts)
952 952 repo = hg.peer(ui, opts, repopath)
953 953 if not repo.capable('getbundle'):
954 954 raise error.Abort("getbundle() not supported by target repository")
955 955 args = {}
956 956 if common:
957 957 args[r'common'] = [bin(s) for s in common]
958 958 if head:
959 959 args[r'heads'] = [bin(s) for s in head]
960 960 # TODO: get desired bundlecaps from command line.
961 961 args[r'bundlecaps'] = None
962 962 bundle = repo.getbundle('debug', **args)
963 963
964 964 bundletype = opts.get('type', 'bzip2').lower()
965 965 btypes = {'none': 'HG10UN',
966 966 'bzip2': 'HG10BZ',
967 967 'gzip': 'HG10GZ',
968 968 'bundle2': 'HG20'}
969 969 bundletype = btypes.get(bundletype)
970 970 if bundletype not in bundle2.bundletypes:
971 971 raise error.Abort(_('unknown bundle type specified with --type'))
972 972 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
973 973
974 974 @command('debugignore', [], '[FILE]')
975 975 def debugignore(ui, repo, *files, **opts):
976 976 """display the combined ignore pattern and information about ignored files
977 977
978 978 With no argument display the combined ignore pattern.
979 979
980 980 Given space separated file names, shows if the given file is ignored and
981 981 if so, show the ignore rule (file and line number) that matched it.
982 982 """
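    # Illustrative output (file and pattern names are hypothetical):
    #   $ hg debugignore build/output.o
    #   build/output.o is ignored
    #   (ignore rule in .hgignore, line 2: 'build')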
983 983 ignore = repo.dirstate._ignore
984 984 if not files:
985 985 # Show all the patterns
986 986 ui.write("%s\n" % repr(ignore))
987 987 else:
988 988 m = scmutil.match(repo[None], pats=files)
989 989 for f in m.files():
990 990 nf = util.normpath(f)
991 991 ignored = None
992 992 ignoredata = None
993 993 if nf != '.':
994 994 if ignore(nf):
995 995 ignored = nf
996 996 ignoredata = repo.dirstate._ignorefileandline(nf)
997 997 else:
998 998 for p in util.finddirs(nf):
999 999 if ignore(p):
1000 1000 ignored = p
1001 1001 ignoredata = repo.dirstate._ignorefileandline(p)
1002 1002 break
1003 1003 if ignored:
1004 1004 if ignored == nf:
1005 1005 ui.write(_("%s is ignored\n") % m.uipath(f))
1006 1006 else:
1007 1007 ui.write(_("%s is ignored because of "
1008 1008 "containing folder %s\n")
1009 1009 % (m.uipath(f), ignored))
1010 1010 ignorefile, lineno, line = ignoredata
1011 1011 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1012 1012 % (ignorefile, lineno, line))
1013 1013 else:
1014 1014 ui.write(_("%s is not ignored\n") % m.uipath(f))
1015 1015
1016 1016 @command('debugindex', cmdutil.debugrevlogopts +
1017 1017 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1018 1018 _('[-f FORMAT] -c|-m|FILE'),
1019 1019 optionalrepo=True)
1020 1020 def debugindex(ui, repo, file_=None, **opts):
1021 1021 """dump the contents of an index file"""
1022 1022 opts = pycompat.byteskwargs(opts)
1023 1023 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1024 1024 format = opts.get('format', 0)
1025 1025 if format not in (0, 1):
1026 1026 raise error.Abort(_("unknown format %d") % format)
1027 1027
1028 1028 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1029 1029 if generaldelta:
1030 1030 basehdr = ' delta'
1031 1031 else:
1032 1032 basehdr = ' base'
1033 1033
1034 1034 if ui.debugflag:
1035 1035 shortfn = hex
1036 1036 else:
1037 1037 shortfn = short
1038 1038
1039 1039 # There might not be anything in r, so have a sane default
1040 1040 idlen = 12
1041 1041 for i in r:
1042 1042 idlen = len(shortfn(r.node(i)))
1043 1043 break
1044 1044
1045 1045 if format == 0:
1046 1046 ui.write((" rev offset length " + basehdr + " linkrev"
1047 1047 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1048 1048 elif format == 1:
1049 1049 ui.write((" rev flag offset length"
1050 1050 " size " + basehdr + " link p1 p2"
1051 1051 " %s\n") % "nodeid".rjust(idlen))
1052 1052
1053 1053 for i in r:
1054 1054 node = r.node(i)
1055 1055 if generaldelta:
1056 1056 base = r.deltaparent(i)
1057 1057 else:
1058 1058 base = r.chainbase(i)
1059 1059 if format == 0:
1060 1060 try:
1061 1061 pp = r.parents(node)
1062 1062 except Exception:
1063 1063 pp = [nullid, nullid]
1064 1064 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1065 1065 i, r.start(i), r.length(i), base, r.linkrev(i),
1066 1066 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1067 1067 elif format == 1:
1068 1068 pr = r.parentrevs(i)
1069 1069 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1070 1070 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1071 1071 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1072 1072
1073 1073 @command('debugindexdot', cmdutil.debugrevlogopts,
1074 1074 _('-c|-m|FILE'), optionalrepo=True)
1075 1075 def debugindexdot(ui, repo, file_=None, **opts):
1076 1076 """dump an index DAG as a graphviz dot file"""
1077 1077 opts = pycompat.byteskwargs(opts)
1078 1078 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1079 1079 ui.write(("digraph G {\n"))
1080 1080 for i in r:
1081 1081 node = r.node(i)
1082 1082 pp = r.parents(node)
1083 1083 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1084 1084 if pp[1] != nullid:
1085 1085 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1086 1086 ui.write("}\n")
1087 1087
1088 1088 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1089 1089 def debuginstall(ui, **opts):
1090 1090 '''test Mercurial installation
1091 1091
1092 1092 Returns 0 on success.
1093 1093 '''
1094 1094 opts = pycompat.byteskwargs(opts)
1095 1095
1096 1096 def writetemp(contents):
1097 1097 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1098 1098 f = os.fdopen(fd, pycompat.sysstr("wb"))
1099 1099 f.write(contents)
1100 1100 f.close()
1101 1101 return name
1102 1102
1103 1103 problems = 0
1104 1104
1105 1105 fm = ui.formatter('debuginstall', opts)
1106 1106 fm.startitem()
1107 1107
1108 1108 # encoding
1109 1109 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1110 1110 err = None
1111 1111 try:
1112 1112 codecs.lookup(pycompat.sysstr(encoding.encoding))
1113 1113 except LookupError as inst:
1114 1114 err = util.forcebytestr(inst)
1115 1115 problems += 1
1116 1116 fm.condwrite(err, 'encodingerror', _(" %s\n"
1117 1117 " (check that your locale is properly set)\n"), err)
1118 1118
1119 1119 # Python
1120 1120 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1121 1121 pycompat.sysexecutable)
1122 1122 fm.write('pythonver', _("checking Python version (%s)\n"),
1123 1123 ("%d.%d.%d" % sys.version_info[:3]))
1124 1124 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1125 1125 os.path.dirname(pycompat.fsencode(os.__file__)))
1126 1126
1127 1127 security = set(sslutil.supportedprotocols)
1128 1128 if sslutil.hassni:
1129 1129 security.add('sni')
1130 1130
1131 1131 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1132 1132 fm.formatlist(sorted(security), name='protocol',
1133 1133 fmt='%s', sep=','))
1134 1134
1135 1135 # These are warnings, not errors. So don't increment problem count. This
1136 1136 # may change in the future.
1137 1137 if 'tls1.2' not in security:
1138 1138 fm.plain(_(' TLS 1.2 not supported by Python install; '
1139 1139 'network connections lack modern security\n'))
1140 1140 if 'sni' not in security:
1141 1141 fm.plain(_(' SNI not supported by Python install; may have '
1142 1142 'connectivity issues with some servers\n'))
1143 1143
1144 1144 # TODO print CA cert info
1145 1145
1146 1146 # hg version
1147 1147 hgver = util.version()
1148 1148 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1149 1149 hgver.split('+')[0])
1150 1150 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1151 1151 '+'.join(hgver.split('+')[1:]))
1152 1152
1153 1153 # compiled modules
1154 1154 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1155 1155 policy.policy)
1156 1156 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1157 1157 os.path.dirname(pycompat.fsencode(__file__)))
1158 1158
1159 1159 if policy.policy in ('c', 'allow'):
1160 1160 err = None
1161 1161 try:
1162 1162 from .cext import (
1163 1163 base85,
1164 1164 bdiff,
1165 1165 mpatch,
1166 1166 osutil,
1167 1167 )
1168 1168 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1169 1169 except Exception as inst:
1170 1170 err = util.forcebytestr(inst)
1171 1171 problems += 1
1172 1172 fm.condwrite(err, 'extensionserror', " %s\n", err)
1173 1173
1174 1174 compengines = util.compengines._engines.values()
1175 1175 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1176 1176 fm.formatlist(sorted(e.name() for e in compengines),
1177 1177 name='compengine', fmt='%s', sep=', '))
1178 1178 fm.write('compenginesavail', _('checking available compression engines '
1179 1179 '(%s)\n'),
1180 1180 fm.formatlist(sorted(e.name() for e in compengines
1181 1181 if e.available()),
1182 1182 name='compengine', fmt='%s', sep=', '))
1183 1183 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1184 1184 fm.write('compenginesserver', _('checking available compression engines '
1185 1185 'for wire protocol (%s)\n'),
1186 1186 fm.formatlist([e.name() for e in wirecompengines
1187 1187 if e.wireprotosupport()],
1188 1188 name='compengine', fmt='%s', sep=', '))
1189 1189
1190 1190 # templates
1191 1191 p = templater.templatepaths()
1192 1192 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1193 1193 fm.condwrite(not p, '', _(" no template directories found\n"))
1194 1194 if p:
1195 1195 m = templater.templatepath("map-cmdline.default")
1196 1196 if m:
1197 1197 # template found, check if it is working
1198 1198 err = None
1199 1199 try:
1200 1200 templater.templater.frommapfile(m)
1201 1201 except Exception as inst:
1202 1202 err = util.forcebytestr(inst)
1203 1203 p = None
1204 1204 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1205 1205 else:
1206 1206 p = None
1207 1207 fm.condwrite(p, 'defaulttemplate',
1208 1208 _("checking default template (%s)\n"), m)
1209 1209 fm.condwrite(not m, 'defaulttemplatenotfound',
1210 1210 _(" template '%s' not found\n"), "default")
1211 1211 if not p:
1212 1212 problems += 1
1213 1213 fm.condwrite(not p, '',
1214 1214 _(" (templates seem to have been installed incorrectly)\n"))
1215 1215
1216 1216 # editor
1217 1217 editor = ui.geteditor()
1218 1218 editor = util.expandpath(editor)
1219 1219 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1220 1220 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1221 1221 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1222 1222 _(" No commit editor set and can't find %s in PATH\n"
1223 1223 " (specify a commit editor in your configuration"
1224 1224 " file)\n"), not cmdpath and editor == 'vi' and editor)
1225 1225 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1226 1226 _(" Can't find editor '%s' in PATH\n"
1227 1227 " (specify a commit editor in your configuration"
1228 1228 " file)\n"), not cmdpath and editor)
1229 1229 if not cmdpath and editor != 'vi':
1230 1230 problems += 1
1231 1231
1232 1232 # check username
1233 1233 username = None
1234 1234 err = None
1235 1235 try:
1236 1236 username = ui.username()
1237 1237 except error.Abort as e:
1238 1238 err = util.forcebytestr(e)
1239 1239 problems += 1
1240 1240
1241 1241 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1242 1242 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1243 1243 " (specify a username in your configuration file)\n"), err)
1244 1244
1245 1245 fm.condwrite(not problems, '',
1246 1246 _("no problems detected\n"))
1247 1247 if not problems:
1248 1248 fm.data(problems=problems)
1249 1249 fm.condwrite(problems, 'problems',
1250 1250 _("%d problems detected,"
1251 1251 " please check your install!\n"), problems)
1252 1252 fm.end()
1253 1253
1254 1254 return problems
1255 1255
1256 1256 @command('debugknown', [], _('REPO ID...'), norepo=True)
1257 1257 def debugknown(ui, repopath, *ids, **opts):
1258 1258 """test whether node ids are known to a repo
1259 1259
1260 1260 Every ID must be a full-length hex node id string. Returns a list of 0s
1261 1261 and 1s indicating unknown/known.
1262 1262 """
1263 1263 opts = pycompat.byteskwargs(opts)
1264 1264 repo = hg.peer(ui, opts, repopath)
1265 1265 if not repo.capable('known'):
1266 1266 raise error.Abort("known() not supported by target repository")
1267 1267 flags = repo.known([bin(s) for s in ids])
1268 1268 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1269 1269
1270 1270 @command('debuglabelcomplete', [], _('LABEL...'))
1271 1271 def debuglabelcomplete(ui, repo, *args):
1272 1272 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1273 1273 debugnamecomplete(ui, repo, *args)
1274 1274
1275 1275 @command('debuglocks',
1276 1276 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1277 1277 ('W', 'force-wlock', None,
1278 1278 _('free the working state lock (DANGEROUS)'))],
1279 1279 _('[OPTION]...'))
1280 1280 def debuglocks(ui, repo, **opts):
1281 1281 """show or modify state of locks
1282 1282
1283 1283 By default, this command will show which locks are held. This
1284 1284 includes the user and process holding the lock, the amount of time
1285 1285 the lock has been held, and the machine name where the process is
1286 1286 running if it's not local.
1287 1287
1288 1288 Locks protect the integrity of Mercurial's data, so should be
1289 1289 treated with care. System crashes or other interruptions may cause
1290 1290 locks to not be properly released, though Mercurial will usually
1291 1291 detect and remove such stale locks automatically.
1292 1292
1293 1293 However, detecting stale locks may not always be possible (for
1294 1294 instance, on a shared filesystem). Removing locks may also be
1295 1295 blocked by filesystem permissions.
1296 1296
1297 1297 Returns 0 if no locks are held.
1298 1298
1299 1299 """
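    # Illustrative output (user, pid and host values are hypothetical): with
    # no locks held this command prints
    #   lock:  free
    #   wlock: free
    # and returns 0, while a held or stale lock is reported by report() below
    # as e.g. "lock:  user alice, process 4321, host buildbox (12s)".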
1300 1300
1301 1301 if opts.get(r'force_lock'):
1302 1302 repo.svfs.unlink('lock')
1303 1303 if opts.get(r'force_wlock'):
1304 1304 repo.vfs.unlink('wlock')
1305 if opts.get(r'force_lock') or opts.get(r'force_lock'):
1305 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1306 1306 return 0
1307 1307
1308 1308 now = time.time()
1309 1309 held = 0
1310 1310
1311 1311 def report(vfs, name, method):
1312 1312 # this causes stale locks to get reaped for more accurate reporting
1313 1313 try:
1314 1314 l = method(False)
1315 1315 except error.LockHeld:
1316 1316 l = None
1317 1317
1318 1318 if l:
1319 1319 l.release()
1320 1320 else:
1321 1321 try:
1322 1322 stat = vfs.lstat(name)
1323 1323 age = now - stat.st_mtime
1324 1324 user = util.username(stat.st_uid)
1325 1325 locker = vfs.readlock(name)
1326 1326 if ":" in locker:
1327 1327 host, pid = locker.split(':')
1328 1328 if host == socket.gethostname():
1329 1329 locker = 'user %s, process %s' % (user, pid)
1330 1330 else:
1331 1331 locker = 'user %s, process %s, host %s' \
1332 1332 % (user, pid, host)
1333 1333 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1334 1334 return 1
1335 1335 except OSError as e:
1336 1336 if e.errno != errno.ENOENT:
1337 1337 raise
1338 1338
1339 1339 ui.write(("%-6s free\n") % (name + ":"))
1340 1340 return 0
1341 1341
1342 1342 held += report(repo.svfs, "lock", repo.lock)
1343 1343 held += report(repo.vfs, "wlock", repo.wlock)
1344 1344
1345 1345 return held
1346 1346
1347 1347 @command('debugmergestate', [], '')
1348 1348 def debugmergestate(ui, repo, *args):
1349 1349 """print merge state
1350 1350
1351 1351 Use --verbose to print out information about whether v1 or v2 merge state
1352 1352 was chosen."""
1353 1353 def _hashornull(h):
1354 1354 if h == nullhex:
1355 1355 return 'null'
1356 1356 else:
1357 1357 return h
1358 1358
1359 1359 def printrecords(version):
1360 1360 ui.write(('* version %s records\n') % version)
1361 1361 if version == 1:
1362 1362 records = v1records
1363 1363 else:
1364 1364 records = v2records
1365 1365
1366 1366 for rtype, record in records:
1367 1367 # pretty print some record types
1368 1368 if rtype == 'L':
1369 1369 ui.write(('local: %s\n') % record)
1370 1370 elif rtype == 'O':
1371 1371 ui.write(('other: %s\n') % record)
1372 1372 elif rtype == 'm':
1373 1373 driver, mdstate = record.split('\0', 1)
1374 1374 ui.write(('merge driver: %s (state "%s")\n')
1375 1375 % (driver, mdstate))
1376 1376 elif rtype in 'FDC':
1377 1377 r = record.split('\0')
1378 1378 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1379 1379 if version == 1:
1380 1380 onode = 'not stored in v1 format'
1381 1381 flags = r[7]
1382 1382 else:
1383 1383 onode, flags = r[7:9]
1384 1384 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1385 1385 % (f, rtype, state, _hashornull(hash)))
1386 1386 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1387 1387 ui.write((' ancestor path: %s (node %s)\n')
1388 1388 % (afile, _hashornull(anode)))
1389 1389 ui.write((' other path: %s (node %s)\n')
1390 1390 % (ofile, _hashornull(onode)))
1391 1391 elif rtype == 'f':
1392 1392 filename, rawextras = record.split('\0', 1)
1393 1393 extras = rawextras.split('\0')
1394 1394 i = 0
1395 1395 extrastrings = []
1396 1396 while i < len(extras):
1397 1397 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1398 1398 i += 2
1399 1399
1400 1400 ui.write(('file extras: %s (%s)\n')
1401 1401 % (filename, ', '.join(extrastrings)))
1402 1402 elif rtype == 'l':
1403 1403 labels = record.split('\0', 2)
1404 1404 labels = [l for l in labels if len(l) > 0]
1405 1405 ui.write(('labels:\n'))
1406 1406 ui.write((' local: %s\n' % labels[0]))
1407 1407 ui.write((' other: %s\n' % labels[1]))
1408 1408 if len(labels) > 2:
1409 1409 ui.write((' base: %s\n' % labels[2]))
1410 1410 else:
1411 1411 ui.write(('unrecognized entry: %s\t%s\n')
1412 1412 % (rtype, record.replace('\0', '\t')))
1413 1413
1414 1414 # Avoid mergestate.read() since it may raise an exception for unsupported
1415 1415 # merge state records. We shouldn't be doing this, but this is OK since this
1416 1416 # command is pretty low-level.
1417 1417 ms = mergemod.mergestate(repo)
1418 1418
1419 1419 # sort so that reasonable information is on top
1420 1420 v1records = ms._readrecordsv1()
1421 1421 v2records = ms._readrecordsv2()
1422 1422 order = 'LOml'
1423 1423 def key(r):
1424 1424 idx = order.find(r[0])
1425 1425 if idx == -1:
1426 1426 return (1, r[1])
1427 1427 else:
1428 1428 return (0, idx)
1429 1429 v1records.sort(key=key)
1430 1430 v2records.sort(key=key)
1431 1431
1432 1432 if not v1records and not v2records:
1433 1433 ui.write(('no merge state found\n'))
1434 1434 elif not v2records:
1435 1435 ui.note(('no version 2 merge state\n'))
1436 1436 printrecords(1)
1437 1437 elif ms._v1v2match(v1records, v2records):
1438 1438 ui.note(('v1 and v2 states match: using v2\n'))
1439 1439 printrecords(2)
1440 1440 else:
1441 1441 ui.note(('v1 and v2 states mismatch: using v1\n'))
1442 1442 printrecords(1)
1443 1443 if ui.verbose:
1444 1444 printrecords(2)
1445 1445
1446 1446 @command('debugnamecomplete', [], _('NAME...'))
1447 1447 def debugnamecomplete(ui, repo, *args):
1448 1448 '''complete "names" - tags, open branch names, bookmark names'''
1449 1449
1450 1450 names = set()
1451 1451 # since we previously only listed open branches, we will handle that
1452 1452 # specially (after this for loop)
1453 1453 for name, ns in repo.names.iteritems():
1454 1454 if name != 'branches':
1455 1455 names.update(ns.listnames(repo))
1456 1456 names.update(tag for (tag, heads, tip, closed)
1457 1457 in repo.branchmap().iterbranches() if not closed)
1458 1458 completions = set()
1459 1459 if not args:
1460 1460 args = ['']
1461 1461 for a in args:
1462 1462 completions.update(n for n in names if n.startswith(a))
1463 1463 ui.write('\n'.join(sorted(completions)))
1464 1464 ui.write('\n')
1465 1465
1466 1466 @command('debugobsolete',
1467 1467 [('', 'flags', 0, _('markers flag')),
1468 1468 ('', 'record-parents', False,
1469 1469 _('record parent information for the precursor')),
1470 1470 ('r', 'rev', [], _('display markers relevant to REV')),
1471 1471 ('', 'exclusive', False, _('restrict display to markers only '
1472 1472 'relevant to REV')),
1473 1473 ('', 'index', False, _('display index of the marker')),
1474 1474 ('', 'delete', [], _('delete markers specified by indices')),
1475 1475 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1476 1476 _('[OBSOLETED [REPLACEMENT ...]]'))
1477 1477 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1478 1478 """create arbitrary obsolete marker
1479 1479
1480 1480 With no arguments, displays the list of obsolescence markers."""
1481 1481
1482 1482 opts = pycompat.byteskwargs(opts)
1483 1483
1484 1484 def parsenodeid(s):
1485 1485 try:
1486 1486 # We do not use revsingle/revrange functions here to accept
1487 1487 # arbitrary node identifiers, possibly not present in the
1488 1488 # local repository.
1489 1489 n = bin(s)
1490 1490 if len(n) != len(nullid):
1491 1491 raise TypeError()
1492 1492 return n
1493 1493 except TypeError:
1494 1494 raise error.Abort('changeset references must be full hexadecimal '
1495 1495 'node identifiers')
1496 1496
1497 1497 if opts.get('delete'):
1498 1498 indices = []
1499 1499 for v in opts.get('delete'):
1500 1500 try:
1501 1501 indices.append(int(v))
1502 1502 except ValueError:
1503 1503 raise error.Abort(_('invalid index value: %r') % v,
1504 1504 hint=_('use integers for indices'))
1505 1505
1506 1506 if repo.currenttransaction():
1507 1507 raise error.Abort(_('cannot delete obsmarkers in the middle '
1508 1508 'of transaction.'))
1509 1509
1510 1510 with repo.lock():
1511 1511 n = repair.deleteobsmarkers(repo.obsstore, indices)
1512 1512 ui.write(_('deleted %i obsolescence markers\n') % n)
1513 1513
1514 1514 return
1515 1515
1516 1516 if precursor is not None:
1517 1517 if opts['rev']:
1518 1518 raise error.Abort('cannot select revision when creating marker')
1519 1519 metadata = {}
1520 1520 metadata['user'] = opts['user'] or ui.username()
1521 1521 succs = tuple(parsenodeid(succ) for succ in successors)
1522 1522 l = repo.lock()
1523 1523 try:
1524 1524 tr = repo.transaction('debugobsolete')
1525 1525 try:
1526 1526 date = opts.get('date')
1527 1527 if date:
1528 1528 date = util.parsedate(date)
1529 1529 else:
1530 1530 date = None
1531 1531 prec = parsenodeid(precursor)
1532 1532 parents = None
1533 1533 if opts['record_parents']:
1534 1534 if prec not in repo.unfiltered():
1535 1535 raise error.Abort('cannot use --record-parents on '
1536 1536 'unknown changesets')
1537 1537 parents = repo.unfiltered()[prec].parents()
1538 1538 parents = tuple(p.node() for p in parents)
1539 1539 repo.obsstore.create(tr, prec, succs, opts['flags'],
1540 1540 parents=parents, date=date,
1541 1541 metadata=metadata, ui=ui)
1542 1542 tr.close()
1543 1543 except ValueError as exc:
1544 1544 raise error.Abort(_('bad obsmarker input: %s') % exc)
1545 1545 finally:
1546 1546 tr.release()
1547 1547 finally:
1548 1548 l.release()
1549 1549 else:
1550 1550 if opts['rev']:
1551 1551 revs = scmutil.revrange(repo, opts['rev'])
1552 1552 nodes = [repo[r].node() for r in revs]
1553 1553 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1554 1554 exclusive=opts['exclusive']))
1555 1555 markers.sort(key=lambda x: x._data)
1556 1556 else:
1557 1557 markers = obsutil.getmarkers(repo)
1558 1558
1559 1559 markerstoiter = markers
1560 1560 isrelevant = lambda m: True
1561 1561 if opts.get('rev') and opts.get('index'):
1562 1562 markerstoiter = obsutil.getmarkers(repo)
1563 1563 markerset = set(markers)
1564 1564 isrelevant = lambda m: m in markerset
1565 1565
1566 1566 fm = ui.formatter('debugobsolete', opts)
1567 1567 for i, m in enumerate(markerstoiter):
1568 1568 if not isrelevant(m):
1569 1569 # marker can be irrelevant when we're iterating over a set
1570 1570 # of markers (markerstoiter) which is bigger than the set
1571 1571 # of markers we want to display (markers)
1572 1572 # this can happen if both --index and --rev options are
1573 1573 # provided and thus we need to iterate over all of the markers
1574 1574 # to get the correct indices, but only display the ones that
1575 1575 # are relevant to --rev value
1576 1576 continue
1577 1577 fm.startitem()
1578 1578 ind = i if opts.get('index') else None
1579 1579 cmdutil.showmarker(fm, m, index=ind)
1580 1580 fm.end()
1581 1581
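# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of the --index/--rev interaction handled above: iterate
# over *all* markers so the printed indices stay globally stable, but only
# display the ones relevant to --rev.
def _showindexed(allmarkers, relevant):
    relevantset = set(relevant)
    for i, marker in enumerate(allmarkers):
        if marker in relevantset:
            print('%d: %s' % (i, marker))

# e.g. _showindexed(['m0', 'm1', 'm2'], ['m2']) prints "2: m2", not "0: m2"
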
1582 1582 @command('debugpathcomplete',
1583 1583 [('f', 'full', None, _('complete an entire path')),
1584 1584 ('n', 'normal', None, _('show only normal files')),
1585 1585 ('a', 'added', None, _('show only added files')),
1586 1586 ('r', 'removed', None, _('show only removed files'))],
1587 1587 _('FILESPEC...'))
1588 1588 def debugpathcomplete(ui, repo, *specs, **opts):
1589 1589 '''complete part or all of a tracked path
1590 1590
1591 1591 This command supports shells that offer path name completion. It
1592 1592 currently completes only files already known to the dirstate.
1593 1593
1594 1594 Completion extends only to the next path segment unless
1595 1595 --full is specified, in which case entire paths are used.'''
1596 1596
1597 1597 def complete(path, acceptable):
1598 1598 dirstate = repo.dirstate
1599 1599 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1600 1600 rootdir = repo.root + pycompat.ossep
1601 1601 if spec != repo.root and not spec.startswith(rootdir):
1602 1602 return [], []
1603 1603 if os.path.isdir(spec):
1604 1604 spec += '/'
1605 1605 spec = spec[len(rootdir):]
1606 1606 fixpaths = pycompat.ossep != '/'
1607 1607 if fixpaths:
1608 1608 spec = spec.replace(pycompat.ossep, '/')
1609 1609 speclen = len(spec)
1610 1610 fullpaths = opts[r'full']
1611 1611 files, dirs = set(), set()
1612 1612 adddir, addfile = dirs.add, files.add
1613 1613 for f, st in dirstate.iteritems():
1614 1614 if f.startswith(spec) and st[0] in acceptable:
1615 1615 if fixpaths:
1616 1616 f = f.replace('/', pycompat.ossep)
1617 1617 if fullpaths:
1618 1618 addfile(f)
1619 1619 continue
1620 1620 s = f.find(pycompat.ossep, speclen)
1621 1621 if s >= 0:
1622 1622 adddir(f[:s])
1623 1623 else:
1624 1624 addfile(f)
1625 1625 return files, dirs
1626 1626
1627 1627 acceptable = ''
1628 1628 if opts[r'normal']:
1629 1629 acceptable += 'nm'
1630 1630 if opts[r'added']:
1631 1631 acceptable += 'a'
1632 1632 if opts[r'removed']:
1633 1633 acceptable += 'r'
1634 1634 cwd = repo.getcwd()
1635 1635 if not specs:
1636 1636 specs = ['.']
1637 1637
1638 1638 files, dirs = set(), set()
1639 1639 for spec in specs:
1640 1640 f, d = complete(spec, acceptable or 'nmar')
1641 1641 files.update(f)
1642 1642 dirs.update(d)
1643 1643 files.update(dirs)
1644 1644 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1645 1645 ui.write('\n')
1646 1646
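# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of "complete only to the next path segment" as implemented
# in complete() above: a matching file is either returned in full (--full) or
# truncated at its next separator.
def _nextsegment(path, spec, sep='/'):
    assert path.startswith(spec)
    s = path.find(sep, len(spec))
    return path if s < 0 else path[:s]

# e.g. _nextsegment('dir/sub/file.txt', 'di') == 'dir'
#      _nextsegment('dir/sub/file.txt', 'dir/') == 'dir/sub'
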
1647 1647 @command('debugpickmergetool',
1648 1648 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1649 1649 ('', 'changedelete', None, _('emulate merging change and delete')),
1650 1650 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1651 1651 _('[PATTERN]...'),
1652 1652 inferrepo=True)
1653 1653 def debugpickmergetool(ui, repo, *pats, **opts):
1654 1654 """examine which merge tool is chosen for specified file
1655 1655
1656 1656 As described in :hg:`help merge-tools`, Mercurial examines the
1657 1657 configurations below in this order to decide which merge tool is
1658 1658 chosen for the specified file.
1659 1659
1660 1660 1. ``--tool`` option
1661 1661 2. ``HGMERGE`` environment variable
1662 1662 3. configurations in ``merge-patterns`` section
1663 1663 4. configuration of ``ui.merge``
1664 1664 5. configurations in ``merge-tools`` section
1665 1665 6. ``hgmerge`` tool (for historical reasons only)
1666 1666 7. default tool for fallback (``:merge`` or ``:prompt``)
1667 1667
1668 1668 This command writes out the examination result in the style below::
1669 1669
1670 1670 FILE = MERGETOOL
1671 1671
1672 1672 By default, all files known in the first parent context of the
1673 1673 working directory are examined. Use file patterns and/or -I/-X
1674 1674 options to limit target files. -r/--rev is also useful to examine
1675 1675 files in another context without actually updating to it.
1676 1676
1677 1677 With --debug, this command also shows warning messages produced
1678 1678 while matching against ``merge-patterns`` and so on. It is recommended
1679 1679 to use this option with explicit file patterns and/or -I/-X options,
1680 1680 because this option increases the amount of output per file according
1681 1681 to the configurations in hgrc.
1682 1682
1683 1683 With -v/--verbose, this command first shows the configurations
1684 1684 below (only if they are specified).
1685 1685
1686 1686 - ``--tool`` option
1687 1687 - ``HGMERGE`` environment variable
1688 1688 - configuration of ``ui.merge``
1689 1689
1690 1690 If a merge tool is chosen before matching against
1691 1691 ``merge-patterns``, this command can't show any helpful
1692 1692 information, even with --debug. In such a case, the information
1693 1693 above is useful for understanding why that merge tool was chosen.
1694 1694 """
1695 1695 opts = pycompat.byteskwargs(opts)
1696 1696 overrides = {}
1697 1697 if opts['tool']:
1698 1698 overrides[('ui', 'forcemerge')] = opts['tool']
1699 1699 ui.note(('with --tool %r\n') % (opts['tool']))
1700 1700
1701 1701 with ui.configoverride(overrides, 'debugmergepatterns'):
1702 1702 hgmerge = encoding.environ.get("HGMERGE")
1703 1703 if hgmerge is not None:
1704 1704 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1705 1705 uimerge = ui.config("ui", "merge")
1706 1706 if uimerge:
1707 1707 ui.note(('with ui.merge=%r\n') % (uimerge))
1708 1708
1709 1709 ctx = scmutil.revsingle(repo, opts.get('rev'))
1710 1710 m = scmutil.match(ctx, pats, opts)
1711 1711 changedelete = opts['changedelete']
1712 1712 for path in ctx.walk(m):
1713 1713 fctx = ctx[path]
1714 1714 try:
1715 1715 if not ui.debugflag:
1716 1716 ui.pushbuffer(error=True)
1717 1717 tool, toolpath = filemerge._picktool(repo, ui, path,
1718 1718 fctx.isbinary(),
1719 1719 'l' in fctx.flags(),
1720 1720 changedelete)
1721 1721 finally:
1722 1722 if not ui.debugflag:
1723 1723 ui.popbuffer()
1724 1724 ui.write(('%s = %s\n') % (path, tool))
1725 1725
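# A minimal sketch (not part of debugcommands.py; the real logic lives in
# filemerge._picktool and the helper name here is made up): the precedence
# described in the docstring above is essentially "first configured source
# wins".
def _firstconfigured(tooloption=None, hgmerge=None, patternmatch=None,
                     uimerge=None):
    # models (1) --tool, (2) $HGMERGE, (3) a merge-patterns match and
    # (4) ui.merge; later sources and the ':merge'/':prompt' fallback are
    # omitted for brevity.
    for candidate in (tooloption, hgmerge, patternmatch, uimerge):
        if candidate:
            return candidate
    return ':merge'

# e.g. _firstconfigured(hgmerge='vimdiff', uimerge=':merge3') == 'vimdiff'
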
1726 1726 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1727 1727 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1728 1728 '''access the pushkey key/value protocol
1729 1729
1730 1730 With two args, list the keys in the given namespace.
1731 1731
1732 1732 With five args, set a key to new if it currently is set to old.
1733 1733 Reports success or failure.
1734 1734 '''
1735 1735
1736 1736 target = hg.peer(ui, {}, repopath)
1737 1737 if keyinfo:
1738 1738 key, old, new = keyinfo
1739 1739 r = target.pushkey(namespace, key, old, new)
1740 1740 ui.status(str(r) + '\n')
1741 1741 return not r
1742 1742 else:
1743 1743 for k, v in sorted(target.listkeys(namespace).iteritems()):
1744 1744 ui.write("%s\t%s\n" % (util.escapestr(k),
1745 1745 util.escapestr(v)))
1746 1746
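# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of the five-argument form above: pushkey is a
# compare-and-set, so the key only moves to NEW if its current value is still
# OLD.
def _compareandset(mapping, key, old, new):
    if mapping.get(key) != old:
        return False                   # the command reports this as failure
    mapping[key] = new
    return True

# e.g. _compareandset({'bm': 'abc'}, 'bm', 'abc', 'def') is True
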
1747 1747 @command('debugpvec', [], _('A B'))
1748 1748 def debugpvec(ui, repo, a, b=None):
1749 1749 ca = scmutil.revsingle(repo, a)
1750 1750 cb = scmutil.revsingle(repo, b)
1751 1751 pa = pvec.ctxpvec(ca)
1752 1752 pb = pvec.ctxpvec(cb)
1753 1753 if pa == pb:
1754 1754 rel = "="
1755 1755 elif pa > pb:
1756 1756 rel = ">"
1757 1757 elif pa < pb:
1758 1758 rel = "<"
1759 1759 elif pa | pb:
1760 1760 rel = "|"
1761 1761 ui.write(_("a: %s\n") % pa)
1762 1762 ui.write(_("b: %s\n") % pb)
1763 1763 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1764 1764 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1765 1765 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1766 1766 pa.distance(pb), rel))
1767 1767
1768 1768 @command('debugrebuilddirstate|debugrebuildstate',
1769 1769 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1770 1770 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1771 1771 'the working copy parent')),
1772 1772 ],
1773 1773 _('[-r REV]'))
1774 1774 def debugrebuilddirstate(ui, repo, rev, **opts):
1775 1775 """rebuild the dirstate as it would look like for the given revision
1776 1776
1777 1777 If no revision is specified, the first parent of the working directory will be used.
1778 1778
1779 1779 The dirstate will be set to the files of the given revision.
1780 1780 The actual working directory content or existing dirstate
1781 1781 information such as adds or removes is not considered.
1782 1782
1783 1783 ``minimal`` will only rebuild the dirstate status for files that claim to be
1784 1784 tracked but are not in the parent manifest, or that exist in the parent
1785 1785 manifest but are not in the dirstate. It will not change adds, removes, or
1786 1786 modified files that are in the working copy parent.
1787 1787
1788 1788 One use of this command is to make the next :hg:`status` invocation
1789 1789 check the actual file content.
1790 1790 """
1791 1791 ctx = scmutil.revsingle(repo, rev)
1792 1792 with repo.wlock():
1793 1793 dirstate = repo.dirstate
1794 1794 changedfiles = None
1795 1795 # See command doc for what minimal does.
1796 1796 if opts.get(r'minimal'):
1797 1797 manifestfiles = set(ctx.manifest().keys())
1798 1798 dirstatefiles = set(dirstate)
1799 1799 manifestonly = manifestfiles - dirstatefiles
1800 1800 dsonly = dirstatefiles - manifestfiles
1801 1801 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1802 1802 changedfiles = manifestonly | dsnotadded
1803 1803
1804 1804 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1805 1805
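# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of the --minimal set arithmetic above, with plain sets
# standing in for the manifest and dirstate.
def _minimalchanged(manifestfiles, dirstatefiles, dirstatestatus):
    manifestonly = manifestfiles - dirstatefiles
    dsonly = dirstatefiles - manifestfiles
    dsnotadded = set(f for f in dsonly if dirstatestatus[f] != 'a')
    return manifestonly | dsnotadded

# e.g. _minimalchanged({'a', 'b'}, {'b', 'c', 'd'},
#                      {'b': 'n', 'c': 'a', 'd': 'r'}) == {'a', 'd'}
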
1806 1806 @command('debugrebuildfncache', [], '')
1807 1807 def debugrebuildfncache(ui, repo):
1808 1808 """rebuild the fncache file"""
1809 1809 repair.rebuildfncache(ui, repo)
1810 1810
1811 1811 @command('debugrename',
1812 1812 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1813 1813 _('[-r REV] FILE'))
1814 1814 def debugrename(ui, repo, file1, *pats, **opts):
1815 1815 """dump rename information"""
1816 1816
1817 1817 opts = pycompat.byteskwargs(opts)
1818 1818 ctx = scmutil.revsingle(repo, opts.get('rev'))
1819 1819 m = scmutil.match(ctx, (file1,) + pats, opts)
1820 1820 for abs in ctx.walk(m):
1821 1821 fctx = ctx[abs]
1822 1822 o = fctx.filelog().renamed(fctx.filenode())
1823 1823 rel = m.rel(abs)
1824 1824 if o:
1825 1825 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1826 1826 else:
1827 1827 ui.write(_("%s not renamed\n") % rel)
1828 1828
1829 1829 @command('debugrevlog', cmdutil.debugrevlogopts +
1830 1830 [('d', 'dump', False, _('dump index data'))],
1831 1831 _('-c|-m|FILE'),
1832 1832 optionalrepo=True)
1833 1833 def debugrevlog(ui, repo, file_=None, **opts):
1834 1834 """show data and statistics about a revlog"""
1835 1835 opts = pycompat.byteskwargs(opts)
1836 1836 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1837 1837
1838 1838 if opts.get("dump"):
1839 1839 numrevs = len(r)
1840 1840 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1841 1841 " rawsize totalsize compression heads chainlen\n"))
1842 1842 ts = 0
1843 1843 heads = set()
1844 1844
1845 1845 for rev in xrange(numrevs):
1846 1846 dbase = r.deltaparent(rev)
1847 1847 if dbase == -1:
1848 1848 dbase = rev
1849 1849 cbase = r.chainbase(rev)
1850 1850 clen = r.chainlen(rev)
1851 1851 p1, p2 = r.parentrevs(rev)
1852 1852 rs = r.rawsize(rev)
1853 1853 ts = ts + rs
1854 1854 heads -= set(r.parentrevs(rev))
1855 1855 heads.add(rev)
1856 1856 try:
1857 1857 compression = ts / r.end(rev)
1858 1858 except ZeroDivisionError:
1859 1859 compression = 0
1860 1860 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1861 1861 "%11d %5d %8d\n" %
1862 1862 (rev, p1, p2, r.start(rev), r.end(rev),
1863 1863 r.start(dbase), r.start(cbase),
1864 1864 r.start(p1), r.start(p2),
1865 1865 rs, ts, compression, len(heads), clen))
1866 1866 return 0
1867 1867
1868 1868 v = r.version
1869 1869 format = v & 0xFFFF
1870 1870 flags = []
1871 1871 gdelta = False
1872 1872 if v & revlog.FLAG_INLINE_DATA:
1873 1873 flags.append('inline')
1874 1874 if v & revlog.FLAG_GENERALDELTA:
1875 1875 gdelta = True
1876 1876 flags.append('generaldelta')
1877 1877 if not flags:
1878 1878 flags = ['(none)']
1879 1879
1880 1880 nummerges = 0
1881 1881 numfull = 0
1882 1882 numprev = 0
1883 1883 nump1 = 0
1884 1884 nump2 = 0
1885 1885 numother = 0
1886 1886 nump1prev = 0
1887 1887 nump2prev = 0
1888 1888 chainlengths = []
1889 1889 chainbases = []
1890 1890 chainspans = []
1891 1891
1892 1892 datasize = [None, 0, 0]
1893 1893 fullsize = [None, 0, 0]
1894 1894 deltasize = [None, 0, 0]
1895 1895 chunktypecounts = {}
1896 1896 chunktypesizes = {}
1897 1897
1898 1898 def addsize(size, l):
1899 1899 if l[0] is None or size < l[0]:
1900 1900 l[0] = size
1901 1901 if size > l[1]:
1902 1902 l[1] = size
1903 1903 l[2] += size
1904 1904
1905 1905 numrevs = len(r)
1906 1906 for rev in xrange(numrevs):
1907 1907 p1, p2 = r.parentrevs(rev)
1908 1908 delta = r.deltaparent(rev)
1909 1909 if format > 0:
1910 1910 addsize(r.rawsize(rev), datasize)
1911 1911 if p2 != nullrev:
1912 1912 nummerges += 1
1913 1913 size = r.length(rev)
1914 1914 if delta == nullrev:
1915 1915 chainlengths.append(0)
1916 1916 chainbases.append(r.start(rev))
1917 1917 chainspans.append(size)
1918 1918 numfull += 1
1919 1919 addsize(size, fullsize)
1920 1920 else:
1921 1921 chainlengths.append(chainlengths[delta] + 1)
1922 1922 baseaddr = chainbases[delta]
1923 1923 revaddr = r.start(rev)
1924 1924 chainbases.append(baseaddr)
1925 1925 chainspans.append((revaddr - baseaddr) + size)
1926 1926 addsize(size, deltasize)
1927 1927 if delta == rev - 1:
1928 1928 numprev += 1
1929 1929 if delta == p1:
1930 1930 nump1prev += 1
1931 1931 elif delta == p2:
1932 1932 nump2prev += 1
1933 1933 elif delta == p1:
1934 1934 nump1 += 1
1935 1935 elif delta == p2:
1936 1936 nump2 += 1
1937 1937 elif delta != nullrev:
1938 1938 numother += 1
1939 1939
1940 1940 # Obtain data on the raw chunks in the revlog.
1941 1941 segment = r._getsegmentforrevs(rev, rev)[1]
1942 1942 if segment:
1943 1943 chunktype = bytes(segment[0:1])
1944 1944 else:
1945 1945 chunktype = 'empty'
1946 1946
1947 1947 if chunktype not in chunktypecounts:
1948 1948 chunktypecounts[chunktype] = 0
1949 1949 chunktypesizes[chunktype] = 0
1950 1950
1951 1951 chunktypecounts[chunktype] += 1
1952 1952 chunktypesizes[chunktype] += size
1953 1953
1954 1954 # Adjust size min value for empty cases
1955 1955 for size in (datasize, fullsize, deltasize):
1956 1956 if size[0] is None:
1957 1957 size[0] = 0
1958 1958
1959 1959 numdeltas = numrevs - numfull
1960 1960 numoprev = numprev - nump1prev - nump2prev
1961 1961 totalrawsize = datasize[2]
1962 1962 datasize[2] /= numrevs
1963 1963 fulltotal = fullsize[2]
1964 1964 fullsize[2] /= numfull
1965 1965 deltatotal = deltasize[2]
1966 1966 if numrevs - numfull > 0:
1967 1967 deltasize[2] /= numrevs - numfull
1968 1968 totalsize = fulltotal + deltatotal
1969 1969 avgchainlen = sum(chainlengths) / numrevs
1970 1970 maxchainlen = max(chainlengths)
1971 1971 maxchainspan = max(chainspans)
1972 1972 compratio = 1
1973 1973 if totalsize:
1974 1974 compratio = totalrawsize / totalsize
1975 1975
1976 1976 basedfmtstr = '%%%dd\n'
1977 1977 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1978 1978
1979 1979 def dfmtstr(max):
1980 1980 return basedfmtstr % len(str(max))
1981 1981 def pcfmtstr(max, padding=0):
1982 1982 return basepcfmtstr % (len(str(max)), ' ' * padding)
1983 1983
1984 1984 def pcfmt(value, total):
1985 1985 if total:
1986 1986 return (value, 100 * float(value) / total)
1987 1987 else:
1988 1988 return value, 100.0
1989 1989
1990 1990 ui.write(('format : %d\n') % format)
1991 1991 ui.write(('flags : %s\n') % ', '.join(flags))
1992 1992
1993 1993 ui.write('\n')
1994 1994 fmt = pcfmtstr(totalsize)
1995 1995 fmt2 = dfmtstr(totalsize)
1996 1996 ui.write(('revisions : ') + fmt2 % numrevs)
1997 1997 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1998 1998 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1999 1999 ui.write(('revisions : ') + fmt2 % numrevs)
2000 2000 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2001 2001 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2002 2002 ui.write(('revision size : ') + fmt2 % totalsize)
2003 2003 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2004 2004 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2005 2005
2006 2006 def fmtchunktype(chunktype):
2007 2007 if chunktype == 'empty':
2008 2008 return ' %s : ' % chunktype
2009 2009 elif chunktype in pycompat.bytestr(string.ascii_letters):
2010 2010 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2011 2011 else:
2012 2012 return ' 0x%s : ' % hex(chunktype)
2013 2013
2014 2014 ui.write('\n')
2015 2015 ui.write(('chunks : ') + fmt2 % numrevs)
2016 2016 for chunktype in sorted(chunktypecounts):
2017 2017 ui.write(fmtchunktype(chunktype))
2018 2018 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2019 2019 ui.write(('chunks size : ') + fmt2 % totalsize)
2020 2020 for chunktype in sorted(chunktypecounts):
2021 2021 ui.write(fmtchunktype(chunktype))
2022 2022 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2023 2023
2024 2024 ui.write('\n')
2025 2025 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2026 2026 ui.write(('avg chain length : ') + fmt % avgchainlen)
2027 2027 ui.write(('max chain length : ') + fmt % maxchainlen)
2028 2028 ui.write(('max chain reach : ') + fmt % maxchainspan)
2029 2029 ui.write(('compression ratio : ') + fmt % compratio)
2030 2030
2031 2031 if format > 0:
2032 2032 ui.write('\n')
2033 2033 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2034 2034 % tuple(datasize))
2035 2035 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2036 2036 % tuple(fullsize))
2037 2037 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2038 2038 % tuple(deltasize))
2039 2039
2040 2040 if numdeltas > 0:
2041 2041 ui.write('\n')
2042 2042 fmt = pcfmtstr(numdeltas)
2043 2043 fmt2 = pcfmtstr(numdeltas, 4)
2044 2044 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2045 2045 if numprev > 0:
2046 2046 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2047 2047 numprev))
2048 2048 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2049 2049 numprev))
2050 2050 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2051 2051 numprev))
2052 2052 if gdelta:
2053 2053 ui.write(('deltas against p1 : ')
2054 2054 + fmt % pcfmt(nump1, numdeltas))
2055 2055 ui.write(('deltas against p2 : ')
2056 2056 + fmt % pcfmt(nump2, numdeltas))
2057 2057 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2058 2058 numdeltas))
2059 2059
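# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of the chain-length bookkeeping in the loop above: a full
# revision restarts the delta chain, while a delta extends its delta parent's
# chain by one.
def _chainlengths(deltaparents, nullrev=-1):
    lengths = []
    for rev, dp in enumerate(deltaparents):
        lengths.append(0 if dp == nullrev else lengths[dp] + 1)
    return lengths

# e.g. _chainlengths([-1, 0, 1, -1, 3]) == [0, 1, 2, 0, 1]
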
2060 2060 @command('debugrevspec',
2061 2061 [('', 'optimize', None,
2062 2062 _('print parsed tree after optimizing (DEPRECATED)')),
2063 2063 ('', 'show-revs', True, _('print list of result revisions (default)')),
2064 2064 ('s', 'show-set', None, _('print internal representation of result set')),
2065 2065 ('p', 'show-stage', [],
2066 2066 _('print parsed tree at the given stage'), _('NAME')),
2067 2067 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2068 2068 ('', 'verify-optimized', False, _('verify optimized result')),
2069 2069 ],
2070 2070 ('REVSPEC'))
2071 2071 def debugrevspec(ui, repo, expr, **opts):
2072 2072 """parse and apply a revision specification
2073 2073
2074 2074 Use the -p/--show-stage option to print the parsed tree at the given stages.
2075 2075 Use -p all to print the tree at every stage.
2076 2076
2077 2077 Use the --no-show-revs option with -s or -p to print only the set
2078 2078 representation or the parsed tree, respectively.
2079 2079
2080 2080 Use --verify-optimized to compare the optimized result with the unoptimized
2081 2081 one. Returns 1 if the optimized result differs.
2082 2082 """
2083 2083 opts = pycompat.byteskwargs(opts)
2084 2084 aliases = ui.configitems('revsetalias')
2085 2085 stages = [
2086 2086 ('parsed', lambda tree: tree),
2087 2087 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2088 2088 ui.warn)),
2089 2089 ('concatenated', revsetlang.foldconcat),
2090 2090 ('analyzed', revsetlang.analyze),
2091 2091 ('optimized', revsetlang.optimize),
2092 2092 ]
2093 2093 if opts['no_optimized']:
2094 2094 stages = stages[:-1]
2095 2095 if opts['verify_optimized'] and opts['no_optimized']:
2096 2096 raise error.Abort(_('cannot use --verify-optimized with '
2097 2097 '--no-optimized'))
2098 2098 stagenames = set(n for n, f in stages)
2099 2099
2100 2100 showalways = set()
2101 2101 showchanged = set()
2102 2102 if ui.verbose and not opts['show_stage']:
2103 2103 # show parsed tree by --verbose (deprecated)
2104 2104 showalways.add('parsed')
2105 2105 showchanged.update(['expanded', 'concatenated'])
2106 2106 if opts['optimize']:
2107 2107 showalways.add('optimized')
2108 2108 if opts['show_stage'] and opts['optimize']:
2109 2109 raise error.Abort(_('cannot use --optimize with --show-stage'))
2110 2110 if opts['show_stage'] == ['all']:
2111 2111 showalways.update(stagenames)
2112 2112 else:
2113 2113 for n in opts['show_stage']:
2114 2114 if n not in stagenames:
2115 2115 raise error.Abort(_('invalid stage name: %s') % n)
2116 2116 showalways.update(opts['show_stage'])
2117 2117
2118 2118 treebystage = {}
2119 2119 printedtree = None
2120 2120 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2121 2121 for n, f in stages:
2122 2122 treebystage[n] = tree = f(tree)
2123 2123 if n in showalways or (n in showchanged and tree != printedtree):
2124 2124 if opts['show_stage'] or n != 'parsed':
2125 2125 ui.write(("* %s:\n") % n)
2126 2126 ui.write(revsetlang.prettyformat(tree), "\n")
2127 2127 printedtree = tree
2128 2128
2129 2129 if opts['verify_optimized']:
2130 2130 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2131 2131 brevs = revset.makematcher(treebystage['optimized'])(repo)
2132 2132 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2133 2133 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2134 2134 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2135 2135 arevs = list(arevs)
2136 2136 brevs = list(brevs)
2137 2137 if arevs == brevs:
2138 2138 return 0
2139 2139 ui.write(('--- analyzed\n'), label='diff.file_a')
2140 2140 ui.write(('+++ optimized\n'), label='diff.file_b')
2141 2141 sm = difflib.SequenceMatcher(None, arevs, brevs)
2142 2142 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2143 2143 if tag in ('delete', 'replace'):
2144 2144 for c in arevs[alo:ahi]:
2145 2145 ui.write('-%s\n' % c, label='diff.deleted')
2146 2146 if tag in ('insert', 'replace'):
2147 2147 for c in brevs[blo:bhi]:
2148 2148 ui.write('+%s\n' % c, label='diff.inserted')
2149 2149 if tag == 'equal':
2150 2150 for c in arevs[alo:ahi]:
2151 2151 ui.write(' %s\n' % c)
2152 2152 return 1
2153 2153
2154 2154 func = revset.makematcher(tree)
2155 2155 revs = func(repo)
2156 2156 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2157 2157 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2158 2158 if not opts['show_revs']:
2159 2159 return
2160 2160 for c in revs:
2161 2161 ui.write("%s\n" % c)
2162 2162
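# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of how --verify-optimized renders the difference between
# the analyzed and optimized revision lists with difflib, mirroring the loop
# above.
import difflib

def _diffrevs(arevs, brevs):
    sm = difflib.SequenceMatcher(None, arevs, brevs)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for r in arevs[alo:ahi]:
                print('-%s' % r)
        if tag in ('insert', 'replace'):
            for r in brevs[blo:bhi]:
                print('+%s' % r)
        if tag == 'equal':
            for r in arevs[alo:ahi]:
                print(' %s' % r)

# e.g. _diffrevs([0, 1, 2], [0, 2]) prints " 0", "-1", " 2"
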
2163 2163 @command('debugsetparents', [], _('REV1 [REV2]'))
2164 2164 def debugsetparents(ui, repo, rev1, rev2=None):
2165 2165 """manually set the parents of the current working directory
2166 2166
2167 2167 This is useful for writing repository conversion tools, but should
2168 2168 be used with care. For example, neither the working directory nor the
2169 2169 dirstate is updated, so file status may be incorrect after running this
2170 2170 command.
2171 2171
2172 2172 Returns 0 on success.
2173 2173 """
2174 2174
2175 2175 r1 = scmutil.revsingle(repo, rev1).node()
2176 2176 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2177 2177
2178 2178 with repo.wlock():
2179 2179 repo.setparents(r1, r2)
2180 2180
2181 2181 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2182 2182 def debugssl(ui, repo, source=None, **opts):
2183 2183 '''test a secure connection to a server
2184 2184
2185 2185 This builds the certificate chain for the server on Windows, installing the
2186 2186 missing intermediates and trusted root via Windows Update if necessary. It
2187 2187 does nothing on other platforms.
2188 2188
2189 2189 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2190 2190 that server is used. See :hg:`help urls` for more information.
2191 2191
2192 2192 If the update succeeds, retry the original operation. Otherwise, the cause
2193 2193 of the SSL error is likely another issue.
2194 2194 '''
2195 2195 if not pycompat.iswindows:
2196 2196 raise error.Abort(_('certificate chain building is only possible on '
2197 2197 'Windows'))
2198 2198
2199 2199 if not source:
2200 2200 if not repo:
2201 2201 raise error.Abort(_("there is no Mercurial repository here, and no "
2202 2202 "server specified"))
2203 2203 source = "default"
2204 2204
2205 2205 source, branches = hg.parseurl(ui.expandpath(source))
2206 2206 url = util.url(source)
2207 2207 addr = None
2208 2208
2209 2209 if url.scheme == 'https':
2210 2210 addr = (url.host, url.port or 443)
2211 2211 elif url.scheme == 'ssh':
2212 2212 addr = (url.host, url.port or 22)
2213 2213 else:
2214 2214 raise error.Abort(_("only https and ssh connections are supported"))
2215 2215
2216 2216 from . import win32
2217 2217
2218 2218 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2219 2219 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2220 2220
2221 2221 try:
2222 2222 s.connect(addr)
2223 2223 cert = s.getpeercert(True)
2224 2224
2225 2225 ui.status(_('checking the certificate chain for %s\n') % url.host)
2226 2226
2227 2227 complete = win32.checkcertificatechain(cert, build=False)
2228 2228
2229 2229 if not complete:
2230 2230 ui.status(_('certificate chain is incomplete, updating... '))
2231 2231
2232 2232 if not win32.checkcertificatechain(cert):
2233 2233 ui.status(_('failed.\n'))
2234 2234 else:
2235 2235 ui.status(_('done.\n'))
2236 2236 else:
2237 2237 ui.status(_('full certificate chain is available\n'))
2238 2238 finally:
2239 2239 s.close()
2240 2240
2241 2241 @command('debugsub',
2242 2242 [('r', 'rev', '',
2243 2243 _('revision to check'), _('REV'))],
2244 2244 _('[-r REV] [REV]'))
2245 2245 def debugsub(ui, repo, rev=None):
2246 2246 ctx = scmutil.revsingle(repo, rev, None)
2247 2247 for k, v in sorted(ctx.substate.items()):
2248 2248 ui.write(('path %s\n') % k)
2249 2249 ui.write((' source %s\n') % v[0])
2250 2250 ui.write((' revision %s\n') % v[1])
2251 2251
2252 2252 @command('debugsuccessorssets',
2253 2253 [('', 'closest', False, _('return closest successors sets only'))],
2254 2254 _('[REV]'))
2255 2255 def debugsuccessorssets(ui, repo, *revs, **opts):
2256 2256 """show set of successors for revision
2257 2257
2258 2258 A successors set of changeset A is a consistent group of revisions that
2259 2259 succeed A. It contains only non-obsolete changesets, unless the closest
2260 2260 successors sets are requested (--closest).
2261 2261
2262 2262 In most cases a changeset A has a single successors set containing a single
2263 2263 successor (changeset A replaced by A').
2264 2264
2265 2265 A changeset that is made obsolete with no successors is called "pruned".
2266 2266 Such changesets have no successors sets at all.
2267 2267
2268 2268 A changeset that has been "split" will have a successors set containing
2269 2269 more than one successor.
2270 2270
2271 2271 A changeset that has been rewritten in multiple different ways is called
2272 2272 "divergent". Such changesets have multiple successor sets (each of which
2273 2273 may also be split, i.e. have multiple successors).
2274 2274
2275 2275 Results are displayed as follows::
2276 2276
2277 2277 <rev1>
2278 2278 <successors-1A>
2279 2279 <rev2>
2280 2280 <successors-2A>
2281 2281 <successors-2B1> <successors-2B2> <successors-2B3>
2282 2282
2283 2283 Here rev2 has two possible (i.e. divergent) successors sets. The first
2284 2284 holds one element, whereas the second holds three (i.e. the changeset has
2285 2285 been split).
2286 2286 """
2287 2287 # passed to successorssets caching computation from one call to another
2288 2288 cache = {}
2289 2289 ctx2str = str
2290 2290 node2str = short
2291 2291 if ui.debug():
2292 2292 def ctx2str(ctx):
2293 2293 return ctx.hex()
2294 2294 node2str = hex
2295 2295 for rev in scmutil.revrange(repo, revs):
2296 2296 ctx = repo[rev]
2297 2297 ui.write('%s\n'% ctx2str(ctx))
2298 2298 for succsset in obsutil.successorssets(repo, ctx.node(),
2299 2299 closest=opts['closest'],
2300 2300 cache=cache):
2301 2301 if succsset:
2302 2302 ui.write(' ')
2303 2303 ui.write(node2str(succsset[0]))
2304 2304 for node in succsset[1:]:
2305 2305 ui.write(' ')
2306 2306 ui.write(node2str(node))
2307 2307 ui.write('\n')
2308 2308
2309 2309 @command('debugtemplate',
2310 2310 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2311 2311 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2312 2312 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2313 2313 optionalrepo=True)
2314 2314 def debugtemplate(ui, repo, tmpl, **opts):
2315 2315 """parse and apply a template
2316 2316
2317 2317 If -r/--rev is given, the template is processed as a log template and
2318 2318 applied to the given changesets. Otherwise, it is processed as a generic
2319 2319 template.
2320 2320
2321 2321 Use --verbose to print the parsed tree.
2322 2322 """
2323 2323 revs = None
2324 2324 if opts[r'rev']:
2325 2325 if repo is None:
2326 2326 raise error.RepoError(_('there is no Mercurial repository here '
2327 2327 '(.hg not found)'))
2328 2328 revs = scmutil.revrange(repo, opts[r'rev'])
2329 2329
2330 2330 props = {}
2331 2331 for d in opts[r'define']:
2332 2332 try:
2333 2333 k, v = (e.strip() for e in d.split('=', 1))
2334 2334 if not k or k == 'ui':
2335 2335 raise ValueError
2336 2336 props[k] = v
2337 2337 except ValueError:
2338 2338 raise error.Abort(_('malformed keyword definition: %s') % d)
2339 2339
2340 2340 if ui.verbose:
2341 2341 aliases = ui.configitems('templatealias')
2342 2342 tree = templater.parse(tmpl)
2343 2343 ui.note(templater.prettyformat(tree), '\n')
2344 2344 newtree = templater.expandaliases(tree, aliases)
2345 2345 if newtree != tree:
2346 2346 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2347 2347
2348 2348 if revs is None:
2349 2349 t = formatter.maketemplater(ui, tmpl)
2350 2350 props['ui'] = ui
2351 2351 ui.write(t.render(props))
2352 2352 else:
2353 2353 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2354 2354 for r in revs:
2355 2355 displayer.show(repo[r], **pycompat.strkwargs(props))
2356 2356 displayer.close()
2357 2357
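# A minimal sketch (not part of debugcommands.py; the helper name is made up
# for illustration) of the -D KEY=VALUE parsing above: each definition is
# split once on '=', stripped, and rejected when the key is empty or the
# reserved name 'ui'.
def _parsedefine(d):
    try:
        k, v = (e.strip() for e in d.split('=', 1))
        if not k or k == 'ui':
            raise ValueError
        return k, v
    except ValueError:
        raise ValueError('malformed keyword definition: %s' % d)

# e.g. _parsedefine('rev = 42') == ('rev', '42')
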
2358 2358 @command('debugupdatecaches', [])
2359 2359 def debugupdatecaches(ui, repo, *pats, **opts):
2360 2360 """warm all known caches in the repository"""
2361 2361 with repo.wlock(), repo.lock():
2362 2362 repo.updatecaches()
2363 2363
2364 2364 @command('debugupgraderepo', [
2365 2365 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2366 2366 ('', 'run', False, _('performs an upgrade')),
2367 2367 ])
2368 2368 def debugupgraderepo(ui, repo, run=False, optimize=None):
2369 2369 """upgrade a repository to use different features
2370 2370
2371 2371 If no arguments are specified, the repository is evaluated for upgrade
2372 2372 and a list of problems and potential optimizations is printed.
2373 2373
2374 2374 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2375 2375 can be influenced via additional arguments. More details will be provided
2376 2376 by the command output when run without ``--run``.
2377 2377
2378 2378 During the upgrade, the repository will be locked and no writes will be
2379 2379 allowed.
2380 2380
2381 2381 At the end of the upgrade, the repository may not be readable while new
2382 2382 repository data is swapped in. This window will be as long as it takes to
2383 2383 rename some directories inside the ``.hg`` directory. On most machines, this
2384 2384 should complete almost instantaneously and the chances of a consumer being
2385 2385 unable to access the repository should be low.
2386 2386 """
2387 2387 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2388 2388
2389 2389 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2390 2390 inferrepo=True)
2391 2391 def debugwalk(ui, repo, *pats, **opts):
2392 2392 """show how files match on given patterns"""
2393 2393 opts = pycompat.byteskwargs(opts)
2394 2394 m = scmutil.match(repo[None], pats, opts)
2395 2395 ui.write(('matcher: %r\n' % m))
2396 2396 items = list(repo[None].walk(m))
2397 2397 if not items:
2398 2398 return
2399 2399 f = lambda fn: fn
2400 2400 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2401 2401 f = lambda fn: util.normpath(fn)
2402 2402 fmt = 'f %%-%ds %%-%ds %%s' % (
2403 2403 max([len(abs) for abs in items]),
2404 2404 max([len(m.rel(abs)) for abs in items]))
2405 2405 for abs in items:
2406 2406 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2407 2407 ui.write("%s\n" % line.rstrip())
2408 2408
2409 2409 @command('debugwireargs',
2410 2410 [('', 'three', '', 'three'),
2411 2411 ('', 'four', '', 'four'),
2412 2412 ('', 'five', '', 'five'),
2413 2413 ] + cmdutil.remoteopts,
2414 2414 _('REPO [OPTIONS]... [ONE [TWO]]'),
2415 2415 norepo=True)
2416 2416 def debugwireargs(ui, repopath, *vals, **opts):
2417 2417 opts = pycompat.byteskwargs(opts)
2418 2418 repo = hg.peer(ui, opts, repopath)
2419 2419 for opt in cmdutil.remoteopts:
2420 2420 del opts[opt[1]]
2421 2421 args = {}
2422 2422 for k, v in opts.iteritems():
2423 2423 if v:
2424 2424 args[k] = v
2425 2425 # run twice to check that we don't mess up the stream for the next command
2426 2426 res1 = repo.debugwireargs(*vals, **args)
2427 2427 res2 = repo.debugwireargs(*vals, **args)
2428 2428 ui.write("%s\n" % res1)
2429 2429 if res1 != res2:
2430 2430 ui.warn("%s\n" % res2)
@@ -1,227 +1,343
1 1 $ cat << EOF >> $HGRCPATH
2 2 > [format]
3 3 > usegeneraldelta=yes
4 4 > EOF
5 5
6 6 $ hg init debugrevlog
7 7 $ cd debugrevlog
8 8 $ echo a > a
9 9 $ hg ci -Am adda
10 10 adding a
11 11 $ hg debugrevlog -m
12 12 format : 1
13 13 flags : inline, generaldelta
14 14
15 15 revisions : 1
16 16 merges : 0 ( 0.00%)
17 17 normal : 1 (100.00%)
18 18 revisions : 1
19 19 full : 1 (100.00%)
20 20 deltas : 0 ( 0.00%)
21 21 revision size : 44
22 22 full : 44 (100.00%)
23 23 deltas : 0 ( 0.00%)
24 24
25 25 chunks : 1
26 26 0x75 (u) : 1 (100.00%)
27 27 chunks size : 44
28 28 0x75 (u) : 44 (100.00%)
29 29
30 30 avg chain length : 0
31 31 max chain length : 0
32 32 max chain reach : 44
33 33 compression ratio : 0
34 34
35 35 uncompressed data size (min/max/avg) : 43 / 43 / 43
36 36 full revision size (min/max/avg) : 44 / 44 / 44
37 37 delta size (min/max/avg) : 0 / 0 / 0
38 38
39 39 Test debugindex, with and without the --debug flag
40 40 $ hg debugindex a
41 41 rev offset length ..... linkrev nodeid p1 p2 (re)
42 42 0 0 3 .... 0 b789fdd96dc2 000000000000 000000000000 (re)
43 43 $ hg --debug debugindex a
44 44 rev offset length ..... linkrev nodeid p1 p2 (re)
45 45 0 0 3 .... 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re)
46 46 $ hg debugindex -f 1 a
47 47 rev flag offset length size ..... link p1 p2 nodeid (re)
48 48 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2 (re)
49 49 $ hg --debug debugindex -f 1 a
50 50 rev flag offset length size ..... link p1 p2 nodeid (re)
51 51 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re)
52 52
53 53 debugdelta chain basic output
54 54
55 55 $ hg debugdeltachain -m
56 56 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
57 57 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
58 58
59 59 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
60 60 0 1 1
61 61
62 62 $ hg debugdeltachain -m -Tjson
63 63 [
64 64 {
65 65 "chainid": 1,
66 66 "chainlen": 1,
67 67 "chainratio": 1.02325581395,
68 68 "chainsize": 44,
69 69 "compsize": 44,
70 70 "deltatype": "base",
71 71 "extradist": 0,
72 72 "extraratio": 0.0,
73 73 "lindist": 44,
74 74 "prevrev": -1,
75 75 "rev": 0,
76 76 "uncompsize": 43
77 77 }
78 78 ]
79 79
80 80 debugdelta chain with sparse read enabled
81 81
82 82 $ cat >> $HGRCPATH <<EOF
83 83 > [experimental]
84 84 > sparse-read = True
85 85 > EOF
86 86 $ hg debugdeltachain -m
87 87 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity
88 88 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000
89 89
90 90 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
91 91 0 1 1 44 44 1.0
92 92
93 93 $ hg debugdeltachain -m -Tjson
94 94 [
95 95 {
96 96 "chainid": 1,
97 97 "chainlen": 1,
98 98 "chainratio": 1.02325581395,
99 99 "chainsize": 44,
100 100 "compsize": 44,
101 101 "deltatype": "base",
102 102 "extradist": 0,
103 103 "extraratio": 0.0,
104 104 "largestblock": 44,
105 105 "lindist": 44,
106 106 "prevrev": -1,
107 107 "readdensity": 1.0,
108 108 "readsize": 44,
109 109 "rev": 0,
110 110 "uncompsize": 43
111 111 }
112 112 ]
113 113
114 114 Test max chain len
115 115 $ cat >> $HGRCPATH << EOF
116 116 > [format]
117 117 > maxchainlen=4
118 118 > EOF
119 119
120 120 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
121 121 $ hg ci -m a
122 122 $ printf "b\n" >> a
123 123 $ hg ci -m a
124 124 $ printf "c\n" >> a
125 125 $ hg ci -m a
126 126 $ printf "d\n" >> a
127 127 $ hg ci -m a
128 128 $ printf "e\n" >> a
129 129 $ hg ci -m a
130 130 $ printf "f\n" >> a
131 131 $ hg ci -m a
132 132 $ printf 'g\n' >> a
133 133 $ hg ci -m a
134 134 $ printf 'h\n' >> a
135 135 $ hg ci -m a
136 136 $ hg debugrevlog -d a
137 137 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
138 138 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
139 139 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
140 140 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
141 141 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
142 142 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
143 143 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
144 144 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
145 145 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
146 146 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
147 147
148 Test debuglocks command:
149
150 $ hg debuglocks
151 lock: free
152 wlock: free
153
154 * Test setting the lock
155
156 waitlock <file> will wait for the file to be created. If it isn't created in
157 a reasonable amount of time, it displays an error message and returns 1
158 $ waitlock() {
159 > start=`date +%s`
160 > timeout=1
161 > while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
162 > now=`date +%s`
163 > if [ "`expr $now - $start`" -gt $timeout ]; then
164 > echo "timeout: $1 was not created in $timeout seconds"
165 > return 1
166 > fi
167 > sleep 0.1
168 > done
169 > }
170 dolock [wlock] [lock] will set the locks until interrupted
171 $ dolock() {
172 > declare -A options
173 > options=([${1:-nolock}]=1 [${2:-nowlock}]=1)
174 > python <<EOF
175 > from mercurial import hg, ui as uimod
176 > import os
177 > import time
178 >
179 > repo = hg.repository(uimod.ui.load(), path='.')
180 > `[ -n "${options["wlock"]}" ] && echo "with repo.wlock(False):" || echo "if True:"`
181 > `[ -n "${options["lock"]}" ] && echo "with repo.lock(False):" || echo "if True:"`
182 > while not os.path.exists('.hg/unlock'):
183 > time.sleep(0.1)
184 > os.unlink('.hg/unlock')
185 > EOF
186 > }
187
188 $ dolock lock &
189 $ waitlock .hg/store/lock
190
191 $ hg debuglocks
192 lock: user *, process * (*s) (glob)
193 wlock: free
194 [1]
195 $ touch .hg/unlock
196 $ wait
197
198 * Test setting the wlock
199
200 $ dolock wlock &
201 $ waitlock .hg/wlock
202
203 $ hg debuglocks
204 lock: free
205 wlock: user *, process * (*s) (glob)
206 [1]
207 $ touch .hg/unlock
208 $ wait
209
210 * Test setting both locks
211
212 $ dolock wlock lock &
213 $ waitlock .hg/wlock && waitlock .hg/store/lock
214
215 $ hg debuglocks
216 lock: user *, process * (*s) (glob)
217 wlock: user *, process * (*s) (glob)
218 [2]
219 $ touch .hg/unlock
220 $ wait
221
222 $ hg debuglocks
223 lock: free
224 wlock: free
225
226 * Test forcing the lock
227
228 $ dolock lock &
229 $ waitlock .hg/store/lock
230
231 $ hg debuglocks
232 lock: user *, process * (*s) (glob)
233 wlock: free
234 [1]
235
236 $ hg debuglocks -L
237
238 $ hg debuglocks
239 lock: free
240 wlock: free
241
242 $ touch .hg/unlock
243 $ wait
244
245 * Test forcing the wlock
246
247 $ dolock wlock &
248 $ waitlock .hg/wlock
249
250 $ hg debuglocks
251 lock: free
252 wlock: user *, process * (*s) (glob)
253 [1]
254
255 $ hg debuglocks -W
256
257 $ hg debuglocks
258 lock: free
259 wlock: free
260
261 $ touch .hg/unlock
262 $ wait
263
148 264 Test WdirUnsupported exception
149 265
150 266 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
151 267 abort: working directory revision cannot be specified
152 268 [255]
153 269
154 270 Test cache warming command
155 271
156 272 $ rm -rf .hg/cache/
157 273 $ hg debugupdatecaches --debug
158 274 updating the branch cache
159 275 $ ls -r .hg/cache/*
160 276 .hg/cache/rbc-revs-v1
161 277 .hg/cache/rbc-names-v1
162 278 .hg/cache/branch2-served
163 279
164 280 $ cd ..
165 281
166 282 Test internal debugstacktrace command
167 283
168 284 $ cat > debugstacktrace.py << EOF
169 285 > from __future__ import absolute_import
170 286 > import sys
171 287 > from mercurial import util
172 288 > def f():
173 289 > util.debugstacktrace(f=sys.stdout)
174 290 > g()
175 291 > def g():
176 292 > util.dst('hello from g\\n', skip=1)
177 293 > h()
178 294 > def h():
179 295 > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
180 296 > f()
181 297 > EOF
182 298 $ $PYTHON debugstacktrace.py
183 299 stacktrace at:
184 300 debugstacktrace.py:12 in * (glob)
185 301 debugstacktrace.py:5 in f
186 302 hello from g at:
187 303 debugstacktrace.py:12 in * (glob)
188 304 debugstacktrace.py:6 in f
189 305 hi ...
190 306 from h hidden in g at:
191 307 debugstacktrace.py:6 in f
192 308 debugstacktrace.py:9 in g
193 309
194 310 Test debugcapabilities command:
195 311
196 312 $ hg debugcapabilities ./debugrevlog/
197 313 Main capabilities:
198 314 branchmap
199 315 $USUAL_BUNDLE2_CAPS$
200 316 getbundle
201 317 known
202 318 lookup
203 319 pushkey
204 320 unbundle
205 321 Bundle2 capabilities:
206 322 HG20
207 323 bookmarks
208 324 changegroup
209 325 01
210 326 02
211 327 digests
212 328 md5
213 329 sha1
214 330 sha512
215 331 error
216 332 abort
217 333 unsupportedcontent
218 334 pushraced
219 335 pushkey
220 336 hgtagsfnodes
221 337 listkeys
222 338 phases
223 339 heads
224 340 pushkey
225 341 remote-changegroup
226 342 http
227 343 https