py3: use '%d' for integers rather than '%s'...
Pulkit Goyal
r35144:8f6641fa default
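The change below is in _debugobsmarkers (mercurial/debugcommands.py, line 299): the obsmarker version, an integer, is now formatted with '%d' instead of '%s'. A minimal sketch of why this matters for the Python 3 port, assuming CPython 3.5+ bytes formatting (PEP 461); the numeric values are placeholders and the exact TypeError wording may vary by interpreter version:

    >>> b"version: %d (%d bytes)\n" % (1, 58)
    b'version: 1 (58 bytes)\n'
    >>> b"version: %s (%d bytes)\n" % (1, 58)
    Traceback (most recent call last):
      ...
    TypeError: %b requires a bytes-like object, or an object that implements __bytes__, not 'int'

On Python 2 both forms work because '%s' stringifies the integer, so the old code only breaks once the format string is treated as bytes under Python 3.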
@@ -1,2364 +1,2364 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 merge as mergemod,
52 52 obsolete,
53 53 obsutil,
54 54 phases,
55 55 policy,
56 56 pvec,
57 57 pycompat,
58 58 registrar,
59 59 repair,
60 60 revlog,
61 61 revset,
62 62 revsetlang,
63 63 scmutil,
64 64 setdiscovery,
65 65 simplemerge,
66 66 smartset,
67 67 sslutil,
68 68 streamclone,
69 69 templater,
70 70 treediscovery,
71 71 upgrade,
72 72 util,
73 73 vfs as vfsmod,
74 74 )
75 75
76 76 release = lockmod.release
77 77
78 78 command = registrar.command()
79 79
80 80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 81 def debugancestor(ui, repo, *args):
82 82 """find the ancestor revision of two revisions in a given index"""
83 83 if len(args) == 3:
84 84 index, rev1, rev2 = args
85 85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 86 lookup = r.lookup
87 87 elif len(args) == 2:
88 88 if not repo:
89 89 raise error.Abort(_('there is no Mercurial repository here '
90 90 '(.hg not found)'))
91 91 rev1, rev2 = args
92 92 r = repo.changelog
93 93 lookup = repo.lookup
94 94 else:
95 95 raise error.Abort(_('either two or three arguments required'))
96 96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98 98
99 99 @command('debugapplystreamclonebundle', [], 'FILE')
100 100 def debugapplystreamclonebundle(ui, repo, fname):
101 101 """apply a stream clone bundle file"""
102 102 f = hg.openpath(ui, fname)
103 103 gen = exchange.readbundle(ui, f, fname)
104 104 gen.apply(repo)
105 105
106 106 @command('debugbuilddag',
107 107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 109 ('n', 'new-file', None, _('add new file at each rev'))],
110 110 _('[OPTION]... [TEXT]'))
111 111 def debugbuilddag(ui, repo, text=None,
112 112 mergeable_file=False,
113 113 overwritten_file=False,
114 114 new_file=False):
115 115 """builds a repo with a given DAG from scratch in the current empty repo
116 116
117 117 The description of the DAG is read from stdin if not given on the
118 118 command line.
119 119
120 120 Elements:
121 121
122 122 - "+n" is a linear run of n nodes based on the current default parent
123 123 - "." is a single node based on the current default parent
124 124 - "$" resets the default parent to null (implied at the start);
125 125 otherwise the default parent is always the last node created
126 126 - "<p" sets the default parent to the backref p
127 127 - "*p" is a fork at parent p, which is a backref
128 128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 129 - "/p2" is a merge of the preceding node and p2
130 130 - ":tag" defines a local tag for the preceding node
131 131 - "@branch" sets the named branch for subsequent nodes
132 132 - "#...\\n" is a comment up to the end of the line
133 133
134 134 Whitespace between the above elements is ignored.
135 135
136 136 A backref is either
137 137
138 138 - a number n, which references the node curr-n, where curr is the current
139 139 node, or
140 140 - the name of a local tag you placed earlier using ":tag", or
141 141 - empty to denote the default parent.
142 142
143 143 All string-valued elements are either strictly alphanumeric, or must
144 144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 145 """
146 146
147 147 if text is None:
148 148 ui.status(_("reading DAG from stdin\n"))
149 149 text = ui.fin.read()
150 150
151 151 cl = repo.changelog
152 152 if len(cl) > 0:
153 153 raise error.Abort(_('repository is not empty'))
154 154
155 155 # determine number of revs in DAG
156 156 total = 0
157 157 for type, data in dagparser.parsedag(text):
158 158 if type == 'n':
159 159 total += 1
160 160
161 161 if mergeable_file:
162 162 linesperrev = 2
163 163 # make a file with k lines per rev
164 164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 165 initialmergedlines.append("")
166 166
167 167 tags = []
168 168
169 169 wlock = lock = tr = None
170 170 try:
171 171 wlock = repo.wlock()
172 172 lock = repo.lock()
173 173 tr = repo.transaction("builddag")
174 174
175 175 at = -1
176 176 atbranch = 'default'
177 177 nodeids = []
178 178 id = 0
179 179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 180 for type, data in dagparser.parsedag(text):
181 181 if type == 'n':
182 182 ui.note(('node %s\n' % str(data)))
183 183 id, ps = data
184 184
185 185 files = []
186 186 fctxs = {}
187 187
188 188 p2 = None
189 189 if mergeable_file:
190 190 fn = "mf"
191 191 p1 = repo[ps[0]]
192 192 if len(ps) > 1:
193 193 p2 = repo[ps[1]]
194 194 pa = p1.ancestor(p2)
195 195 base, local, other = [x[fn].data() for x in (pa, p1,
196 196 p2)]
197 197 m3 = simplemerge.Merge3Text(base, local, other)
198 198 ml = [l.strip() for l in m3.merge_lines()]
199 199 ml.append("")
200 200 elif at > 0:
201 201 ml = p1[fn].data().split("\n")
202 202 else:
203 203 ml = initialmergedlines
204 204 ml[id * linesperrev] += " r%i" % id
205 205 mergedtext = "\n".join(ml)
206 206 files.append(fn)
207 207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208 208
209 209 if overwritten_file:
210 210 fn = "of"
211 211 files.append(fn)
212 212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 213
214 214 if new_file:
215 215 fn = "nf%i" % id
216 216 files.append(fn)
217 217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 218 if len(ps) > 1:
219 219 if not p2:
220 220 p2 = repo[ps[1]]
221 221 for fn in p2:
222 222 if fn.startswith("nf"):
223 223 files.append(fn)
224 224 fctxs[fn] = p2[fn]
225 225
226 226 def fctxfn(repo, cx, path):
227 227 return fctxs.get(path)
228 228
229 229 if len(ps) == 0 or ps[0] < 0:
230 230 pars = [None, None]
231 231 elif len(ps) == 1:
232 232 pars = [nodeids[ps[0]], None]
233 233 else:
234 234 pars = [nodeids[p] for p in ps]
235 235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 236 date=(id, 0),
237 237 user="debugbuilddag",
238 238 extra={'branch': atbranch})
239 239 nodeid = repo.commitctx(cx)
240 240 nodeids.append(nodeid)
241 241 at = id
242 242 elif type == 'l':
243 243 id, name = data
244 244 ui.note(('tag %s\n' % name))
245 245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 246 elif type == 'a':
247 247 ui.note(('branch %s\n' % data))
248 248 atbranch = data
249 249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 250 tr.close()
251 251
252 252 if tags:
253 253 repo.vfs.write("localtags", "".join(tags))
254 254 finally:
255 255 ui.progress(_('building'), None)
256 256 release(tr, lock, wlock)
257 257
258 258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 259 indent_string = ' ' * indent
260 260 if all:
261 261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 262 % indent_string)
263 263
264 264 def showchunks(named):
265 265 ui.write("\n%s%s\n" % (indent_string, named))
266 266 for deltadata in gen.deltaiter():
267 267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 268 ui.write("%s%s %s %s %s %s %s\n" %
269 269 (indent_string, hex(node), hex(p1), hex(p2),
270 270 hex(cs), hex(deltabase), len(delta)))
271 271
272 272 chunkdata = gen.changelogheader()
273 273 showchunks("changelog")
274 274 chunkdata = gen.manifestheader()
275 275 showchunks("manifest")
276 276 for chunkdata in iter(gen.filelogheader, {}):
277 277 fname = chunkdata['filename']
278 278 showchunks(fname)
279 279 else:
280 280 if isinstance(gen, bundle2.unbundle20):
281 281 raise error.Abort(_('use debugbundle2 for this file'))
282 282 chunkdata = gen.changelogheader()
283 283 for deltadata in gen.deltaiter():
284 284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 285 ui.write("%s%s\n" % (indent_string, hex(node)))
286 286
287 287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 288 """display version and markers contained in 'data'"""
289 289 opts = pycompat.byteskwargs(opts)
290 290 data = part.read()
291 291 indent_string = ' ' * indent
292 292 try:
293 293 version, markers = obsolete._readmarkers(data)
294 294 except error.UnknownVersion as exc:
295 295 msg = "%sunsupported version: %s (%d bytes)\n"
296 296 msg %= indent_string, exc.version, len(data)
297 297 ui.write(msg)
298 298 else:
299 msg = "%sversion: %s (%d bytes)\n"
299 msg = "%sversion: %d (%d bytes)\n"
300 300 msg %= indent_string, version, len(data)
301 301 ui.write(msg)
302 302 fm = ui.formatter('debugobsolete', opts)
303 303 for rawmarker in sorted(markers):
304 304 m = obsutil.marker(None, rawmarker)
305 305 fm.startitem()
306 306 fm.plain(indent_string)
307 307 cmdutil.showmarker(fm, m)
308 308 fm.end()
309 309
310 310 def _debugphaseheads(ui, data, indent=0):
311 311 """display version and markers contained in 'data'"""
312 312 indent_string = ' ' * indent
313 313 headsbyphase = phases.binarydecode(data)
314 314 for phase in phases.allphases:
315 315 for head in headsbyphase[phase]:
316 316 ui.write(indent_string)
317 317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318 318
319 319 def _quasirepr(thing):
320 320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
321 321 return '{%s}' % (
322 322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
323 323 return pycompat.bytestr(repr(thing))
324 324
325 325 def _debugbundle2(ui, gen, all=None, **opts):
326 326 """lists the contents of a bundle2"""
327 327 if not isinstance(gen, bundle2.unbundle20):
328 328 raise error.Abort(_('not a bundle2 file'))
329 329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
330 330 parttypes = opts.get(r'part_type', [])
331 331 for part in gen.iterparts():
332 332 if parttypes and part.type not in parttypes:
333 333 continue
334 334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
335 335 if part.type == 'changegroup':
336 336 version = part.params.get('version', '01')
337 337 cg = changegroup.getunbundler(version, part, 'UN')
338 338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 339 if part.type == 'obsmarkers':
340 340 _debugobsmarkers(ui, part, indent=4, **opts)
341 341 if part.type == 'phase-heads':
342 342 _debugphaseheads(ui, part, indent=4)
343 343
344 344 @command('debugbundle',
345 345 [('a', 'all', None, _('show all details')),
346 346 ('', 'part-type', [], _('show only the named part type')),
347 347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 348 _('FILE'),
349 349 norepo=True)
350 350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 351 """lists the contents of a bundle"""
352 352 with hg.openpath(ui, bundlepath) as f:
353 353 if spec:
354 354 spec = exchange.getbundlespec(ui, f)
355 355 ui.write('%s\n' % spec)
356 356 return
357 357
358 358 gen = exchange.readbundle(ui, f, bundlepath)
359 359 if isinstance(gen, bundle2.unbundle20):
360 360 return _debugbundle2(ui, gen, all=all, **opts)
361 361 _debugchangegroup(ui, gen, all=all, **opts)
362 362
363 363 @command('debugcapabilities',
364 364 [], _('PATH'),
365 365 norepo=True)
366 366 def debugcapabilities(ui, path, **opts):
367 367 """lists the capabilities of a remote peer"""
368 368 peer = hg.peer(ui, opts, path)
369 369 caps = peer.capabilities()
370 370 ui.write(('Main capabilities:\n'))
371 371 for c in sorted(caps):
372 372 ui.write((' %s\n') % c)
373 373 b2caps = bundle2.bundle2caps(peer)
374 374 if b2caps:
375 375 ui.write(('Bundle2 capabilities:\n'))
376 376 for key, values in sorted(b2caps.iteritems()):
377 377 ui.write((' %s\n') % key)
378 378 for v in values:
379 379 ui.write((' %s\n') % v)
380 380
381 381 @command('debugcheckstate', [], '')
382 382 def debugcheckstate(ui, repo):
383 383 """validate the correctness of the current dirstate"""
384 384 parent1, parent2 = repo.dirstate.parents()
385 385 m1 = repo[parent1].manifest()
386 386 m2 = repo[parent2].manifest()
387 387 errors = 0
388 388 for f in repo.dirstate:
389 389 state = repo.dirstate[f]
390 390 if state in "nr" and f not in m1:
391 391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
392 392 errors += 1
393 393 if state in "a" and f in m1:
394 394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
395 395 errors += 1
396 396 if state in "m" and f not in m1 and f not in m2:
397 397 ui.warn(_("%s in state %s, but not in either manifest\n") %
398 398 (f, state))
399 399 errors += 1
400 400 for f in m1:
401 401 state = repo.dirstate[f]
402 402 if state not in "nrm":
403 403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
404 404 errors += 1
405 405 if errors:
406 406 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
407 407 raise error.Abort(errstr)
408 408
409 409 @command('debugcolor',
410 410 [('', 'style', None, _('show all configured styles'))],
411 411 'hg debugcolor')
412 412 def debugcolor(ui, repo, **opts):
413 413 """show available color, effects or style"""
414 414 ui.write(('color mode: %s\n') % ui._colormode)
415 415 if opts.get(r'style'):
416 416 return _debugdisplaystyle(ui)
417 417 else:
418 418 return _debugdisplaycolor(ui)
419 419
420 420 def _debugdisplaycolor(ui):
421 421 ui = ui.copy()
422 422 ui._styles.clear()
423 423 for effect in color._activeeffects(ui).keys():
424 424 ui._styles[effect] = effect
425 425 if ui._terminfoparams:
426 426 for k, v in ui.configitems('color'):
427 427 if k.startswith('color.'):
428 428 ui._styles[k] = k[6:]
429 429 elif k.startswith('terminfo.'):
430 430 ui._styles[k] = k[9:]
431 431 ui.write(_('available colors:\n'))
432 432 # sort label with a '_' after the other to group '_background' entry.
433 433 items = sorted(ui._styles.items(),
434 434 key=lambda i: ('_' in i[0], i[0], i[1]))
435 435 for colorname, label in items:
436 436 ui.write(('%s\n') % colorname, label=label)
437 437
438 438 def _debugdisplaystyle(ui):
439 439 ui.write(_('available style:\n'))
440 440 width = max(len(s) for s in ui._styles)
441 441 for label, effects in sorted(ui._styles.items()):
442 442 ui.write('%s' % label, label=label)
443 443 if effects:
444 444 # 50
445 445 ui.write(': ')
446 446 ui.write(' ' * (max(0, width - len(label))))
447 447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
448 448 ui.write('\n')
449 449
450 450 @command('debugcreatestreamclonebundle', [], 'FILE')
451 451 def debugcreatestreamclonebundle(ui, repo, fname):
452 452 """create a stream clone bundle file
453 453
454 454 Stream bundles are special bundles that are essentially archives of
455 455 revlog files. They are commonly used for cloning very quickly.
456 456 """
457 457 # TODO we may want to turn this into an abort when this functionality
458 458 # is moved into `hg bundle`.
459 459 if phases.hassecret(repo):
460 460 ui.warn(_('(warning: stream clone bundle will contain secret '
461 461 'revisions)\n'))
462 462
463 463 requirements, gen = streamclone.generatebundlev1(repo)
464 464 changegroup.writechunks(ui, gen, fname)
465 465
466 466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
467 467
468 468 @command('debugdag',
469 469 [('t', 'tags', None, _('use tags as labels')),
470 470 ('b', 'branches', None, _('annotate with branch names')),
471 471 ('', 'dots', None, _('use dots for runs')),
472 472 ('s', 'spaces', None, _('separate elements by spaces'))],
473 473 _('[OPTION]... [FILE [REV]...]'),
474 474 optionalrepo=True)
475 475 def debugdag(ui, repo, file_=None, *revs, **opts):
476 476 """format the changelog or an index DAG as a concise textual description
477 477
478 478 If you pass a revlog index, the revlog's DAG is emitted. If you list
479 479 revision numbers, they get labeled in the output as rN.
480 480
481 481 Otherwise, the changelog DAG of the current repo is emitted.
482 482 """
483 483 spaces = opts.get(r'spaces')
484 484 dots = opts.get(r'dots')
485 485 if file_:
486 486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
487 487 file_)
488 488 revs = set((int(r) for r in revs))
489 489 def events():
490 490 for r in rlog:
491 491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
492 492 if p != -1))
493 493 if r in revs:
494 494 yield 'l', (r, "r%i" % r)
495 495 elif repo:
496 496 cl = repo.changelog
497 497 tags = opts.get(r'tags')
498 498 branches = opts.get(r'branches')
499 499 if tags:
500 500 labels = {}
501 501 for l, n in repo.tags().items():
502 502 labels.setdefault(cl.rev(n), []).append(l)
503 503 def events():
504 504 b = "default"
505 505 for r in cl:
506 506 if branches:
507 507 newb = cl.read(cl.node(r))[5]['branch']
508 508 if newb != b:
509 509 yield 'a', newb
510 510 b = newb
511 511 yield 'n', (r, list(p for p in cl.parentrevs(r)
512 512 if p != -1))
513 513 if tags:
514 514 ls = labels.get(r)
515 515 if ls:
516 516 for l in ls:
517 517 yield 'l', (r, l)
518 518 else:
519 519 raise error.Abort(_('need repo for changelog dag'))
520 520
521 521 for line in dagparser.dagtextlines(events(),
522 522 addspaces=spaces,
523 523 wraplabels=True,
524 524 wrapannotations=True,
525 525 wrapnonlinear=dots,
526 526 usedots=dots,
527 527 maxlinewidth=70):
528 528 ui.write(line)
529 529 ui.write("\n")
530 530
531 531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
532 532 def debugdata(ui, repo, file_, rev=None, **opts):
533 533 """dump the contents of a data file revision"""
534 534 opts = pycompat.byteskwargs(opts)
535 535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
536 536 if rev is not None:
537 537 raise error.CommandError('debugdata', _('invalid arguments'))
538 538 file_, rev = None, file_
539 539 elif rev is None:
540 540 raise error.CommandError('debugdata', _('invalid arguments'))
541 541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
542 542 try:
543 543 ui.write(r.revision(r.lookup(rev), raw=True))
544 544 except KeyError:
545 545 raise error.Abort(_('invalid revision identifier %s') % rev)
546 546
547 547 @command('debugdate',
548 548 [('e', 'extended', None, _('try extended date formats'))],
549 549 _('[-e] DATE [RANGE]'),
550 550 norepo=True, optionalrepo=True)
551 551 def debugdate(ui, date, range=None, **opts):
552 552 """parse and display a date"""
553 553 if opts[r"extended"]:
554 554 d = util.parsedate(date, util.extendeddateformats)
555 555 else:
556 556 d = util.parsedate(date)
557 557 ui.write(("internal: %s %s\n") % d)
558 558 ui.write(("standard: %s\n") % util.datestr(d))
559 559 if range:
560 560 m = util.matchdate(range)
561 561 ui.write(("match: %s\n") % m(d[0]))
562 562
563 563 @command('debugdeltachain',
564 564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
565 565 _('-c|-m|FILE'),
566 566 optionalrepo=True)
567 567 def debugdeltachain(ui, repo, file_=None, **opts):
568 568 """dump information about delta chains in a revlog
569 569
570 570 Output can be templatized. Available template keywords are:
571 571
572 572 :``rev``: revision number
573 573 :``chainid``: delta chain identifier (numbered by unique base)
574 574 :``chainlen``: delta chain length to this revision
575 575 :``prevrev``: previous revision in delta chain
576 576 :``deltatype``: role of delta / how it was computed
577 577 :``compsize``: compressed size of revision
578 578 :``uncompsize``: uncompressed size of revision
579 579 :``chainsize``: total size of compressed revisions in chain
580 580 :``chainratio``: total chain size divided by uncompressed revision size
581 581 (new delta chains typically start at ratio 2.00)
582 582 :``lindist``: linear distance from base revision in delta chain to end
583 583 of this revision
584 584 :``extradist``: total size of revisions not part of this delta chain from
585 585 base of delta chain to end of this revision; a measurement
586 586 of how much extra data we need to read/seek across to read
587 587 the delta chain for this revision
588 588 :``extraratio``: extradist divided by chainsize; another representation of
589 589 how much unrelated data is needed to load this delta chain
590 590
591 591 If the repository is configured to use the sparse read, additional keywords
592 592 are available:
593 593
594 594 :``readsize``: total size of data read from the disk for a revision
595 595 (sum of the sizes of all the blocks)
596 596 :``largestblock``: size of the largest block of data read from the disk
597 597 :``readdensity``: density of useful bytes in the data read from the disk
598 598
599 599 The sparse read can be enabled with experimental.sparse-read = True
600 600 """
601 601 opts = pycompat.byteskwargs(opts)
602 602 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
603 603 index = r.index
604 604 generaldelta = r.version & revlog.FLAG_GENERALDELTA
605 605 withsparseread = getattr(r, '_withsparseread', False)
606 606
607 607 def revinfo(rev):
608 608 e = index[rev]
609 609 compsize = e[1]
610 610 uncompsize = e[2]
611 611 chainsize = 0
612 612
613 613 if generaldelta:
614 614 if e[3] == e[5]:
615 615 deltatype = 'p1'
616 616 elif e[3] == e[6]:
617 617 deltatype = 'p2'
618 618 elif e[3] == rev - 1:
619 619 deltatype = 'prev'
620 620 elif e[3] == rev:
621 621 deltatype = 'base'
622 622 else:
623 623 deltatype = 'other'
624 624 else:
625 625 if e[3] == rev:
626 626 deltatype = 'base'
627 627 else:
628 628 deltatype = 'prev'
629 629
630 630 chain = r._deltachain(rev)[0]
631 631 for iterrev in chain:
632 632 e = index[iterrev]
633 633 chainsize += e[1]
634 634
635 635 return compsize, uncompsize, deltatype, chain, chainsize
636 636
637 637 fm = ui.formatter('debugdeltachain', opts)
638 638
639 639 fm.plain(' rev chain# chainlen prev delta '
640 640 'size rawsize chainsize ratio lindist extradist '
641 641 'extraratio')
642 642 if withsparseread:
643 643 fm.plain(' readsize largestblk rddensity')
644 644 fm.plain('\n')
645 645
646 646 chainbases = {}
647 647 for rev in r:
648 648 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
649 649 chainbase = chain[0]
650 650 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
651 651 start = r.start
652 652 length = r.length
653 653 basestart = start(chainbase)
654 654 revstart = start(rev)
655 655 lineardist = revstart + comp - basestart
656 656 extradist = lineardist - chainsize
657 657 try:
658 658 prevrev = chain[-2]
659 659 except IndexError:
660 660 prevrev = -1
661 661
662 662 chainratio = float(chainsize) / float(uncomp)
663 663 extraratio = float(extradist) / float(chainsize)
664 664
665 665 fm.startitem()
666 666 fm.write('rev chainid chainlen prevrev deltatype compsize '
667 667 'uncompsize chainsize chainratio lindist extradist '
668 668 'extraratio',
669 669 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
670 670 rev, chainid, len(chain), prevrev, deltatype, comp,
671 671 uncomp, chainsize, chainratio, lineardist, extradist,
672 672 extraratio,
673 673 rev=rev, chainid=chainid, chainlen=len(chain),
674 674 prevrev=prevrev, deltatype=deltatype, compsize=comp,
675 675 uncompsize=uncomp, chainsize=chainsize,
676 676 chainratio=chainratio, lindist=lineardist,
677 677 extradist=extradist, extraratio=extraratio)
678 678 if withsparseread:
679 679 readsize = 0
680 680 largestblock = 0
681 681 for revschunk in revlog._slicechunk(r, chain):
682 682 blkend = start(revschunk[-1]) + length(revschunk[-1])
683 683 blksize = blkend - start(revschunk[0])
684 684
685 685 readsize += blksize
686 686 if largestblock < blksize:
687 687 largestblock = blksize
688 688
689 689 readdensity = float(chainsize) / float(readsize)
690 690
691 691 fm.write('readsize largestblock readdensity',
692 692 ' %10d %10d %9.5f',
693 693 readsize, largestblock, readdensity,
694 694 readsize=readsize, largestblock=largestblock,
695 695 readdensity=readdensity)
696 696
697 697 fm.plain('\n')
698 698
699 699 fm.end()
700 700
701 701 @command('debugdirstate|debugstate',
702 702 [('', 'nodates', None, _('do not display the saved mtime')),
703 703 ('', 'datesort', None, _('sort by saved mtime'))],
704 704 _('[OPTION]...'))
705 705 def debugstate(ui, repo, **opts):
706 706 """show the contents of the current dirstate"""
707 707
708 708 nodates = opts.get(r'nodates')
709 709 datesort = opts.get(r'datesort')
710 710
711 711 timestr = ""
712 712 if datesort:
713 713 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
714 714 else:
715 715 keyfunc = None # sort by filename
716 716 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
717 717 if ent[3] == -1:
718 718 timestr = 'unset '
719 719 elif nodates:
720 720 timestr = 'set '
721 721 else:
722 722 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
723 723 time.localtime(ent[3]))
724 724 if ent[1] & 0o20000:
725 725 mode = 'lnk'
726 726 else:
727 727 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
728 728 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
729 729 for f in repo.dirstate.copies():
730 730 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
731 731
732 732 @command('debugdiscovery',
733 733 [('', 'old', None, _('use old-style discovery')),
734 734 ('', 'nonheads', None,
735 735 _('use old-style discovery with non-heads included')),
736 736 ] + cmdutil.remoteopts,
737 737 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
738 738 def debugdiscovery(ui, repo, remoteurl="default", **opts):
739 739 """runs the changeset discovery protocol in isolation"""
740 740 opts = pycompat.byteskwargs(opts)
741 741 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
742 742 opts.get('branch'))
743 743 remote = hg.peer(repo, opts, remoteurl)
744 744 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
745 745
746 746 # make sure tests are repeatable
747 747 random.seed(12323)
748 748
749 749 def doit(localheads, remoteheads, remote=remote):
750 750 if opts.get('old'):
751 751 if localheads:
752 752 raise error.Abort('cannot use localheads with old style '
753 753 'discovery')
754 754 if not util.safehasattr(remote, 'branches'):
755 755 # enable in-client legacy support
756 756 remote = localrepo.locallegacypeer(remote.local())
757 757 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
758 758 force=True)
759 759 common = set(common)
760 760 if not opts.get('nonheads'):
761 761 ui.write(("unpruned common: %s\n") %
762 762 " ".join(sorted(short(n) for n in common)))
763 763 dag = dagutil.revlogdag(repo.changelog)
764 764 all = dag.ancestorset(dag.internalizeall(common))
765 765 common = dag.externalizeall(dag.headsetofconnecteds(all))
766 766 else:
767 767 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
768 768 common = set(common)
769 769 rheads = set(hds)
770 770 lheads = set(repo.heads())
771 771 ui.write(("common heads: %s\n") %
772 772 " ".join(sorted(short(n) for n in common)))
773 773 if lheads <= common:
774 774 ui.write(("local is subset\n"))
775 775 elif rheads <= common:
776 776 ui.write(("remote is subset\n"))
777 777
778 778 serverlogs = opts.get('serverlog')
779 779 if serverlogs:
780 780 for filename in serverlogs:
781 781 with open(filename, 'r') as logfile:
782 782 line = logfile.readline()
783 783 while line:
784 784 parts = line.strip().split(';')
785 785 op = parts[1]
786 786 if op == 'cg':
787 787 pass
788 788 elif op == 'cgss':
789 789 doit(parts[2].split(' '), parts[3].split(' '))
790 790 elif op == 'unb':
791 791 doit(parts[3].split(' '), parts[2].split(' '))
792 792 line = logfile.readline()
793 793 else:
794 794 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
795 795 opts.get('remote_head'))
796 796 localrevs = opts.get('local_head')
797 797 doit(localrevs, remoterevs)
798 798
799 799 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
800 800 def debugextensions(ui, **opts):
801 801 '''show information about active extensions'''
802 802 opts = pycompat.byteskwargs(opts)
803 803 exts = extensions.extensions(ui)
804 804 hgver = util.version()
805 805 fm = ui.formatter('debugextensions', opts)
806 806 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
807 807 isinternal = extensions.ismoduleinternal(extmod)
808 808 extsource = pycompat.fsencode(extmod.__file__)
809 809 if isinternal:
810 810 exttestedwith = [] # never expose magic string to users
811 811 else:
812 812 exttestedwith = getattr(extmod, 'testedwith', '').split()
813 813 extbuglink = getattr(extmod, 'buglink', None)
814 814
815 815 fm.startitem()
816 816
817 817 if ui.quiet or ui.verbose:
818 818 fm.write('name', '%s\n', extname)
819 819 else:
820 820 fm.write('name', '%s', extname)
821 821 if isinternal or hgver in exttestedwith:
822 822 fm.plain('\n')
823 823 elif not exttestedwith:
824 824 fm.plain(_(' (untested!)\n'))
825 825 else:
826 826 lasttestedversion = exttestedwith[-1]
827 827 fm.plain(' (%s!)\n' % lasttestedversion)
828 828
829 829 fm.condwrite(ui.verbose and extsource, 'source',
830 830 _(' location: %s\n'), extsource or "")
831 831
832 832 if ui.verbose:
833 833 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
834 834 fm.data(bundled=isinternal)
835 835
836 836 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
837 837 _(' tested with: %s\n'),
838 838 fm.formatlist(exttestedwith, name='ver'))
839 839
840 840 fm.condwrite(ui.verbose and extbuglink, 'buglink',
841 841 _(' bug reporting: %s\n'), extbuglink or "")
842 842
843 843 fm.end()
844 844
845 845 @command('debugfileset',
846 846 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
847 847 _('[-r REV] FILESPEC'))
848 848 def debugfileset(ui, repo, expr, **opts):
849 849 '''parse and apply a fileset specification'''
850 850 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
851 851 if ui.verbose:
852 852 tree = fileset.parse(expr)
853 853 ui.note(fileset.prettyformat(tree), "\n")
854 854
855 855 for f in ctx.getfileset(expr):
856 856 ui.write("%s\n" % f)
857 857
858 858 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
859 859 def debugfsinfo(ui, path="."):
860 860 """show information detected about current filesystem"""
861 861 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
862 862 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
863 863 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
864 864 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
865 865 casesensitive = '(unknown)'
866 866 try:
867 867 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
868 868 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
869 869 except OSError:
870 870 pass
871 871 ui.write(('case-sensitive: %s\n') % casesensitive)
872 872
873 873 @command('debuggetbundle',
874 874 [('H', 'head', [], _('id of head node'), _('ID')),
875 875 ('C', 'common', [], _('id of common node'), _('ID')),
876 876 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
877 877 _('REPO FILE [-H|-C ID]...'),
878 878 norepo=True)
879 879 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
880 880 """retrieves a bundle from a repo
881 881
882 882 Every ID must be a full-length hex node id string. Saves the bundle to the
883 883 given file.
884 884 """
885 885 opts = pycompat.byteskwargs(opts)
886 886 repo = hg.peer(ui, opts, repopath)
887 887 if not repo.capable('getbundle'):
888 888 raise error.Abort("getbundle() not supported by target repository")
889 889 args = {}
890 890 if common:
891 891 args[r'common'] = [bin(s) for s in common]
892 892 if head:
893 893 args[r'heads'] = [bin(s) for s in head]
894 894 # TODO: get desired bundlecaps from command line.
895 895 args[r'bundlecaps'] = None
896 896 bundle = repo.getbundle('debug', **args)
897 897
898 898 bundletype = opts.get('type', 'bzip2').lower()
899 899 btypes = {'none': 'HG10UN',
900 900 'bzip2': 'HG10BZ',
901 901 'gzip': 'HG10GZ',
902 902 'bundle2': 'HG20'}
903 903 bundletype = btypes.get(bundletype)
904 904 if bundletype not in bundle2.bundletypes:
905 905 raise error.Abort(_('unknown bundle type specified with --type'))
906 906 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
907 907
908 908 @command('debugignore', [], '[FILE]')
909 909 def debugignore(ui, repo, *files, **opts):
910 910 """display the combined ignore pattern and information about ignored files
911 911
912 912 With no argument display the combined ignore pattern.
913 913
914 914 Given space separated file names, shows if the given file is ignored and
915 915 if so, show the ignore rule (file and line number) that matched it.
916 916 """
917 917 ignore = repo.dirstate._ignore
918 918 if not files:
919 919 # Show all the patterns
920 920 ui.write("%s\n" % repr(ignore))
921 921 else:
922 922 m = scmutil.match(repo[None], pats=files)
923 923 for f in m.files():
924 924 nf = util.normpath(f)
925 925 ignored = None
926 926 ignoredata = None
927 927 if nf != '.':
928 928 if ignore(nf):
929 929 ignored = nf
930 930 ignoredata = repo.dirstate._ignorefileandline(nf)
931 931 else:
932 932 for p in util.finddirs(nf):
933 933 if ignore(p):
934 934 ignored = p
935 935 ignoredata = repo.dirstate._ignorefileandline(p)
936 936 break
937 937 if ignored:
938 938 if ignored == nf:
939 939 ui.write(_("%s is ignored\n") % m.uipath(f))
940 940 else:
941 941 ui.write(_("%s is ignored because of "
942 942 "containing folder %s\n")
943 943 % (m.uipath(f), ignored))
944 944 ignorefile, lineno, line = ignoredata
945 945 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
946 946 % (ignorefile, lineno, line))
947 947 else:
948 948 ui.write(_("%s is not ignored\n") % m.uipath(f))
949 949
950 950 @command('debugindex', cmdutil.debugrevlogopts +
951 951 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
952 952 _('[-f FORMAT] -c|-m|FILE'),
953 953 optionalrepo=True)
954 954 def debugindex(ui, repo, file_=None, **opts):
955 955 """dump the contents of an index file"""
956 956 opts = pycompat.byteskwargs(opts)
957 957 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
958 958 format = opts.get('format', 0)
959 959 if format not in (0, 1):
960 960 raise error.Abort(_("unknown format %d") % format)
961 961
962 962 generaldelta = r.version & revlog.FLAG_GENERALDELTA
963 963 if generaldelta:
964 964 basehdr = ' delta'
965 965 else:
966 966 basehdr = ' base'
967 967
968 968 if ui.debugflag:
969 969 shortfn = hex
970 970 else:
971 971 shortfn = short
972 972
973 973 # There might not be anything in r, so have a sane default
974 974 idlen = 12
975 975 for i in r:
976 976 idlen = len(shortfn(r.node(i)))
977 977 break
978 978
979 979 if format == 0:
980 980 ui.write((" rev offset length " + basehdr + " linkrev"
981 981 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
982 982 elif format == 1:
983 983 ui.write((" rev flag offset length"
984 984 " size " + basehdr + " link p1 p2"
985 985 " %s\n") % "nodeid".rjust(idlen))
986 986
987 987 for i in r:
988 988 node = r.node(i)
989 989 if generaldelta:
990 990 base = r.deltaparent(i)
991 991 else:
992 992 base = r.chainbase(i)
993 993 if format == 0:
994 994 try:
995 995 pp = r.parents(node)
996 996 except Exception:
997 997 pp = [nullid, nullid]
998 998 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
999 999 i, r.start(i), r.length(i), base, r.linkrev(i),
1000 1000 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1001 1001 elif format == 1:
1002 1002 pr = r.parentrevs(i)
1003 1003 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1004 1004 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1005 1005 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1006 1006
1007 1007 @command('debugindexdot', cmdutil.debugrevlogopts,
1008 1008 _('-c|-m|FILE'), optionalrepo=True)
1009 1009 def debugindexdot(ui, repo, file_=None, **opts):
1010 1010 """dump an index DAG as a graphviz dot file"""
1011 1011 opts = pycompat.byteskwargs(opts)
1012 1012 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1013 1013 ui.write(("digraph G {\n"))
1014 1014 for i in r:
1015 1015 node = r.node(i)
1016 1016 pp = r.parents(node)
1017 1017 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1018 1018 if pp[1] != nullid:
1019 1019 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1020 1020 ui.write("}\n")
1021 1021
1022 1022 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1023 1023 def debuginstall(ui, **opts):
1024 1024 '''test Mercurial installation
1025 1025
1026 1026 Returns 0 on success.
1027 1027 '''
1028 1028 opts = pycompat.byteskwargs(opts)
1029 1029
1030 1030 def writetemp(contents):
1031 1031 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1032 1032 f = os.fdopen(fd, pycompat.sysstr("wb"))
1033 1033 f.write(contents)
1034 1034 f.close()
1035 1035 return name
1036 1036
1037 1037 problems = 0
1038 1038
1039 1039 fm = ui.formatter('debuginstall', opts)
1040 1040 fm.startitem()
1041 1041
1042 1042 # encoding
1043 1043 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1044 1044 err = None
1045 1045 try:
1046 1046 codecs.lookup(pycompat.sysstr(encoding.encoding))
1047 1047 except LookupError as inst:
1048 1048 err = util.forcebytestr(inst)
1049 1049 problems += 1
1050 1050 fm.condwrite(err, 'encodingerror', _(" %s\n"
1051 1051 " (check that your locale is properly set)\n"), err)
1052 1052
1053 1053 # Python
1054 1054 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1055 1055 pycompat.sysexecutable)
1056 1056 fm.write('pythonver', _("checking Python version (%s)\n"),
1057 1057 ("%d.%d.%d" % sys.version_info[:3]))
1058 1058 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1059 1059 os.path.dirname(pycompat.fsencode(os.__file__)))
1060 1060
1061 1061 security = set(sslutil.supportedprotocols)
1062 1062 if sslutil.hassni:
1063 1063 security.add('sni')
1064 1064
1065 1065 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1066 1066 fm.formatlist(sorted(security), name='protocol',
1067 1067 fmt='%s', sep=','))
1068 1068
1069 1069 # These are warnings, not errors. So don't increment problem count. This
1070 1070 # may change in the future.
1071 1071 if 'tls1.2' not in security:
1072 1072 fm.plain(_(' TLS 1.2 not supported by Python install; '
1073 1073 'network connections lack modern security\n'))
1074 1074 if 'sni' not in security:
1075 1075 fm.plain(_(' SNI not supported by Python install; may have '
1076 1076 'connectivity issues with some servers\n'))
1077 1077
1078 1078 # TODO print CA cert info
1079 1079
1080 1080 # hg version
1081 1081 hgver = util.version()
1082 1082 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1083 1083 hgver.split('+')[0])
1084 1084 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1085 1085 '+'.join(hgver.split('+')[1:]))
1086 1086
1087 1087 # compiled modules
1088 1088 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1089 1089 policy.policy)
1090 1090 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1091 1091 os.path.dirname(pycompat.fsencode(__file__)))
1092 1092
1093 1093 if policy.policy in ('c', 'allow'):
1094 1094 err = None
1095 1095 try:
1096 1096 from .cext import (
1097 1097 base85,
1098 1098 bdiff,
1099 1099 mpatch,
1100 1100 osutil,
1101 1101 )
1102 1102 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1103 1103 except Exception as inst:
1104 1104 err = util.forcebytestr(inst)
1105 1105 problems += 1
1106 1106 fm.condwrite(err, 'extensionserror', " %s\n", err)
1107 1107
1108 1108 compengines = util.compengines._engines.values()
1109 1109 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1110 1110 fm.formatlist(sorted(e.name() for e in compengines),
1111 1111 name='compengine', fmt='%s', sep=', '))
1112 1112 fm.write('compenginesavail', _('checking available compression engines '
1113 1113 '(%s)\n'),
1114 1114 fm.formatlist(sorted(e.name() for e in compengines
1115 1115 if e.available()),
1116 1116 name='compengine', fmt='%s', sep=', '))
1117 1117 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1118 1118 fm.write('compenginesserver', _('checking available compression engines '
1119 1119 'for wire protocol (%s)\n'),
1120 1120 fm.formatlist([e.name() for e in wirecompengines
1121 1121 if e.wireprotosupport()],
1122 1122 name='compengine', fmt='%s', sep=', '))
1123 1123
1124 1124 # templates
1125 1125 p = templater.templatepaths()
1126 1126 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1127 1127 fm.condwrite(not p, '', _(" no template directories found\n"))
1128 1128 if p:
1129 1129 m = templater.templatepath("map-cmdline.default")
1130 1130 if m:
1131 1131 # template found, check if it is working
1132 1132 err = None
1133 1133 try:
1134 1134 templater.templater.frommapfile(m)
1135 1135 except Exception as inst:
1136 1136 err = util.forcebytestr(inst)
1137 1137 p = None
1138 1138 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1139 1139 else:
1140 1140 p = None
1141 1141 fm.condwrite(p, 'defaulttemplate',
1142 1142 _("checking default template (%s)\n"), m)
1143 1143 fm.condwrite(not m, 'defaulttemplatenotfound',
1144 1144 _(" template '%s' not found\n"), "default")
1145 1145 if not p:
1146 1146 problems += 1
1147 1147 fm.condwrite(not p, '',
1148 1148 _(" (templates seem to have been installed incorrectly)\n"))
1149 1149
1150 1150 # editor
1151 1151 editor = ui.geteditor()
1152 1152 editor = util.expandpath(editor)
1153 1153 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1154 1154 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1155 1155 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1156 1156 _(" No commit editor set and can't find %s in PATH\n"
1157 1157 " (specify a commit editor in your configuration"
1158 1158 " file)\n"), not cmdpath and editor == 'vi' and editor)
1159 1159 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1160 1160 _(" Can't find editor '%s' in PATH\n"
1161 1161 " (specify a commit editor in your configuration"
1162 1162 " file)\n"), not cmdpath and editor)
1163 1163 if not cmdpath and editor != 'vi':
1164 1164 problems += 1
1165 1165
1166 1166 # check username
1167 1167 username = None
1168 1168 err = None
1169 1169 try:
1170 1170 username = ui.username()
1171 1171 except error.Abort as e:
1172 1172 err = util.forcebytestr(e)
1173 1173 problems += 1
1174 1174
1175 1175 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1176 1176 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1177 1177 " (specify a username in your configuration file)\n"), err)
1178 1178
1179 1179 fm.condwrite(not problems, '',
1180 1180 _("no problems detected\n"))
1181 1181 if not problems:
1182 1182 fm.data(problems=problems)
1183 1183 fm.condwrite(problems, 'problems',
1184 1184 _("%d problems detected,"
1185 1185 " please check your install!\n"), problems)
1186 1186 fm.end()
1187 1187
1188 1188 return problems
1189 1189
1190 1190 @command('debugknown', [], _('REPO ID...'), norepo=True)
1191 1191 def debugknown(ui, repopath, *ids, **opts):
1192 1192 """test whether node ids are known to a repo
1193 1193
1194 1194 Every ID must be a full-length hex node id string. Returns a list of 0s
1195 1195 and 1s indicating unknown/known.
1196 1196 """
1197 1197 opts = pycompat.byteskwargs(opts)
1198 1198 repo = hg.peer(ui, opts, repopath)
1199 1199 if not repo.capable('known'):
1200 1200 raise error.Abort("known() not supported by target repository")
1201 1201 flags = repo.known([bin(s) for s in ids])
1202 1202 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1203 1203
1204 1204 @command('debuglabelcomplete', [], _('LABEL...'))
1205 1205 def debuglabelcomplete(ui, repo, *args):
1206 1206 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1207 1207 debugnamecomplete(ui, repo, *args)
1208 1208
1209 1209 @command('debuglocks',
1210 1210 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1211 1211 ('W', 'force-wlock', None,
1212 1212 _('free the working state lock (DANGEROUS)'))],
1213 1213 _('[OPTION]...'))
1214 1214 def debuglocks(ui, repo, **opts):
1215 1215 """show or modify state of locks
1216 1216
1217 1217 By default, this command will show which locks are held. This
1218 1218 includes the user and process holding the lock, the amount of time
1219 1219 the lock has been held, and the machine name where the process is
1220 1220 running if it's not local.
1221 1221
1222 1222 Locks protect the integrity of Mercurial's data, so should be
1223 1223 treated with care. System crashes or other interruptions may cause
1224 1224 locks to not be properly released, though Mercurial will usually
1225 1225 detect and remove such stale locks automatically.
1226 1226
1227 1227 However, detecting stale locks may not always be possible (for
1228 1228 instance, on a shared filesystem). Removing locks may also be
1229 1229 blocked by filesystem permissions.
1230 1230
1231 1231 Returns 0 if no locks are held.
1232 1232
1233 1233 """
1234 1234
1235 1235 if opts.get(r'force_lock'):
1236 1236 repo.svfs.unlink('lock')
1237 1237 if opts.get(r'force_wlock'):
1238 1238 repo.vfs.unlink('wlock')
1239 1239 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1240 1240 return 0
1241 1241
1242 1242 now = time.time()
1243 1243 held = 0
1244 1244
1245 1245 def report(vfs, name, method):
1246 1246 # this causes stale locks to get reaped for more accurate reporting
1247 1247 try:
1248 1248 l = method(False)
1249 1249 except error.LockHeld:
1250 1250 l = None
1251 1251
1252 1252 if l:
1253 1253 l.release()
1254 1254 else:
1255 1255 try:
1256 1256 stat = vfs.lstat(name)
1257 1257 age = now - stat.st_mtime
1258 1258 user = util.username(stat.st_uid)
1259 1259 locker = vfs.readlock(name)
1260 1260 if ":" in locker:
1261 1261 host, pid = locker.split(':')
1262 1262 if host == socket.gethostname():
1263 1263 locker = 'user %s, process %s' % (user, pid)
1264 1264 else:
1265 1265 locker = 'user %s, process %s, host %s' \
1266 1266 % (user, pid, host)
1267 1267 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1268 1268 return 1
1269 1269 except OSError as e:
1270 1270 if e.errno != errno.ENOENT:
1271 1271 raise
1272 1272
1273 1273 ui.write(("%-6s free\n") % (name + ":"))
1274 1274 return 0
1275 1275
1276 1276 held += report(repo.svfs, "lock", repo.lock)
1277 1277 held += report(repo.vfs, "wlock", repo.wlock)
1278 1278
1279 1279 return held
1280 1280
1281 1281 @command('debugmergestate', [], '')
1282 1282 def debugmergestate(ui, repo, *args):
1283 1283 """print merge state
1284 1284
1285 1285 Use --verbose to print out information about whether v1 or v2 merge state
1286 1286 was chosen."""
1287 1287 def _hashornull(h):
1288 1288 if h == nullhex:
1289 1289 return 'null'
1290 1290 else:
1291 1291 return h
1292 1292
1293 1293 def printrecords(version):
1294 1294 ui.write(('* version %s records\n') % version)
1295 1295 if version == 1:
1296 1296 records = v1records
1297 1297 else:
1298 1298 records = v2records
1299 1299
1300 1300 for rtype, record in records:
1301 1301 # pretty print some record types
1302 1302 if rtype == 'L':
1303 1303 ui.write(('local: %s\n') % record)
1304 1304 elif rtype == 'O':
1305 1305 ui.write(('other: %s\n') % record)
1306 1306 elif rtype == 'm':
1307 1307 driver, mdstate = record.split('\0', 1)
1308 1308 ui.write(('merge driver: %s (state "%s")\n')
1309 1309 % (driver, mdstate))
1310 1310 elif rtype in 'FDC':
1311 1311 r = record.split('\0')
1312 1312 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1313 1313 if version == 1:
1314 1314 onode = 'not stored in v1 format'
1315 1315 flags = r[7]
1316 1316 else:
1317 1317 onode, flags = r[7:9]
1318 1318 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1319 1319 % (f, rtype, state, _hashornull(hash)))
1320 1320 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1321 1321 ui.write((' ancestor path: %s (node %s)\n')
1322 1322 % (afile, _hashornull(anode)))
1323 1323 ui.write((' other path: %s (node %s)\n')
1324 1324 % (ofile, _hashornull(onode)))
1325 1325 elif rtype == 'f':
1326 1326 filename, rawextras = record.split('\0', 1)
1327 1327 extras = rawextras.split('\0')
1328 1328 i = 0
1329 1329 extrastrings = []
1330 1330 while i < len(extras):
1331 1331 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1332 1332 i += 2
1333 1333
1334 1334 ui.write(('file extras: %s (%s)\n')
1335 1335 % (filename, ', '.join(extrastrings)))
1336 1336 elif rtype == 'l':
1337 1337 labels = record.split('\0', 2)
1338 1338 labels = [l for l in labels if len(l) > 0]
1339 1339 ui.write(('labels:\n'))
1340 1340 ui.write((' local: %s\n' % labels[0]))
1341 1341 ui.write((' other: %s\n' % labels[1]))
1342 1342 if len(labels) > 2:
1343 1343 ui.write((' base: %s\n' % labels[2]))
1344 1344 else:
1345 1345 ui.write(('unrecognized entry: %s\t%s\n')
1346 1346 % (rtype, record.replace('\0', '\t')))
1347 1347
1348 1348 # Avoid mergestate.read() since it may raise an exception for unsupported
1349 1349 # merge state records. We shouldn't be doing this, but this is OK since this
1350 1350 # command is pretty low-level.
1351 1351 ms = mergemod.mergestate(repo)
1352 1352
1353 1353 # sort so that reasonable information is on top
1354 1354 v1records = ms._readrecordsv1()
1355 1355 v2records = ms._readrecordsv2()
1356 1356 order = 'LOml'
1357 1357 def key(r):
1358 1358 idx = order.find(r[0])
1359 1359 if idx == -1:
1360 1360 return (1, r[1])
1361 1361 else:
1362 1362 return (0, idx)
1363 1363 v1records.sort(key=key)
1364 1364 v2records.sort(key=key)
1365 1365
1366 1366 if not v1records and not v2records:
1367 1367 ui.write(('no merge state found\n'))
1368 1368 elif not v2records:
1369 1369 ui.note(('no version 2 merge state\n'))
1370 1370 printrecords(1)
1371 1371 elif ms._v1v2match(v1records, v2records):
1372 1372 ui.note(('v1 and v2 states match: using v2\n'))
1373 1373 printrecords(2)
1374 1374 else:
1375 1375 ui.note(('v1 and v2 states mismatch: using v1\n'))
1376 1376 printrecords(1)
1377 1377 if ui.verbose:
1378 1378 printrecords(2)
1379 1379
1380 1380 @command('debugnamecomplete', [], _('NAME...'))
1381 1381 def debugnamecomplete(ui, repo, *args):
1382 1382 '''complete "names" - tags, open branch names, bookmark names'''
1383 1383
1384 1384 names = set()
1385 1385 # since we previously only listed open branches, we will handle that
1386 1386 # specially (after this for loop)
1387 1387 for name, ns in repo.names.iteritems():
1388 1388 if name != 'branches':
1389 1389 names.update(ns.listnames(repo))
1390 1390 names.update(tag for (tag, heads, tip, closed)
1391 1391 in repo.branchmap().iterbranches() if not closed)
1392 1392 completions = set()
1393 1393 if not args:
1394 1394 args = ['']
1395 1395 for a in args:
1396 1396 completions.update(n for n in names if n.startswith(a))
1397 1397 ui.write('\n'.join(sorted(completions)))
1398 1398 ui.write('\n')
1399 1399
1400 1400 @command('debugobsolete',
1401 1401 [('', 'flags', 0, _('markers flag')),
1402 1402 ('', 'record-parents', False,
1403 1403 _('record parent information for the precursor')),
1404 1404 ('r', 'rev', [], _('display markers relevant to REV')),
1405 1405 ('', 'exclusive', False, _('restrict display to markers only '
1406 1406 'relevant to REV')),
1407 1407 ('', 'index', False, _('display index of the marker')),
1408 1408 ('', 'delete', [], _('delete markers specified by indices')),
1409 1409 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1410 1410 _('[OBSOLETED [REPLACEMENT ...]]'))
1411 1411 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1412 1412 """create arbitrary obsolete marker
1413 1413
1414 1414 With no arguments, displays the list of obsolescence markers."""
1415 1415
1416 1416 opts = pycompat.byteskwargs(opts)
1417 1417
1418 1418 def parsenodeid(s):
1419 1419 try:
1420 1420 # We do not use revsingle/revrange functions here to accept
1421 1421 # arbitrary node identifiers, possibly not present in the
1422 1422 # local repository.
1423 1423 n = bin(s)
1424 1424 if len(n) != len(nullid):
1425 1425 raise TypeError()
1426 1426 return n
1427 1427 except TypeError:
1428 1428 raise error.Abort('changeset references must be full hexadecimal '
1429 1429 'node identifiers')
1430 1430
1431 1431 if opts.get('delete'):
1432 1432 indices = []
1433 1433 for v in opts.get('delete'):
1434 1434 try:
1435 1435 indices.append(int(v))
1436 1436 except ValueError:
1437 1437 raise error.Abort(_('invalid index value: %r') % v,
1438 1438 hint=_('use integers for indices'))
1439 1439
1440 1440 if repo.currenttransaction():
1441 1441 raise error.Abort(_('cannot delete obsmarkers in the middle '
1442 1442 'of transaction.'))
1443 1443
1444 1444 with repo.lock():
1445 1445 n = repair.deleteobsmarkers(repo.obsstore, indices)
1446 1446 ui.write(_('deleted %i obsolescence markers\n') % n)
1447 1447
1448 1448 return
1449 1449
1450 1450 if precursor is not None:
1451 1451 if opts['rev']:
1452 1452 raise error.Abort('cannot select revision when creating marker')
1453 1453 metadata = {}
1454 1454 metadata['user'] = opts['user'] or ui.username()
1455 1455 succs = tuple(parsenodeid(succ) for succ in successors)
1456 1456 l = repo.lock()
1457 1457 try:
1458 1458 tr = repo.transaction('debugobsolete')
1459 1459 try:
1460 1460 date = opts.get('date')
1461 1461 if date:
1462 1462 date = util.parsedate(date)
1463 1463 else:
1464 1464 date = None
1465 1465 prec = parsenodeid(precursor)
1466 1466 parents = None
1467 1467 if opts['record_parents']:
1468 1468 if prec not in repo.unfiltered():
1469 1469 raise error.Abort('cannot use --record-parents on '
1470 1470 'unknown changesets')
1471 1471 parents = repo.unfiltered()[prec].parents()
1472 1472 parents = tuple(p.node() for p in parents)
1473 1473 repo.obsstore.create(tr, prec, succs, opts['flags'],
1474 1474 parents=parents, date=date,
1475 1475 metadata=metadata, ui=ui)
1476 1476 tr.close()
1477 1477 except ValueError as exc:
1478 1478 raise error.Abort(_('bad obsmarker input: %s') % exc)
1479 1479 finally:
1480 1480 tr.release()
1481 1481 finally:
1482 1482 l.release()
1483 1483 else:
1484 1484 if opts['rev']:
1485 1485 revs = scmutil.revrange(repo, opts['rev'])
1486 1486 nodes = [repo[r].node() for r in revs]
1487 1487 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1488 1488 exclusive=opts['exclusive']))
1489 1489 markers.sort(key=lambda x: x._data)
1490 1490 else:
1491 1491 markers = obsutil.getmarkers(repo)
1492 1492
1493 1493 markerstoiter = markers
1494 1494 isrelevant = lambda m: True
1495 1495 if opts.get('rev') and opts.get('index'):
1496 1496 markerstoiter = obsutil.getmarkers(repo)
1497 1497 markerset = set(markers)
1498 1498 isrelevant = lambda m: m in markerset
1499 1499
1500 1500 fm = ui.formatter('debugobsolete', opts)
1501 1501 for i, m in enumerate(markerstoiter):
1502 1502 if not isrelevant(m):
1503 1503 # marker can be irrelevant when we're iterating over a set
1504 1504 # of markers (markerstoiter) which is bigger than the set
1505 1505 # of markers we want to display (markers)
1506 1506 # this can happen if both --index and --rev options are
1507 1507 # provided and thus we need to iterate over all of the markers
1508 1508 # to get the correct indices, but only display the ones that
1509 1509 # are relevant to --rev value
1510 1510 continue
1511 1511 fm.startitem()
1512 1512 ind = i if opts.get('index') else None
1513 1513 cmdutil.showmarker(fm, m, index=ind)
1514 1514 fm.end()
1515 1515
1516 1516 @command('debugpathcomplete',
1517 1517 [('f', 'full', None, _('complete an entire path')),
1518 1518 ('n', 'normal', None, _('show only normal files')),
1519 1519 ('a', 'added', None, _('show only added files')),
1520 1520 ('r', 'removed', None, _('show only removed files'))],
1521 1521 _('FILESPEC...'))
1522 1522 def debugpathcomplete(ui, repo, *specs, **opts):
1523 1523 '''complete part or all of a tracked path
1524 1524
1525 1525 This command supports shells that offer path name completion. It
1526 1526 currently completes only files already known to the dirstate.
1527 1527
1528 1528 Completion extends only to the next path segment unless
1529 1529 --full is specified, in which case entire paths are used.'''
1530 1530
1531 1531 def complete(path, acceptable):
1532 1532 dirstate = repo.dirstate
1533 1533 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1534 1534 rootdir = repo.root + pycompat.ossep
1535 1535 if spec != repo.root and not spec.startswith(rootdir):
1536 1536 return [], []
1537 1537 if os.path.isdir(spec):
1538 1538 spec += '/'
1539 1539 spec = spec[len(rootdir):]
1540 1540 fixpaths = pycompat.ossep != '/'
1541 1541 if fixpaths:
1542 1542 spec = spec.replace(pycompat.ossep, '/')
1543 1543 speclen = len(spec)
1544 1544 fullpaths = opts[r'full']
1545 1545 files, dirs = set(), set()
1546 1546 adddir, addfile = dirs.add, files.add
1547 1547 for f, st in dirstate.iteritems():
1548 1548 if f.startswith(spec) and st[0] in acceptable:
1549 1549 if fixpaths:
1550 1550 f = f.replace('/', pycompat.ossep)
1551 1551 if fullpaths:
1552 1552 addfile(f)
1553 1553 continue
1554 1554 s = f.find(pycompat.ossep, speclen)
1555 1555 if s >= 0:
1556 1556 adddir(f[:s])
1557 1557 else:
1558 1558 addfile(f)
1559 1559 return files, dirs
1560 1560
1561 1561 acceptable = ''
1562 1562 if opts[r'normal']:
1563 1563 acceptable += 'nm'
1564 1564 if opts[r'added']:
1565 1565 acceptable += 'a'
1566 1566 if opts[r'removed']:
1567 1567 acceptable += 'r'
1568 1568 cwd = repo.getcwd()
1569 1569 if not specs:
1570 1570 specs = ['.']
1571 1571
1572 1572 files, dirs = set(), set()
1573 1573 for spec in specs:
1574 1574 f, d = complete(spec, acceptable or 'nmar')
1575 1575 files.update(f)
1576 1576 dirs.update(d)
1577 1577 files.update(dirs)
1578 1578 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1579 1579 ui.write('\n')
1580 1580
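# Hedged illustration, not part of the original module: the completion logic
# of complete() above, reduced to a flat set of tracked paths with no
# dirstate. Completion stops at the next path separator unless full paths are
# requested. Names are illustrative only.
def _examplepathcomplete(paths, spec, full=False, sep='/'):
    files, dirs = set(), set()
    for f in paths:
        if not f.startswith(spec):
            continue
        if full:
            files.add(f)
            continue
        s = f.find(sep, len(spec))
        if s >= 0:
            dirs.add(f[:s])
        else:
            files.add(f)
    return files, dirs

# e.g. _examplepathcomplete({'a/b/c', 'a/d', 'x'}, 'a/')
# returns ({'a/d'}, {'a/b'})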
1581 1581 @command('debugpickmergetool',
1582 1582 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1583 1583 ('', 'changedelete', None, _('emulate merging change and delete')),
1584 1584 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1585 1585 _('[PATTERN]...'),
1586 1586 inferrepo=True)
1587 1587 def debugpickmergetool(ui, repo, *pats, **opts):
1588 1588 """examine which merge tool is chosen for specified file
1589 1589
1590 1590 As described in :hg:`help merge-tools`, Mercurial examines
1591 1591 configurations below in this order to decide which merge tool is
1592 1592 chosen for specified file.
1593 1593
1594 1594 1. ``--tool`` option
1595 1595 2. ``HGMERGE`` environment variable
1596 1596 3. configurations in ``merge-patterns`` section
1597 1597 4. configuration of ``ui.merge``
1598 1598 5. configurations in ``merge-tools`` section
1599 1599 6. ``hgmerge`` tool (for historical reasons only)
1600 1600 7. default tool for fallback (``:merge`` or ``:prompt``)
1601 1601
1602 1602 This command writes out the examination result in the style below::
1603 1603
1604 1604 FILE = MERGETOOL
1605 1605
1606 1606 By default, all files known in the first parent context of the
1607 1607 working directory are examined. Use file patterns and/or -I/-X
1608 1608 options to limit target files. -r/--rev is also useful to examine
1609 1609 files in another context without actually updating to it.
1610 1610
1611 1611 With --debug, this command also shows the warning messages emitted
1612 1612 while matching against ``merge-patterns`` and so on. It is recommended
1613 1613 to use this option with explicit file patterns and/or -I/-X options,
1614 1614 because this option increases the amount of output per file according
1615 1615 to the configurations in hgrc.
1616 1616
1617 1617 With -v/--verbose, this command first shows the configurations
1618 1618 below (only those that are specified).
1619 1619
1620 1620 - ``--tool`` option
1621 1621 - ``HGMERGE`` environment variable
1622 1622 - configuration of ``ui.merge``
1623 1623
1624 1624 If a merge tool is chosen before matching against
1625 1625 ``merge-patterns``, this command can't show any helpful
1626 1626 information, even with --debug. In such cases, the information above
1627 1627 is useful for understanding why a merge tool was chosen.
1628 1628 """
1629 1629 opts = pycompat.byteskwargs(opts)
1630 1630 overrides = {}
1631 1631 if opts['tool']:
1632 1632 overrides[('ui', 'forcemerge')] = opts['tool']
1633 1633 ui.note(('with --tool %r\n') % (opts['tool']))
1634 1634
1635 1635 with ui.configoverride(overrides, 'debugmergepatterns'):
1636 1636 hgmerge = encoding.environ.get("HGMERGE")
1637 1637 if hgmerge is not None:
1638 1638 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1639 1639 uimerge = ui.config("ui", "merge")
1640 1640 if uimerge:
1641 1641 ui.note(('with ui.merge=%r\n') % (uimerge))
1642 1642
1643 1643 ctx = scmutil.revsingle(repo, opts.get('rev'))
1644 1644 m = scmutil.match(ctx, pats, opts)
1645 1645 changedelete = opts['changedelete']
1646 1646 for path in ctx.walk(m):
1647 1647 fctx = ctx[path]
1648 1648 try:
1649 1649 if not ui.debugflag:
1650 1650 ui.pushbuffer(error=True)
1651 1651 tool, toolpath = filemerge._picktool(repo, ui, path,
1652 1652 fctx.isbinary(),
1653 1653 'l' in fctx.flags(),
1654 1654 changedelete)
1655 1655 finally:
1656 1656 if not ui.debugflag:
1657 1657 ui.popbuffer()
1658 1658 ui.write(('%s = %s\n') % (path, tool))
1659 1659
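# Hedged illustration, not part of the original module: the ordered fallback
# described in the docstring above ("--tool", then HGMERGE, then pattern and
# ui configuration, then a default), reduced to "first configured source
# wins". This is a sketch, not filemerge._picktool; all names are
# illustrative only.
def _examplepicktool(clitool, envtool, patterntool, uimerge,
                     default=':merge'):
    for candidate in (clitool, envtool, patterntool, uimerge):
        if candidate:
            return candidate
    return default

# e.g. _examplepicktool(None, 'vimdiff', None, 'kdiff3') == 'vimdiff'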
1660 1660 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1661 1661 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1662 1662 '''access the pushkey key/value protocol
1663 1663
1664 1664 With two args, list the keys in the given namespace.
1665 1665
1666 1666 With five args, set a key to new if it currently is set to old.
1667 1667 Reports success or failure.
1668 1668 '''
1669 1669
1670 1670 target = hg.peer(ui, {}, repopath)
1671 1671 if keyinfo:
1672 1672 key, old, new = keyinfo
1673 1673 r = target.pushkey(namespace, key, old, new)
1674 1674 ui.status(str(r) + '\n')
1675 1675 return not r
1676 1676 else:
1677 1677 for k, v in sorted(target.listkeys(namespace).iteritems()):
1678 1678 ui.write("%s\t%s\n" % (util.escapestr(k),
1679 1679 util.escapestr(v)))
1680 1680
1681 1681 @command('debugpvec', [], _('A B'))
1682 1682 def debugpvec(ui, repo, a, b=None):
1683 1683 ca = scmutil.revsingle(repo, a)
1684 1684 cb = scmutil.revsingle(repo, b)
1685 1685 pa = pvec.ctxpvec(ca)
1686 1686 pb = pvec.ctxpvec(cb)
1687 1687 if pa == pb:
1688 1688 rel = "="
1689 1689 elif pa > pb:
1690 1690 rel = ">"
1691 1691 elif pa < pb:
1692 1692 rel = "<"
1693 1693 elif pa | pb:
1694 1694 rel = "|"
1695 1695 ui.write(_("a: %s\n") % pa)
1696 1696 ui.write(_("b: %s\n") % pb)
1697 1697 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1698 1698 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1699 1699 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1700 1700 pa.distance(pb), rel))
1701 1701
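# Hedged illustration, not part of the original module: pvec._hamming above
# compares packed parent vectors; this standalone sketch shows the same idea
# for plain equal-length sequences. Illustrative only.
def _examplehamming(a, b):
    """Return the number of positions at which ``a`` and ``b`` differ."""
    if len(a) != len(b):
        raise ValueError('sequences must have equal length')
    return sum(x != y for x, y in zip(a, b))

# e.g. _examplehamming('10110', '10011') == 2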
1702 1702 @command('debugrebuilddirstate|debugrebuildstate',
1703 1703 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1704 1704 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1705 1705 'the working copy parent')),
1706 1706 ],
1707 1707 _('[-r REV]'))
1708 1708 def debugrebuilddirstate(ui, repo, rev, **opts):
1709 1709 """rebuild the dirstate as it would look like for the given revision
1710 1710
1711 1711 If no revision is specified, the first current parent will be used.
1712 1712
1713 1713 The dirstate will be set to the files of the given revision.
1714 1714 The actual working directory content or existing dirstate
1715 1715 information such as adds or removes is not considered.
1716 1716
1717 1717 ``minimal`` will only rebuild the dirstate status for files that claim to be
1718 1718 tracked but are not in the parent manifest, or that exist in the parent
1719 1719 manifest but are not in the dirstate. It will not change adds, removes, or
1720 1720 modified files that are in the working copy parent.
1721 1721
1722 1722 One use of this command is to make the next :hg:`status` invocation
1723 1723 check the actual file content.
1724 1724 """
1725 1725 ctx = scmutil.revsingle(repo, rev)
1726 1726 with repo.wlock():
1727 1727 dirstate = repo.dirstate
1728 1728 changedfiles = None
1729 1729 # See command doc for what minimal does.
1730 1730 if opts.get(r'minimal'):
1731 1731 manifestfiles = set(ctx.manifest().keys())
1732 1732 dirstatefiles = set(dirstate)
1733 1733 manifestonly = manifestfiles - dirstatefiles
1734 1734 dsonly = dirstatefiles - manifestfiles
1735 1735 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1736 1736 changedfiles = manifestonly | dsnotadded
1737 1737
1738 1738 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1739 1739
1740 1740 @command('debugrebuildfncache', [], '')
1741 1741 def debugrebuildfncache(ui, repo):
1742 1742 """rebuild the fncache file"""
1743 1743 repair.rebuildfncache(ui, repo)
1744 1744
1745 1745 @command('debugrename',
1746 1746 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1747 1747 _('[-r REV] FILE'))
1748 1748 def debugrename(ui, repo, file1, *pats, **opts):
1749 1749 """dump rename information"""
1750 1750
1751 1751 opts = pycompat.byteskwargs(opts)
1752 1752 ctx = scmutil.revsingle(repo, opts.get('rev'))
1753 1753 m = scmutil.match(ctx, (file1,) + pats, opts)
1754 1754 for abs in ctx.walk(m):
1755 1755 fctx = ctx[abs]
1756 1756 o = fctx.filelog().renamed(fctx.filenode())
1757 1757 rel = m.rel(abs)
1758 1758 if o:
1759 1759 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1760 1760 else:
1761 1761 ui.write(_("%s not renamed\n") % rel)
1762 1762
1763 1763 @command('debugrevlog', cmdutil.debugrevlogopts +
1764 1764 [('d', 'dump', False, _('dump index data'))],
1765 1765 _('-c|-m|FILE'),
1766 1766 optionalrepo=True)
1767 1767 def debugrevlog(ui, repo, file_=None, **opts):
1768 1768 """show data and statistics about a revlog"""
1769 1769 opts = pycompat.byteskwargs(opts)
1770 1770 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1771 1771
1772 1772 if opts.get("dump"):
1773 1773 numrevs = len(r)
1774 1774 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1775 1775 " rawsize totalsize compression heads chainlen\n"))
1776 1776 ts = 0
1777 1777 heads = set()
1778 1778
1779 1779 for rev in xrange(numrevs):
1780 1780 dbase = r.deltaparent(rev)
1781 1781 if dbase == -1:
1782 1782 dbase = rev
1783 1783 cbase = r.chainbase(rev)
1784 1784 clen = r.chainlen(rev)
1785 1785 p1, p2 = r.parentrevs(rev)
1786 1786 rs = r.rawsize(rev)
1787 1787 ts = ts + rs
1788 1788 heads -= set(r.parentrevs(rev))
1789 1789 heads.add(rev)
1790 1790 try:
1791 1791 compression = ts / r.end(rev)
1792 1792 except ZeroDivisionError:
1793 1793 compression = 0
1794 1794 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1795 1795 "%11d %5d %8d\n" %
1796 1796 (rev, p1, p2, r.start(rev), r.end(rev),
1797 1797 r.start(dbase), r.start(cbase),
1798 1798 r.start(p1), r.start(p2),
1799 1799 rs, ts, compression, len(heads), clen))
1800 1800 return 0
1801 1801
1802 1802 v = r.version
1803 1803 format = v & 0xFFFF
1804 1804 flags = []
1805 1805 gdelta = False
1806 1806 if v & revlog.FLAG_INLINE_DATA:
1807 1807 flags.append('inline')
1808 1808 if v & revlog.FLAG_GENERALDELTA:
1809 1809 gdelta = True
1810 1810 flags.append('generaldelta')
1811 1811 if not flags:
1812 1812 flags = ['(none)']
1813 1813
1814 1814 nummerges = 0
1815 1815 numfull = 0
1816 1816 numprev = 0
1817 1817 nump1 = 0
1818 1818 nump2 = 0
1819 1819 numother = 0
1820 1820 nump1prev = 0
1821 1821 nump2prev = 0
1822 1822 chainlengths = []
1823 1823 chainbases = []
1824 1824 chainspans = []
1825 1825
1826 1826 datasize = [None, 0, 0]
1827 1827 fullsize = [None, 0, 0]
1828 1828 deltasize = [None, 0, 0]
1829 1829 chunktypecounts = {}
1830 1830 chunktypesizes = {}
1831 1831
1832 1832 def addsize(size, l):
1833 1833 if l[0] is None or size < l[0]:
1834 1834 l[0] = size
1835 1835 if size > l[1]:
1836 1836 l[1] = size
1837 1837 l[2] += size
1838 1838
1839 1839 numrevs = len(r)
1840 1840 for rev in xrange(numrevs):
1841 1841 p1, p2 = r.parentrevs(rev)
1842 1842 delta = r.deltaparent(rev)
1843 1843 if format > 0:
1844 1844 addsize(r.rawsize(rev), datasize)
1845 1845 if p2 != nullrev:
1846 1846 nummerges += 1
1847 1847 size = r.length(rev)
1848 1848 if delta == nullrev:
1849 1849 chainlengths.append(0)
1850 1850 chainbases.append(r.start(rev))
1851 1851 chainspans.append(size)
1852 1852 numfull += 1
1853 1853 addsize(size, fullsize)
1854 1854 else:
1855 1855 chainlengths.append(chainlengths[delta] + 1)
1856 1856 baseaddr = chainbases[delta]
1857 1857 revaddr = r.start(rev)
1858 1858 chainbases.append(baseaddr)
1859 1859 chainspans.append((revaddr - baseaddr) + size)
1860 1860 addsize(size, deltasize)
1861 1861 if delta == rev - 1:
1862 1862 numprev += 1
1863 1863 if delta == p1:
1864 1864 nump1prev += 1
1865 1865 elif delta == p2:
1866 1866 nump2prev += 1
1867 1867 elif delta == p1:
1868 1868 nump1 += 1
1869 1869 elif delta == p2:
1870 1870 nump2 += 1
1871 1871 elif delta != nullrev:
1872 1872 numother += 1
1873 1873
1874 1874 # Obtain data on the raw chunks in the revlog.
1875 1875 segment = r._getsegmentforrevs(rev, rev)[1]
1876 1876 if segment:
1877 1877 chunktype = bytes(segment[0:1])
1878 1878 else:
1879 1879 chunktype = 'empty'
1880 1880
1881 1881 if chunktype not in chunktypecounts:
1882 1882 chunktypecounts[chunktype] = 0
1883 1883 chunktypesizes[chunktype] = 0
1884 1884
1885 1885 chunktypecounts[chunktype] += 1
1886 1886 chunktypesizes[chunktype] += size
1887 1887
1888 1888 # Adjust size min value for empty cases
1889 1889 for size in (datasize, fullsize, deltasize):
1890 1890 if size[0] is None:
1891 1891 size[0] = 0
1892 1892
1893 1893 numdeltas = numrevs - numfull
1894 1894 numoprev = numprev - nump1prev - nump2prev
1895 1895 totalrawsize = datasize[2]
1896 1896 datasize[2] /= numrevs
1897 1897 fulltotal = fullsize[2]
1898 1898 fullsize[2] /= numfull
1899 1899 deltatotal = deltasize[2]
1900 1900 if numrevs - numfull > 0:
1901 1901 deltasize[2] /= numrevs - numfull
1902 1902 totalsize = fulltotal + deltatotal
1903 1903 avgchainlen = sum(chainlengths) / numrevs
1904 1904 maxchainlen = max(chainlengths)
1905 1905 maxchainspan = max(chainspans)
1906 1906 compratio = 1
1907 1907 if totalsize:
1908 1908 compratio = totalrawsize / totalsize
1909 1909
1910 1910 basedfmtstr = '%%%dd\n'
1911 1911 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1912 1912
1913 1913 def dfmtstr(max):
1914 1914 return basedfmtstr % len(str(max))
1915 1915 def pcfmtstr(max, padding=0):
1916 1916 return basepcfmtstr % (len(str(max)), ' ' * padding)
1917 1917
1918 1918 def pcfmt(value, total):
1919 1919 if total:
1920 1920 return (value, 100 * float(value) / total)
1921 1921 else:
1922 1922 return value, 100.0
1923 1923
1924 1924 ui.write(('format : %d\n') % format)
1925 1925 ui.write(('flags : %s\n') % ', '.join(flags))
1926 1926
1927 1927 ui.write('\n')
1928 1928 fmt = pcfmtstr(totalsize)
1929 1929 fmt2 = dfmtstr(totalsize)
1930 1930 ui.write(('revisions : ') + fmt2 % numrevs)
1931 1931 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1932 1932 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1933 1933 ui.write(('revisions : ') + fmt2 % numrevs)
1934 1934 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1935 1935 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1936 1936 ui.write(('revision size : ') + fmt2 % totalsize)
1937 1937 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1938 1938 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1939 1939
1940 1940 def fmtchunktype(chunktype):
1941 1941 if chunktype == 'empty':
1942 1942 return ' %s : ' % chunktype
1943 1943 elif chunktype in pycompat.bytestr(string.ascii_letters):
1944 1944 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1945 1945 else:
1946 1946 return ' 0x%s : ' % hex(chunktype)
1947 1947
1948 1948 ui.write('\n')
1949 1949 ui.write(('chunks : ') + fmt2 % numrevs)
1950 1950 for chunktype in sorted(chunktypecounts):
1951 1951 ui.write(fmtchunktype(chunktype))
1952 1952 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1953 1953 ui.write(('chunks size : ') + fmt2 % totalsize)
1954 1954 for chunktype in sorted(chunktypecounts):
1955 1955 ui.write(fmtchunktype(chunktype))
1956 1956 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1957 1957
1958 1958 ui.write('\n')
1959 1959 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1960 1960 ui.write(('avg chain length : ') + fmt % avgchainlen)
1961 1961 ui.write(('max chain length : ') + fmt % maxchainlen)
1962 1962 ui.write(('max chain reach : ') + fmt % maxchainspan)
1963 1963 ui.write(('compression ratio : ') + fmt % compratio)
1964 1964
1965 1965 if format > 0:
1966 1966 ui.write('\n')
1967 1967 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1968 1968 % tuple(datasize))
1969 1969 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1970 1970 % tuple(fullsize))
1971 1971 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1972 1972 % tuple(deltasize))
1973 1973
1974 1974 if numdeltas > 0:
1975 1975 ui.write('\n')
1976 1976 fmt = pcfmtstr(numdeltas)
1977 1977 fmt2 = pcfmtstr(numdeltas, 4)
1978 1978 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1979 1979 if numprev > 0:
1980 1980 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1981 1981 numprev))
1982 1982 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1983 1983 numprev))
1984 1984 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1985 1985 numprev))
1986 1986 if gdelta:
1987 1987 ui.write(('deltas against p1 : ')
1988 1988 + fmt % pcfmt(nump1, numdeltas))
1989 1989 ui.write(('deltas against p2 : ')
1990 1990 + fmt % pcfmt(nump2, numdeltas))
1991 1991 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1992 1992 numdeltas))
1993 1993
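# Hedged illustration, not part of the original module: the delta chain
# statistics above treat a full revision (delta base == nullrev) as the start
# of a new chain, and a delta as extending its base's chain by one. A
# standalone sketch over a plain list mapping rev -> delta base rev (with -1
# meaning "full revision"); names are illustrative only.
def _examplechainlengths(deltabases):
    lengths = []
    for rev, base in enumerate(deltabases):
        if base == -1:
            lengths.append(0)
        else:
            lengths.append(lengths[base] + 1)
    return lengths

# e.g. _examplechainlengths([-1, 0, 1, -1, 3]) == [0, 1, 2, 0, 1]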
1994 1994 @command('debugrevspec',
1995 1995 [('', 'optimize', None,
1996 1996 _('print parsed tree after optimizing (DEPRECATED)')),
1997 1997 ('', 'show-revs', True, _('print list of result revisions (default)')),
1998 1998 ('s', 'show-set', None, _('print internal representation of result set')),
1999 1999 ('p', 'show-stage', [],
2000 2000 _('print parsed tree at the given stage'), _('NAME')),
2001 2001 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2002 2002 ('', 'verify-optimized', False, _('verify optimized result')),
2003 2003 ],
2004 2004 ('REVSPEC'))
2005 2005 def debugrevspec(ui, repo, expr, **opts):
2006 2006 """parse and apply a revision specification
2007 2007
2008 2008 Use the -p/--show-stage option to print the parsed tree at the given stages.
2009 2009 Use -p all to print the tree at every stage.
2010 2010
2011 2011 Use the --no-show-revs option with -s or -p to print only the set
2012 2012 representation or the parsed tree, respectively.
2013 2013
2014 2014 Use --verify-optimized to compare the optimized result with the unoptimized
2015 2015 one. Returns 1 if the optimized result differs.
2016 2016 """
2017 2017 opts = pycompat.byteskwargs(opts)
2018 2018 aliases = ui.configitems('revsetalias')
2019 2019 stages = [
2020 2020 ('parsed', lambda tree: tree),
2021 2021 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2022 2022 ui.warn)),
2023 2023 ('concatenated', revsetlang.foldconcat),
2024 2024 ('analyzed', revsetlang.analyze),
2025 2025 ('optimized', revsetlang.optimize),
2026 2026 ]
2027 2027 if opts['no_optimized']:
2028 2028 stages = stages[:-1]
2029 2029 if opts['verify_optimized'] and opts['no_optimized']:
2030 2030 raise error.Abort(_('cannot use --verify-optimized with '
2031 2031 '--no-optimized'))
2032 2032 stagenames = set(n for n, f in stages)
2033 2033
2034 2034 showalways = set()
2035 2035 showchanged = set()
2036 2036 if ui.verbose and not opts['show_stage']:
2037 2037 # show parsed tree by --verbose (deprecated)
2038 2038 showalways.add('parsed')
2039 2039 showchanged.update(['expanded', 'concatenated'])
2040 2040 if opts['optimize']:
2041 2041 showalways.add('optimized')
2042 2042 if opts['show_stage'] and opts['optimize']:
2043 2043 raise error.Abort(_('cannot use --optimize with --show-stage'))
2044 2044 if opts['show_stage'] == ['all']:
2045 2045 showalways.update(stagenames)
2046 2046 else:
2047 2047 for n in opts['show_stage']:
2048 2048 if n not in stagenames:
2049 2049 raise error.Abort(_('invalid stage name: %s') % n)
2050 2050 showalways.update(opts['show_stage'])
2051 2051
2052 2052 treebystage = {}
2053 2053 printedtree = None
2054 2054 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2055 2055 for n, f in stages:
2056 2056 treebystage[n] = tree = f(tree)
2057 2057 if n in showalways or (n in showchanged and tree != printedtree):
2058 2058 if opts['show_stage'] or n != 'parsed':
2059 2059 ui.write(("* %s:\n") % n)
2060 2060 ui.write(revsetlang.prettyformat(tree), "\n")
2061 2061 printedtree = tree
2062 2062
2063 2063 if opts['verify_optimized']:
2064 2064 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2065 2065 brevs = revset.makematcher(treebystage['optimized'])(repo)
2066 2066 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2067 2067 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2068 2068 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2069 2069 arevs = list(arevs)
2070 2070 brevs = list(brevs)
2071 2071 if arevs == brevs:
2072 2072 return 0
2073 2073 ui.write(('--- analyzed\n'), label='diff.file_a')
2074 2074 ui.write(('+++ optimized\n'), label='diff.file_b')
2075 2075 sm = difflib.SequenceMatcher(None, arevs, brevs)
2076 2076 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2077 2077 if tag in ('delete', 'replace'):
2078 2078 for c in arevs[alo:ahi]:
2079 2079 ui.write('-%s\n' % c, label='diff.deleted')
2080 2080 if tag in ('insert', 'replace'):
2081 2081 for c in brevs[blo:bhi]:
2082 2082 ui.write('+%s\n' % c, label='diff.inserted')
2083 2083 if tag == 'equal':
2084 2084 for c in arevs[alo:ahi]:
2085 2085 ui.write(' %s\n' % c)
2086 2086 return 1
2087 2087
2088 2088 func = revset.makematcher(tree)
2089 2089 revs = func(repo)
2090 2090 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2091 2091 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2092 2092 if not opts['show_revs']:
2093 2093 return
2094 2094 for c in revs:
2095 2095 ui.write("%s\n" % c)
2096 2096
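# Hedged illustration, not part of the original module: debugrevspec above
# threads the parsed tree through a list of (name, transform) stages and
# remembers every intermediate result so individual stages can be printed or
# compared. A standalone sketch of that pipeline shape; names are
# illustrative only.
def _examplerunstages(value, stages):
    bystage = {}
    for name, transform in stages:
        value = transform(value)
        bystage[name] = value
    return value, bystage

# e.g. _examplerunstages(2, [('double', lambda x: x * 2),
#                            ('square', lambda x: x * x)])
# returns (16, {'double': 4, 'square': 16})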
2097 2097 @command('debugsetparents', [], _('REV1 [REV2]'))
2098 2098 def debugsetparents(ui, repo, rev1, rev2=None):
2099 2099 """manually set the parents of the current working directory
2100 2100
2101 2101 This is useful for writing repository conversion tools, but should
2102 2102 be used with care. For example, neither the working directory nor the
2103 2103 dirstate is updated, so file status may be incorrect after running this
2104 2104 command.
2105 2105
2106 2106 Returns 0 on success.
2107 2107 """
2108 2108
2109 2109 r1 = scmutil.revsingle(repo, rev1).node()
2110 2110 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2111 2111
2112 2112 with repo.wlock():
2113 2113 repo.setparents(r1, r2)
2114 2114
2115 2115 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2116 2116 def debugssl(ui, repo, source=None, **opts):
2117 2117 '''test a secure connection to a server
2118 2118
2119 2119 This builds the certificate chain for the server on Windows, installing the
2120 2120 missing intermediates and trusted root via Windows Update if necessary. It
2121 2121 does nothing on other platforms.
2122 2122
2123 2123 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2124 2124 that server is used. See :hg:`help urls` for more information.
2125 2125
2126 2126 If the update succeeds, retry the original operation. Otherwise, the cause
2127 2127 of the SSL error is likely another issue.
2128 2128 '''
2129 2129 if not pycompat.iswindows:
2130 2130 raise error.Abort(_('certificate chain building is only possible on '
2131 2131 'Windows'))
2132 2132
2133 2133 if not source:
2134 2134 if not repo:
2135 2135 raise error.Abort(_("there is no Mercurial repository here, and no "
2136 2136 "server specified"))
2137 2137 source = "default"
2138 2138
2139 2139 source, branches = hg.parseurl(ui.expandpath(source))
2140 2140 url = util.url(source)
2141 2141 addr = None
2142 2142
2143 2143 if url.scheme == 'https':
2144 2144 addr = (url.host, url.port or 443)
2145 2145 elif url.scheme == 'ssh':
2146 2146 addr = (url.host, url.port or 22)
2147 2147 else:
2148 2148 raise error.Abort(_("only https and ssh connections are supported"))
2149 2149
2150 2150 from . import win32
2151 2151
2152 2152 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2153 2153 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2154 2154
2155 2155 try:
2156 2156 s.connect(addr)
2157 2157 cert = s.getpeercert(True)
2158 2158
2159 2159 ui.status(_('checking the certificate chain for %s\n') % url.host)
2160 2160
2161 2161 complete = win32.checkcertificatechain(cert, build=False)
2162 2162
2163 2163 if not complete:
2164 2164 ui.status(_('certificate chain is incomplete, updating... '))
2165 2165
2166 2166 if not win32.checkcertificatechain(cert):
2167 2167 ui.status(_('failed.\n'))
2168 2168 else:
2169 2169 ui.status(_('done.\n'))
2170 2170 else:
2171 2171 ui.status(_('full certificate chain is available\n'))
2172 2172 finally:
2173 2173 s.close()
2174 2174
2175 2175 @command('debugsub',
2176 2176 [('r', 'rev', '',
2177 2177 _('revision to check'), _('REV'))],
2178 2178 _('[-r REV] [REV]'))
2179 2179 def debugsub(ui, repo, rev=None):
2180 2180 ctx = scmutil.revsingle(repo, rev, None)
2181 2181 for k, v in sorted(ctx.substate.items()):
2182 2182 ui.write(('path %s\n') % k)
2183 2183 ui.write((' source %s\n') % v[0])
2184 2184 ui.write((' revision %s\n') % v[1])
2185 2185
2186 2186 @command('debugsuccessorssets',
2187 2187 [('', 'closest', False, _('return closest successors sets only'))],
2188 2188 _('[REV]'))
2189 2189 def debugsuccessorssets(ui, repo, *revs, **opts):
2190 2190 """show set of successors for revision
2191 2191
2192 2192 A successors set of changeset A is a consistent group of revisions that
2193 2193 succeed A. It contains non-obsolete changesets only, unless the
2194 2194 --closest option is used.
2195 2195
2196 2196 In most cases a changeset A has a single successors set containing a single
2197 2197 successor (changeset A replaced by A').
2198 2198
2199 2199 A changeset that is made obsolete with no successors is called "pruned".
2200 2200 Such changesets have no successors sets at all.
2201 2201
2202 2202 A changeset that has been "split" will have a successors set containing
2203 2203 more than one successor.
2204 2204
2205 2205 A changeset that has been rewritten in multiple different ways is called
2206 2206 "divergent". Such changesets have multiple successor sets (each of which
2207 2207 may also be split, i.e. have multiple successors).
2208 2208
2209 2209 Results are displayed as follows::
2210 2210
2211 2211 <rev1>
2212 2212 <successors-1A>
2213 2213 <rev2>
2214 2214 <successors-2A>
2215 2215 <successors-2B1> <successors-2B2> <successors-2B3>
2216 2216
2217 2217 Here rev2 has two possible (i.e. divergent) successors sets. The first
2218 2218 holds one element, whereas the second holds three (i.e. the changeset has
2219 2219 been split).
2220 2220 """
2221 2221 # passed to successorssets caching computation from one call to another
2222 2222 cache = {}
2223 2223 ctx2str = str
2224 2224 node2str = short
2225 2225 if ui.debug():
2226 2226 def ctx2str(ctx):
2227 2227 return ctx.hex()
2228 2228 node2str = hex
2229 2229 for rev in scmutil.revrange(repo, revs):
2230 2230 ctx = repo[rev]
2231 2231 ui.write('%s\n' % ctx2str(ctx))
2232 2232 for succsset in obsutil.successorssets(repo, ctx.node(),
2233 2233 closest=opts['closest'],
2234 2234 cache=cache):
2235 2235 if succsset:
2236 2236 ui.write(' ')
2237 2237 ui.write(node2str(succsset[0]))
2238 2238 for node in succsset[1:]:
2239 2239 ui.write(' ')
2240 2240 ui.write(node2str(node))
2241 2241 ui.write('\n')
2242 2242
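# Hedged illustration, not part of the original module: the vocabulary in the
# docstring above (pruned / split / divergent), applied to an already
# computed list of successors sets. This does not reimplement
# obsutil.successorssets; it only names the shapes that function can return.
def _exampleclassify(successorssets):
    if not successorssets:
        return 'pruned'          # obsoleted with no successors
    if len(successorssets) > 1:
        return 'divergent'       # rewritten in multiple different ways
    if len(successorssets[0]) > 1:
        return 'split'           # one set, several successors
    return 'replaced'            # the common single-successor case

# e.g. _exampleclassify([('a',), ('b', 'c')]) == 'divergent'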
2243 2243 @command('debugtemplate',
2244 2244 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2245 2245 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2246 2246 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2247 2247 optionalrepo=True)
2248 2248 def debugtemplate(ui, repo, tmpl, **opts):
2249 2249 """parse and apply a template
2250 2250
2251 2251 If -r/--rev is given, the template is processed as a log template and
2252 2252 applied to the given changesets. Otherwise, it is processed as a generic
2253 2253 template.
2254 2254
2255 2255 Use --verbose to print the parsed tree.
2256 2256 """
2257 2257 revs = None
2258 2258 if opts[r'rev']:
2259 2259 if repo is None:
2260 2260 raise error.RepoError(_('there is no Mercurial repository here '
2261 2261 '(.hg not found)'))
2262 2262 revs = scmutil.revrange(repo, opts[r'rev'])
2263 2263
2264 2264 props = {}
2265 2265 for d in opts[r'define']:
2266 2266 try:
2267 2267 k, v = (e.strip() for e in d.split('=', 1))
2268 2268 if not k or k == 'ui':
2269 2269 raise ValueError
2270 2270 props[k] = v
2271 2271 except ValueError:
2272 2272 raise error.Abort(_('malformed keyword definition: %s') % d)
2273 2273
2274 2274 if ui.verbose:
2275 2275 aliases = ui.configitems('templatealias')
2276 2276 tree = templater.parse(tmpl)
2277 2277 ui.note(templater.prettyformat(tree), '\n')
2278 2278 newtree = templater.expandaliases(tree, aliases)
2279 2279 if newtree != tree:
2280 2280 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2281 2281
2282 2282 if revs is None:
2283 2283 t = formatter.maketemplater(ui, tmpl)
2284 2284 props['ui'] = ui
2285 2285 ui.write(t.render(props))
2286 2286 else:
2287 2287 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2288 2288 for r in revs:
2289 2289 displayer.show(repo[r], **pycompat.strkwargs(props))
2290 2290 displayer.close()
2291 2291
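# Hedged illustration, not part of the original module: the -D KEY=VALUE
# parsing above, as a standalone helper raising on malformed definitions
# ('ui' is reserved because it is injected as a template property). Names
# are illustrative only.
def _exampleparsedefines(defines):
    props = {}
    for d in defines:
        key, sep, value = d.partition('=')
        key, value = key.strip(), value.strip()
        if not sep or not key or key == 'ui':
            raise ValueError('malformed keyword definition: %s' % d)
        props[key] = value
    return props

# e.g. _exampleparsedefines(['author=alice']) == {'author': 'alice'}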
2292 2292 @command('debugupdatecaches', [])
2293 2293 def debugupdatecaches(ui, repo, *pats, **opts):
2294 2294 """warm all known caches in the repository"""
2295 2295 with repo.wlock(), repo.lock():
2296 2296 repo.updatecaches()
2297 2297
2298 2298 @command('debugupgraderepo', [
2299 2299 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2300 2300 ('', 'run', False, _('performs an upgrade')),
2301 2301 ])
2302 2302 def debugupgraderepo(ui, repo, run=False, optimize=None):
2303 2303 """upgrade a repository to use different features
2304 2304
2305 2305 If no arguments are specified, the repository is evaluated for upgrade
2306 2306 and a list of problems and potential optimizations is printed.
2307 2307
2308 2308 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2309 2309 can be influenced via additional arguments. More details will be provided
2310 2310 by the command output when run without ``--run``.
2311 2311
2312 2312 During the upgrade, the repository will be locked and no writes will be
2313 2313 allowed.
2314 2314
2315 2315 At the end of the upgrade, the repository may not be readable while new
2316 2316 repository data is swapped in. This window will be as long as it takes to
2317 2317 rename some directories inside the ``.hg`` directory. On most machines, this
2318 2318 should complete almost instantaneously and the chances of a consumer being
2319 2319 unable to access the repository should be low.
2320 2320 """
2321 2321 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2322 2322
2323 2323 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2324 2324 inferrepo=True)
2325 2325 def debugwalk(ui, repo, *pats, **opts):
2326 2326 """show how files match on given patterns"""
2327 2327 opts = pycompat.byteskwargs(opts)
2328 2328 m = scmutil.match(repo[None], pats, opts)
2329 2329 ui.write(('matcher: %r\n' % m))
2330 2330 items = list(repo[None].walk(m))
2331 2331 if not items:
2332 2332 return
2333 2333 f = lambda fn: fn
2334 2334 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2335 2335 f = lambda fn: util.normpath(fn)
2336 2336 fmt = 'f %%-%ds %%-%ds %%s' % (
2337 2337 max([len(abs) for abs in items]),
2338 2338 max([len(m.rel(abs)) for abs in items]))
2339 2339 for abs in items:
2340 2340 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2341 2341 ui.write("%s\n" % line.rstrip())
2342 2342
2343 2343 @command('debugwireargs',
2344 2344 [('', 'three', '', 'three'),
2345 2345 ('', 'four', '', 'four'),
2346 2346 ('', 'five', '', 'five'),
2347 2347 ] + cmdutil.remoteopts,
2348 2348 _('REPO [OPTIONS]... [ONE [TWO]]'),
2349 2349 norepo=True)
2350 2350 def debugwireargs(ui, repopath, *vals, **opts):
2351 2351 opts = pycompat.byteskwargs(opts)
2352 2352 repo = hg.peer(ui, opts, repopath)
2353 2353 for opt in cmdutil.remoteopts:
2354 2354 del opts[opt[1]]
2355 2355 args = {}
2356 2356 for k, v in opts.iteritems():
2357 2357 if v:
2358 2358 args[k] = v
2359 2359 # run twice to check that we don't mess up the stream for the next command
2360 2360 res1 = repo.debugwireargs(*vals, **args)
2361 2361 res2 = repo.debugwireargs(*vals, **args)
2362 2362 ui.write("%s\n" % res1)
2363 2363 if res1 != res2:
2364 2364 ui.warn("%s\n" % res2)