py3: pass the memoryview object into bytes() to get the value
Pulkit Goyal
r33106:2f812b0d default
@@ -1,2244 +1,2244 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 phases,
51 51 policy,
52 52 pvec,
53 53 pycompat,
54 54 registrar,
55 55 repair,
56 56 revlog,
57 57 revset,
58 58 revsetlang,
59 59 scmutil,
60 60 setdiscovery,
61 61 simplemerge,
62 62 smartset,
63 63 sslutil,
64 64 streamclone,
65 65 templater,
66 66 treediscovery,
67 67 upgrade,
68 68 util,
69 69 vfs as vfsmod,
70 70 )
71 71
72 72 release = lockmod.release
73 73
74 74 command = registrar.command()
75 75
76 76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 77 def debugancestor(ui, repo, *args):
78 78 """find the ancestor revision of two revisions in a given index"""
79 79 if len(args) == 3:
80 80 index, rev1, rev2 = args
81 81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 82 lookup = r.lookup
83 83 elif len(args) == 2:
84 84 if not repo:
85 85 raise error.Abort(_('there is no Mercurial repository here '
86 86 '(.hg not found)'))
87 87 rev1, rev2 = args
88 88 r = repo.changelog
89 89 lookup = repo.lookup
90 90 else:
91 91 raise error.Abort(_('either two or three arguments required'))
92 92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94 94
95 95 @command('debugapplystreamclonebundle', [], 'FILE')
96 96 def debugapplystreamclonebundle(ui, repo, fname):
97 97 """apply a stream clone bundle file"""
98 98 f = hg.openpath(ui, fname)
99 99 gen = exchange.readbundle(ui, f, fname)
100 100 gen.apply(repo)
101 101
102 102 @command('debugbuilddag',
103 103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 105 ('n', 'new-file', None, _('add new file at each rev'))],
106 106 _('[OPTION]... [TEXT]'))
107 107 def debugbuilddag(ui, repo, text=None,
108 108 mergeable_file=False,
109 109 overwritten_file=False,
110 110 new_file=False):
111 111 """builds a repo with a given DAG from scratch in the current empty repo
112 112
113 113 The description of the DAG is read from stdin if not given on the
114 114 command line.
115 115
116 116 Elements:
117 117
118 118 - "+n" is a linear run of n nodes based on the current default parent
119 119 - "." is a single node based on the current default parent
120 120 - "$" resets the default parent to null (implied at the start);
121 121 otherwise the default parent is always the last node created
122 122 - "<p" sets the default parent to the backref p
123 123 - "*p" is a fork at parent p, which is a backref
124 124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 125 - "/p2" is a merge of the preceding node and p2
126 126 - ":tag" defines a local tag for the preceding node
127 127 - "@branch" sets the named branch for subsequent nodes
128 128 - "#...\\n" is a comment up to the end of the line
129 129
130 130 Whitespace between the above elements is ignored.
131 131
132 132 A backref is either
133 133
134 134 - a number n, which references the node curr-n, where curr is the current
135 135 node, or
136 136 - the name of a local tag you placed earlier using ":tag", or
137 137 - empty to denote the default parent.
138 138
139 139     All string-valued elements are either strictly alphanumeric, or must
140 140     be enclosed in double quotes ("..."), with "\\" as the escape character.
141 141 """
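    # Hedged illustration (not part of the original source): by my reading of
    # the grammar described above, running this in a fresh, empty repo
    #
    #   hg init sandbox && cd sandbox
    #   hg debugbuilddag '+3 <2 +1 /2:m'
    #
    # should build revs 0-1-2 linearly, reset the default parent to rev 0
    # ("<2"), add rev 3 on top of it, then merge rev 3 with rev 1 ("/2"),
    # tagging the resulting merge as "m".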
142 142
143 143 if text is None:
144 144 ui.status(_("reading DAG from stdin\n"))
145 145 text = ui.fin.read()
146 146
147 147 cl = repo.changelog
148 148 if len(cl) > 0:
149 149 raise error.Abort(_('repository is not empty'))
150 150
151 151 # determine number of revs in DAG
152 152 total = 0
153 153 for type, data in dagparser.parsedag(text):
154 154 if type == 'n':
155 155 total += 1
156 156
157 157 if mergeable_file:
158 158 linesperrev = 2
159 159 # make a file with k lines per rev
160 160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 161 initialmergedlines.append("")
162 162
163 163 tags = []
164 164
165 165 wlock = lock = tr = None
166 166 try:
167 167 wlock = repo.wlock()
168 168 lock = repo.lock()
169 169 tr = repo.transaction("builddag")
170 170
171 171 at = -1
172 172 atbranch = 'default'
173 173 nodeids = []
174 174 id = 0
175 175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 176 for type, data in dagparser.parsedag(text):
177 177 if type == 'n':
178 178 ui.note(('node %s\n' % str(data)))
179 179 id, ps = data
180 180
181 181 files = []
182 182 fctxs = {}
183 183
184 184 p2 = None
185 185 if mergeable_file:
186 186 fn = "mf"
187 187 p1 = repo[ps[0]]
188 188 if len(ps) > 1:
189 189 p2 = repo[ps[1]]
190 190 pa = p1.ancestor(p2)
191 191 base, local, other = [x[fn].data() for x in (pa, p1,
192 192 p2)]
193 193 m3 = simplemerge.Merge3Text(base, local, other)
194 194 ml = [l.strip() for l in m3.merge_lines()]
195 195 ml.append("")
196 196 elif at > 0:
197 197 ml = p1[fn].data().split("\n")
198 198 else:
199 199 ml = initialmergedlines
200 200 ml[id * linesperrev] += " r%i" % id
201 201 mergedtext = "\n".join(ml)
202 202 files.append(fn)
203 203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204 204
205 205 if overwritten_file:
206 206 fn = "of"
207 207 files.append(fn)
208 208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209 209
210 210 if new_file:
211 211 fn = "nf%i" % id
212 212 files.append(fn)
213 213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 214 if len(ps) > 1:
215 215 if not p2:
216 216 p2 = repo[ps[1]]
217 217 for fn in p2:
218 218 if fn.startswith("nf"):
219 219 files.append(fn)
220 220 fctxs[fn] = p2[fn]
221 221
222 222 def fctxfn(repo, cx, path):
223 223 return fctxs.get(path)
224 224
225 225 if len(ps) == 0 or ps[0] < 0:
226 226 pars = [None, None]
227 227 elif len(ps) == 1:
228 228 pars = [nodeids[ps[0]], None]
229 229 else:
230 230 pars = [nodeids[p] for p in ps]
231 231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 232 date=(id, 0),
233 233 user="debugbuilddag",
234 234 extra={'branch': atbranch})
235 235 nodeid = repo.commitctx(cx)
236 236 nodeids.append(nodeid)
237 237 at = id
238 238 elif type == 'l':
239 239 id, name = data
240 240 ui.note(('tag %s\n' % name))
241 241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 242 elif type == 'a':
243 243 ui.note(('branch %s\n' % data))
244 244 atbranch = data
245 245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 246 tr.close()
247 247
248 248 if tags:
249 249 repo.vfs.write("localtags", "".join(tags))
250 250 finally:
251 251 ui.progress(_('building'), None)
252 252 release(tr, lock, wlock)
253 253
254 254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 255 indent_string = ' ' * indent
256 256 if all:
257 257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 258 % indent_string)
259 259
260 260 def showchunks(named):
261 261 ui.write("\n%s%s\n" % (indent_string, named))
262 262 chain = None
263 263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 264 node = chunkdata['node']
265 265 p1 = chunkdata['p1']
266 266 p2 = chunkdata['p2']
267 267 cs = chunkdata['cs']
268 268 deltabase = chunkdata['deltabase']
269 269 delta = chunkdata['delta']
270 270 ui.write("%s%s %s %s %s %s %s\n" %
271 271 (indent_string, hex(node), hex(p1), hex(p2),
272 272 hex(cs), hex(deltabase), len(delta)))
273 273 chain = node
274 274
275 275 chunkdata = gen.changelogheader()
276 276 showchunks("changelog")
277 277 chunkdata = gen.manifestheader()
278 278 showchunks("manifest")
279 279 for chunkdata in iter(gen.filelogheader, {}):
280 280 fname = chunkdata['filename']
281 281 showchunks(fname)
282 282 else:
283 283 if isinstance(gen, bundle2.unbundle20):
284 284 raise error.Abort(_('use debugbundle2 for this file'))
285 285 chunkdata = gen.changelogheader()
286 286 chain = None
287 287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 288 node = chunkdata['node']
289 289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 290 chain = node
291 291
292 292 def _debugobsmarkers(ui, part, indent=0, **opts):
293 293 """display version and markers contained in 'data'"""
294 294 opts = pycompat.byteskwargs(opts)
295 295 data = part.read()
296 296 indent_string = ' ' * indent
297 297 try:
298 298 version, markers = obsolete._readmarkers(data)
299 299 except error.UnknownVersion as exc:
300 300 msg = "%sunsupported version: %s (%d bytes)\n"
301 301 msg %= indent_string, exc.version, len(data)
302 302 ui.write(msg)
303 303 else:
304 304 msg = "%sversion: %s (%d bytes)\n"
305 305 msg %= indent_string, version, len(data)
306 306 ui.write(msg)
307 307 fm = ui.formatter('debugobsolete', opts)
308 308 for rawmarker in sorted(markers):
309 309 m = obsolete.marker(None, rawmarker)
310 310 fm.startitem()
311 311 fm.plain(indent_string)
312 312 cmdutil.showmarker(fm, m)
313 313 fm.end()
314 314
315 315 def _debugphaseheads(ui, data, indent=0):
316 316     """display phase heads contained in 'data'"""
317 317 indent_string = ' ' * indent
318 318 headsbyphase = bundle2._readphaseheads(data)
319 319 for phase in phases.allphases:
320 320 for head in headsbyphase[phase]:
321 321 ui.write(indent_string)
322 322 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
323 323
324 324 def _debugbundle2(ui, gen, all=None, **opts):
325 325 """lists the contents of a bundle2"""
326 326 if not isinstance(gen, bundle2.unbundle20):
327 327 raise error.Abort(_('not a bundle2 file'))
328 328 ui.write(('Stream params: %s\n' % repr(gen.params)))
329 329 parttypes = opts.get(r'part_type', [])
330 330 for part in gen.iterparts():
331 331 if parttypes and part.type not in parttypes:
332 332 continue
333 333 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
334 334 if part.type == 'changegroup':
335 335 version = part.params.get('version', '01')
336 336 cg = changegroup.getunbundler(version, part, 'UN')
337 337 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
338 338 if part.type == 'obsmarkers':
339 339 _debugobsmarkers(ui, part, indent=4, **opts)
340 340 if part.type == 'phase-heads':
341 341 _debugphaseheads(ui, part, indent=4)
342 342
343 343 @command('debugbundle',
344 344 [('a', 'all', None, _('show all details')),
345 345 ('', 'part-type', [], _('show only the named part type')),
346 346 ('', 'spec', None, _('print the bundlespec of the bundle'))],
347 347 _('FILE'),
348 348 norepo=True)
349 349 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
350 350 """lists the contents of a bundle"""
351 351 with hg.openpath(ui, bundlepath) as f:
352 352 if spec:
353 353 spec = exchange.getbundlespec(ui, f)
354 354 ui.write('%s\n' % spec)
355 355 return
356 356
357 357 gen = exchange.readbundle(ui, f, bundlepath)
358 358 if isinstance(gen, bundle2.unbundle20):
359 359 return _debugbundle2(ui, gen, all=all, **opts)
360 360 _debugchangegroup(ui, gen, all=all, **opts)
361 361
362 362 @command('debugcheckstate', [], '')
363 363 def debugcheckstate(ui, repo):
364 364 """validate the correctness of the current dirstate"""
365 365 parent1, parent2 = repo.dirstate.parents()
366 366 m1 = repo[parent1].manifest()
367 367 m2 = repo[parent2].manifest()
368 368 errors = 0
369 369 for f in repo.dirstate:
370 370 state = repo.dirstate[f]
371 371 if state in "nr" and f not in m1:
372 372 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
373 373 errors += 1
374 374 if state in "a" and f in m1:
375 375 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
376 376 errors += 1
377 377 if state in "m" and f not in m1 and f not in m2:
378 378 ui.warn(_("%s in state %s, but not in either manifest\n") %
379 379 (f, state))
380 380 errors += 1
381 381 for f in m1:
382 382 state = repo.dirstate[f]
383 383 if state not in "nrm":
384 384             ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
385 385 errors += 1
386 386 if errors:
387 387         errstr = _(".hg/dirstate inconsistent with current parent's manifest")
388 388         raise error.Abort(errstr)
389 389
390 390 @command('debugcolor',
391 391 [('', 'style', None, _('show all configured styles'))],
392 392 'hg debugcolor')
393 393 def debugcolor(ui, repo, **opts):
394 394 """show available color, effects or style"""
395 395 ui.write(('color mode: %s\n') % ui._colormode)
396 396 if opts.get(r'style'):
397 397 return _debugdisplaystyle(ui)
398 398 else:
399 399 return _debugdisplaycolor(ui)
400 400
401 401 def _debugdisplaycolor(ui):
402 402 ui = ui.copy()
403 403 ui._styles.clear()
404 404 for effect in color._activeeffects(ui).keys():
405 405 ui._styles[effect] = effect
406 406 if ui._terminfoparams:
407 407 for k, v in ui.configitems('color'):
408 408 if k.startswith('color.'):
409 409 ui._styles[k] = k[6:]
410 410 elif k.startswith('terminfo.'):
411 411 ui._styles[k] = k[9:]
412 412 ui.write(_('available colors:\n'))
413 413     # sort labels with a '_' after the others to group '_background' entries.
414 414 items = sorted(ui._styles.items(),
415 415 key=lambda i: ('_' in i[0], i[0], i[1]))
416 416 for colorname, label in items:
417 417 ui.write(('%s\n') % colorname, label=label)
418 418
419 419 def _debugdisplaystyle(ui):
420 420 ui.write(_('available style:\n'))
421 421 width = max(len(s) for s in ui._styles)
422 422 for label, effects in sorted(ui._styles.items()):
423 423 ui.write('%s' % label, label=label)
424 424 if effects:
425 425 # 50
426 426 ui.write(': ')
427 427 ui.write(' ' * (max(0, width - len(label))))
428 428 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
429 429 ui.write('\n')
430 430
431 431 @command('debugcreatestreamclonebundle', [], 'FILE')
432 432 def debugcreatestreamclonebundle(ui, repo, fname):
433 433 """create a stream clone bundle file
434 434
435 435 Stream bundles are special bundles that are essentially archives of
436 436 revlog files. They are commonly used for cloning very quickly.
437 437 """
438 438 # TODO we may want to turn this into an abort when this functionality
439 439 # is moved into `hg bundle`.
440 440 if phases.hassecret(repo):
441 441 ui.warn(_('(warning: stream clone bundle will contain secret '
442 442 'revisions)\n'))
443 443
444 444 requirements, gen = streamclone.generatebundlev1(repo)
445 445 changegroup.writechunks(ui, gen, fname)
446 446
447 447 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
448 448
449 449 @command('debugdag',
450 450 [('t', 'tags', None, _('use tags as labels')),
451 451 ('b', 'branches', None, _('annotate with branch names')),
452 452 ('', 'dots', None, _('use dots for runs')),
453 453 ('s', 'spaces', None, _('separate elements by spaces'))],
454 454 _('[OPTION]... [FILE [REV]...]'),
455 455 optionalrepo=True)
456 456 def debugdag(ui, repo, file_=None, *revs, **opts):
457 457 """format the changelog or an index DAG as a concise textual description
458 458
459 459 If you pass a revlog index, the revlog's DAG is emitted. If you list
460 460 revision numbers, they get labeled in the output as rN.
461 461
462 462 Otherwise, the changelog DAG of the current repo is emitted.
463 463 """
464 464 spaces = opts.get(r'spaces')
465 465 dots = opts.get(r'dots')
466 466 if file_:
467 467 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
468 468 file_)
469 469 revs = set((int(r) for r in revs))
470 470 def events():
471 471 for r in rlog:
472 472 yield 'n', (r, list(p for p in rlog.parentrevs(r)
473 473 if p != -1))
474 474 if r in revs:
475 475 yield 'l', (r, "r%i" % r)
476 476 elif repo:
477 477 cl = repo.changelog
478 478 tags = opts.get(r'tags')
479 479 branches = opts.get(r'branches')
480 480 if tags:
481 481 labels = {}
482 482 for l, n in repo.tags().items():
483 483 labels.setdefault(cl.rev(n), []).append(l)
484 484 def events():
485 485 b = "default"
486 486 for r in cl:
487 487 if branches:
488 488 newb = cl.read(cl.node(r))[5]['branch']
489 489 if newb != b:
490 490 yield 'a', newb
491 491 b = newb
492 492 yield 'n', (r, list(p for p in cl.parentrevs(r)
493 493 if p != -1))
494 494 if tags:
495 495 ls = labels.get(r)
496 496 if ls:
497 497 for l in ls:
498 498 yield 'l', (r, l)
499 499 else:
500 500 raise error.Abort(_('need repo for changelog dag'))
501 501
502 502 for line in dagparser.dagtextlines(events(),
503 503 addspaces=spaces,
504 504 wraplabels=True,
505 505 wrapannotations=True,
506 506 wrapnonlinear=dots,
507 507 usedots=dots,
508 508 maxlinewidth=70):
509 509 ui.write(line)
510 510 ui.write("\n")
511 511
512 512 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
513 513 def debugdata(ui, repo, file_, rev=None, **opts):
514 514 """dump the contents of a data file revision"""
515 515 opts = pycompat.byteskwargs(opts)
516 516 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
517 517 if rev is not None:
518 518 raise error.CommandError('debugdata', _('invalid arguments'))
519 519 file_, rev = None, file_
520 520 elif rev is None:
521 521 raise error.CommandError('debugdata', _('invalid arguments'))
522 522 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
523 523 try:
524 524 ui.write(r.revision(r.lookup(rev), raw=True))
525 525 except KeyError:
526 526 raise error.Abort(_('invalid revision identifier %s') % rev)
527 527
528 528 @command('debugdate',
529 529 [('e', 'extended', None, _('try extended date formats'))],
530 530 _('[-e] DATE [RANGE]'),
531 531 norepo=True, optionalrepo=True)
532 532 def debugdate(ui, date, range=None, **opts):
533 533 """parse and display a date"""
534 534 if opts[r"extended"]:
535 535 d = util.parsedate(date, util.extendeddateformats)
536 536 else:
537 537 d = util.parsedate(date)
538 538 ui.write(("internal: %s %s\n") % d)
539 539 ui.write(("standard: %s\n") % util.datestr(d))
540 540 if range:
541 541 m = util.matchdate(range)
542 542 ui.write(("match: %s\n") % m(d[0]))
543 543
544 544 @command('debugdeltachain',
545 545 cmdutil.debugrevlogopts + cmdutil.formatteropts,
546 546 _('-c|-m|FILE'),
547 547 optionalrepo=True)
548 548 def debugdeltachain(ui, repo, file_=None, **opts):
549 549 """dump information about delta chains in a revlog
550 550
551 551 Output can be templatized. Available template keywords are:
552 552
553 553 :``rev``: revision number
554 554 :``chainid``: delta chain identifier (numbered by unique base)
555 555 :``chainlen``: delta chain length to this revision
556 556 :``prevrev``: previous revision in delta chain
557 557 :``deltatype``: role of delta / how it was computed
558 558 :``compsize``: compressed size of revision
559 559 :``uncompsize``: uncompressed size of revision
560 560 :``chainsize``: total size of compressed revisions in chain
561 561 :``chainratio``: total chain size divided by uncompressed revision size
562 562 (new delta chains typically start at ratio 2.00)
563 563 :``lindist``: linear distance from base revision in delta chain to end
564 564 of this revision
565 565 :``extradist``: total size of revisions not part of this delta chain from
566 566 base of delta chain to end of this revision; a measurement
567 567 of how much extra data we need to read/seek across to read
568 568 the delta chain for this revision
569 569 :``extraratio``: extradist divided by chainsize; another representation of
570 570 how much unrelated data is needed to load this delta chain
571 571 """
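    # Hedged usage sketch (assumed, not from the original source): the template
    # keywords above can be combined with -T from the formatter options, e.g.
    #
    #   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
    #
    # which prints one line per manifest revision with its delta chain id,
    # chain length and chain-size/uncompressed-size ratio.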
572 572 opts = pycompat.byteskwargs(opts)
573 573 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
574 574 index = r.index
575 575 generaldelta = r.version & revlog.FLAG_GENERALDELTA
576 576
577 577 def revinfo(rev):
578 578 e = index[rev]
579 579 compsize = e[1]
580 580 uncompsize = e[2]
581 581 chainsize = 0
582 582
583 583 if generaldelta:
584 584 if e[3] == e[5]:
585 585 deltatype = 'p1'
586 586 elif e[3] == e[6]:
587 587 deltatype = 'p2'
588 588 elif e[3] == rev - 1:
589 589 deltatype = 'prev'
590 590 elif e[3] == rev:
591 591 deltatype = 'base'
592 592 else:
593 593 deltatype = 'other'
594 594 else:
595 595 if e[3] == rev:
596 596 deltatype = 'base'
597 597 else:
598 598 deltatype = 'prev'
599 599
600 600 chain = r._deltachain(rev)[0]
601 601 for iterrev in chain:
602 602 e = index[iterrev]
603 603 chainsize += e[1]
604 604
605 605 return compsize, uncompsize, deltatype, chain, chainsize
606 606
607 607 fm = ui.formatter('debugdeltachain', opts)
608 608
609 609 fm.plain(' rev chain# chainlen prev delta '
610 610 'size rawsize chainsize ratio lindist extradist '
611 611 'extraratio\n')
612 612
613 613 chainbases = {}
614 614 for rev in r:
615 615 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
616 616 chainbase = chain[0]
617 617 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
618 618 basestart = r.start(chainbase)
619 619 revstart = r.start(rev)
620 620 lineardist = revstart + comp - basestart
621 621 extradist = lineardist - chainsize
622 622 try:
623 623 prevrev = chain[-2]
624 624 except IndexError:
625 625 prevrev = -1
626 626
627 627 chainratio = float(chainsize) / float(uncomp)
628 628 extraratio = float(extradist) / float(chainsize)
629 629
630 630 fm.startitem()
631 631 fm.write('rev chainid chainlen prevrev deltatype compsize '
632 632 'uncompsize chainsize chainratio lindist extradist '
633 633 'extraratio',
634 634 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
635 635 rev, chainid, len(chain), prevrev, deltatype, comp,
636 636 uncomp, chainsize, chainratio, lineardist, extradist,
637 637 extraratio,
638 638 rev=rev, chainid=chainid, chainlen=len(chain),
639 639 prevrev=prevrev, deltatype=deltatype, compsize=comp,
640 640 uncompsize=uncomp, chainsize=chainsize,
641 641 chainratio=chainratio, lindist=lineardist,
642 642 extradist=extradist, extraratio=extraratio)
643 643
644 644 fm.end()
645 645
646 646 @command('debugdirstate|debugstate',
647 647 [('', 'nodates', None, _('do not display the saved mtime')),
648 648 ('', 'datesort', None, _('sort by saved mtime'))],
649 649 _('[OPTION]...'))
650 650 def debugstate(ui, repo, **opts):
651 651 """show the contents of the current dirstate"""
652 652
653 653 nodates = opts.get(r'nodates')
654 654 datesort = opts.get(r'datesort')
655 655
656 656 timestr = ""
657 657 if datesort:
658 658 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
659 659 else:
660 660 keyfunc = None # sort by filename
661 661 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
662 662 if ent[3] == -1:
663 663 timestr = 'unset '
664 664 elif nodates:
665 665 timestr = 'set '
666 666 else:
667 667 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
668 668 time.localtime(ent[3]))
669 669 if ent[1] & 0o20000:
670 670 mode = 'lnk'
671 671 else:
672 672 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
673 673 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
674 674 for f in repo.dirstate.copies():
675 675 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
676 676
677 677 @command('debugdiscovery',
678 678 [('', 'old', None, _('use old-style discovery')),
679 679 ('', 'nonheads', None,
680 680 _('use old-style discovery with non-heads included')),
681 681 ] + cmdutil.remoteopts,
682 682 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
683 683 def debugdiscovery(ui, repo, remoteurl="default", **opts):
684 684 """runs the changeset discovery protocol in isolation"""
685 685 opts = pycompat.byteskwargs(opts)
686 686 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
687 687 opts.get('branch'))
688 688 remote = hg.peer(repo, opts, remoteurl)
689 689 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
690 690
691 691 # make sure tests are repeatable
692 692 random.seed(12323)
693 693
694 694 def doit(localheads, remoteheads, remote=remote):
695 695 if opts.get('old'):
696 696 if localheads:
697 697 raise error.Abort('cannot use localheads with old style '
698 698 'discovery')
699 699 if not util.safehasattr(remote, 'branches'):
700 700 # enable in-client legacy support
701 701 remote = localrepo.locallegacypeer(remote.local())
702 702 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
703 703 force=True)
704 704 common = set(common)
705 705 if not opts.get('nonheads'):
706 706 ui.write(("unpruned common: %s\n") %
707 707 " ".join(sorted(short(n) for n in common)))
708 708 dag = dagutil.revlogdag(repo.changelog)
709 709 all = dag.ancestorset(dag.internalizeall(common))
710 710 common = dag.externalizeall(dag.headsetofconnecteds(all))
711 711 else:
712 712 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
713 713 common = set(common)
714 714 rheads = set(hds)
715 715 lheads = set(repo.heads())
716 716 ui.write(("common heads: %s\n") %
717 717 " ".join(sorted(short(n) for n in common)))
718 718 if lheads <= common:
719 719 ui.write(("local is subset\n"))
720 720 elif rheads <= common:
721 721 ui.write(("remote is subset\n"))
722 722
723 723 serverlogs = opts.get('serverlog')
724 724 if serverlogs:
725 725 for filename in serverlogs:
726 726 with open(filename, 'r') as logfile:
727 727 line = logfile.readline()
728 728 while line:
729 729 parts = line.strip().split(';')
730 730 op = parts[1]
731 731 if op == 'cg':
732 732 pass
733 733 elif op == 'cgss':
734 734 doit(parts[2].split(' '), parts[3].split(' '))
735 735 elif op == 'unb':
736 736 doit(parts[3].split(' '), parts[2].split(' '))
737 737 line = logfile.readline()
738 738 else:
739 739 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
740 740 opts.get('remote_head'))
741 741 localrevs = opts.get('local_head')
742 742 doit(localrevs, remoterevs)
743 743
744 744 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
745 745 def debugextensions(ui, **opts):
746 746 '''show information about active extensions'''
747 747 opts = pycompat.byteskwargs(opts)
748 748 exts = extensions.extensions(ui)
749 749 hgver = util.version()
750 750 fm = ui.formatter('debugextensions', opts)
751 751 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
752 752 isinternal = extensions.ismoduleinternal(extmod)
753 753 extsource = pycompat.fsencode(extmod.__file__)
754 754 if isinternal:
755 755 exttestedwith = [] # never expose magic string to users
756 756 else:
757 757 exttestedwith = getattr(extmod, 'testedwith', '').split()
758 758 extbuglink = getattr(extmod, 'buglink', None)
759 759
760 760 fm.startitem()
761 761
762 762 if ui.quiet or ui.verbose:
763 763 fm.write('name', '%s\n', extname)
764 764 else:
765 765 fm.write('name', '%s', extname)
766 766 if isinternal or hgver in exttestedwith:
767 767 fm.plain('\n')
768 768 elif not exttestedwith:
769 769 fm.plain(_(' (untested!)\n'))
770 770 else:
771 771 lasttestedversion = exttestedwith[-1]
772 772 fm.plain(' (%s!)\n' % lasttestedversion)
773 773
774 774 fm.condwrite(ui.verbose and extsource, 'source',
775 775 _(' location: %s\n'), extsource or "")
776 776
777 777 if ui.verbose:
778 778 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
779 779 fm.data(bundled=isinternal)
780 780
781 781 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
782 782 _(' tested with: %s\n'),
783 783 fm.formatlist(exttestedwith, name='ver'))
784 784
785 785 fm.condwrite(ui.verbose and extbuglink, 'buglink',
786 786 _(' bug reporting: %s\n'), extbuglink or "")
787 787
788 788 fm.end()
789 789
790 790 @command('debugfileset',
791 791 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
792 792 _('[-r REV] FILESPEC'))
793 793 def debugfileset(ui, repo, expr, **opts):
794 794 '''parse and apply a fileset specification'''
795 795 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
796 796 if ui.verbose:
797 797 tree = fileset.parse(expr)
798 798 ui.note(fileset.prettyformat(tree), "\n")
799 799
800 800 for f in ctx.getfileset(expr):
801 801 ui.write("%s\n" % f)
802 802
803 803 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
804 804 def debugfsinfo(ui, path="."):
805 805 """show information detected about current filesystem"""
806 806 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
807 807 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
808 808 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
809 809 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
810 810 casesensitive = '(unknown)'
811 811 try:
812 812 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
813 813 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
814 814 except OSError:
815 815 pass
816 816 ui.write(('case-sensitive: %s\n') % casesensitive)
817 817
818 818 @command('debuggetbundle',
819 819 [('H', 'head', [], _('id of head node'), _('ID')),
820 820 ('C', 'common', [], _('id of common node'), _('ID')),
821 821 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
822 822 _('REPO FILE [-H|-C ID]...'),
823 823 norepo=True)
824 824 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
825 825 """retrieves a bundle from a repo
826 826
827 827 Every ID must be a full-length hex node id string. Saves the bundle to the
828 828 given file.
829 829 """
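    # Hedged example (assumed): fetch everything reachable from one remote head
    # into a gzip-compressed bundle file; the node id below is a placeholder
    # standing in for a real full-length hex id:
    #
    #   hg debuggetbundle http://example.com/repo out.hg \
    #       -H 0123456789abcdef0123456789abcdef01234567 -t gzip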
830 830 opts = pycompat.byteskwargs(opts)
831 831 repo = hg.peer(ui, opts, repopath)
832 832 if not repo.capable('getbundle'):
833 833 raise error.Abort("getbundle() not supported by target repository")
834 834 args = {}
835 835 if common:
836 836 args[r'common'] = [bin(s) for s in common]
837 837 if head:
838 838 args[r'heads'] = [bin(s) for s in head]
839 839 # TODO: get desired bundlecaps from command line.
840 840 args[r'bundlecaps'] = None
841 841 bundle = repo.getbundle('debug', **args)
842 842
843 843 bundletype = opts.get('type', 'bzip2').lower()
844 844 btypes = {'none': 'HG10UN',
845 845 'bzip2': 'HG10BZ',
846 846 'gzip': 'HG10GZ',
847 847 'bundle2': 'HG20'}
848 848 bundletype = btypes.get(bundletype)
849 849 if bundletype not in bundle2.bundletypes:
850 850 raise error.Abort(_('unknown bundle type specified with --type'))
851 851 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
852 852
853 853 @command('debugignore', [], '[FILE]')
854 854 def debugignore(ui, repo, *files, **opts):
855 855 """display the combined ignore pattern and information about ignored files
856 856
857 857     With no arguments, display the combined ignore pattern.
858 858 
859 859     Given space-separated file names, show whether each file is ignored and,
860 860     if so, show the ignore rule (file and line number) that matched it.
861 861 """
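    # Hedged example (assumed): with a .hgignore containing the single pattern
    # "*.pyc", the output would look roughly like
    #
    #   $ hg debugignore build/x.pyc
    #   build/x.pyc is ignored
    #   (ignore rule in .hgignore, line 1: '*.pyc')
    #
    # matching the ui.write() calls below; the ignore file may be reported
    # with its full path.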
862 862 ignore = repo.dirstate._ignore
863 863 if not files:
864 864 # Show all the patterns
865 865 ui.write("%s\n" % repr(ignore))
866 866 else:
867 867 for f in files:
868 868 nf = util.normpath(f)
869 869 ignored = None
870 870 ignoredata = None
871 871 if nf != '.':
872 872 if ignore(nf):
873 873 ignored = nf
874 874 ignoredata = repo.dirstate._ignorefileandline(nf)
875 875 else:
876 876 for p in util.finddirs(nf):
877 877 if ignore(p):
878 878 ignored = p
879 879 ignoredata = repo.dirstate._ignorefileandline(p)
880 880 break
881 881 if ignored:
882 882 if ignored == nf:
883 883 ui.write(_("%s is ignored\n") % f)
884 884 else:
885 885 ui.write(_("%s is ignored because of "
886 886 "containing folder %s\n")
887 887 % (f, ignored))
888 888 ignorefile, lineno, line = ignoredata
889 889 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
890 890 % (ignorefile, lineno, line))
891 891 else:
892 892 ui.write(_("%s is not ignored\n") % f)
893 893
894 894 @command('debugindex', cmdutil.debugrevlogopts +
895 895 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
896 896 _('[-f FORMAT] -c|-m|FILE'),
897 897 optionalrepo=True)
898 898 def debugindex(ui, repo, file_=None, **opts):
899 899 """dump the contents of an index file"""
900 900 opts = pycompat.byteskwargs(opts)
901 901 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
902 902 format = opts.get('format', 0)
903 903 if format not in (0, 1):
904 904 raise error.Abort(_("unknown format %d") % format)
905 905
906 906 generaldelta = r.version & revlog.FLAG_GENERALDELTA
907 907 if generaldelta:
908 908 basehdr = ' delta'
909 909 else:
910 910 basehdr = ' base'
911 911
912 912 if ui.debugflag:
913 913 shortfn = hex
914 914 else:
915 915 shortfn = short
916 916
917 917 # There might not be anything in r, so have a sane default
918 918 idlen = 12
919 919 for i in r:
920 920 idlen = len(shortfn(r.node(i)))
921 921 break
922 922
923 923 if format == 0:
924 924 ui.write((" rev offset length " + basehdr + " linkrev"
925 925 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
926 926 elif format == 1:
927 927 ui.write((" rev flag offset length"
928 928 " size " + basehdr + " link p1 p2"
929 929 " %s\n") % "nodeid".rjust(idlen))
930 930
931 931 for i in r:
932 932 node = r.node(i)
933 933 if generaldelta:
934 934 base = r.deltaparent(i)
935 935 else:
936 936 base = r.chainbase(i)
937 937 if format == 0:
938 938 try:
939 939 pp = r.parents(node)
940 940 except Exception:
941 941 pp = [nullid, nullid]
942 942 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
943 943 i, r.start(i), r.length(i), base, r.linkrev(i),
944 944 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
945 945 elif format == 1:
946 946 pr = r.parentrevs(i)
947 947 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
948 948 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
949 949 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
950 950
951 951 @command('debugindexdot', cmdutil.debugrevlogopts,
952 952 _('-c|-m|FILE'), optionalrepo=True)
953 953 def debugindexdot(ui, repo, file_=None, **opts):
954 954 """dump an index DAG as a graphviz dot file"""
955 955 opts = pycompat.byteskwargs(opts)
956 956 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
957 957 ui.write(("digraph G {\n"))
958 958 for i in r:
959 959 node = r.node(i)
960 960 pp = r.parents(node)
961 961 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
962 962 if pp[1] != nullid:
963 963 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
964 964 ui.write("}\n")
965 965
966 966 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
967 967 def debuginstall(ui, **opts):
968 968 '''test Mercurial installation
969 969
970 970 Returns 0 on success.
971 971 '''
972 972 opts = pycompat.byteskwargs(opts)
973 973
974 974 def writetemp(contents):
975 975 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
976 976 f = os.fdopen(fd, pycompat.sysstr("wb"))
977 977 f.write(contents)
978 978 f.close()
979 979 return name
980 980
981 981 problems = 0
982 982
983 983 fm = ui.formatter('debuginstall', opts)
984 984 fm.startitem()
985 985
986 986 # encoding
987 987 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
988 988 err = None
989 989 try:
990 990 encoding.fromlocal("test")
991 991 except error.Abort as inst:
992 992 err = inst
993 993 problems += 1
994 994 fm.condwrite(err, 'encodingerror', _(" %s\n"
995 995 " (check that your locale is properly set)\n"), err)
996 996
997 997 # Python
998 998 fm.write('pythonexe', _("checking Python executable (%s)\n"),
999 999 pycompat.sysexecutable)
1000 1000 fm.write('pythonver', _("checking Python version (%s)\n"),
1001 1001 ("%d.%d.%d" % sys.version_info[:3]))
1002 1002 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1003 1003 os.path.dirname(pycompat.fsencode(os.__file__)))
1004 1004
1005 1005 security = set(sslutil.supportedprotocols)
1006 1006 if sslutil.hassni:
1007 1007 security.add('sni')
1008 1008
1009 1009 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1010 1010 fm.formatlist(sorted(security), name='protocol',
1011 1011 fmt='%s', sep=','))
1012 1012
1013 1013 # These are warnings, not errors. So don't increment problem count. This
1014 1014 # may change in the future.
1015 1015 if 'tls1.2' not in security:
1016 1016 fm.plain(_(' TLS 1.2 not supported by Python install; '
1017 1017 'network connections lack modern security\n'))
1018 1018 if 'sni' not in security:
1019 1019 fm.plain(_(' SNI not supported by Python install; may have '
1020 1020 'connectivity issues with some servers\n'))
1021 1021
1022 1022 # TODO print CA cert info
1023 1023
1024 1024 # hg version
1025 1025 hgver = util.version()
1026 1026 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1027 1027 hgver.split('+')[0])
1028 1028 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1029 1029 '+'.join(hgver.split('+')[1:]))
1030 1030
1031 1031 # compiled modules
1032 1032 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1033 1033 policy.policy)
1034 1034 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1035 1035 os.path.dirname(pycompat.fsencode(__file__)))
1036 1036
1037 1037 if policy.policy in ('c', 'allow'):
1038 1038 err = None
1039 1039 try:
1040 1040 from .cext import (
1041 1041 base85,
1042 1042 bdiff,
1043 1043 mpatch,
1044 1044 osutil,
1045 1045 )
1046 1046 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1047 1047 except Exception as inst:
1048 1048 err = inst
1049 1049 problems += 1
1050 1050 fm.condwrite(err, 'extensionserror', " %s\n", err)
1051 1051
1052 1052 compengines = util.compengines._engines.values()
1053 1053 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1054 1054 fm.formatlist(sorted(e.name() for e in compengines),
1055 1055 name='compengine', fmt='%s', sep=', '))
1056 1056 fm.write('compenginesavail', _('checking available compression engines '
1057 1057 '(%s)\n'),
1058 1058 fm.formatlist(sorted(e.name() for e in compengines
1059 1059 if e.available()),
1060 1060 name='compengine', fmt='%s', sep=', '))
1061 1061 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1062 1062 fm.write('compenginesserver', _('checking available compression engines '
1063 1063 'for wire protocol (%s)\n'),
1064 1064 fm.formatlist([e.name() for e in wirecompengines
1065 1065 if e.wireprotosupport()],
1066 1066 name='compengine', fmt='%s', sep=', '))
1067 1067
1068 1068 # templates
1069 1069 p = templater.templatepaths()
1070 1070 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1071 1071 fm.condwrite(not p, '', _(" no template directories found\n"))
1072 1072 if p:
1073 1073 m = templater.templatepath("map-cmdline.default")
1074 1074 if m:
1075 1075 # template found, check if it is working
1076 1076 err = None
1077 1077 try:
1078 1078 templater.templater.frommapfile(m)
1079 1079 except Exception as inst:
1080 1080 err = inst
1081 1081 p = None
1082 1082 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1083 1083 else:
1084 1084 p = None
1085 1085 fm.condwrite(p, 'defaulttemplate',
1086 1086 _("checking default template (%s)\n"), m)
1087 1087 fm.condwrite(not m, 'defaulttemplatenotfound',
1088 1088 _(" template '%s' not found\n"), "default")
1089 1089 if not p:
1090 1090 problems += 1
1091 1091 fm.condwrite(not p, '',
1092 1092 _(" (templates seem to have been installed incorrectly)\n"))
1093 1093
1094 1094 # editor
1095 1095 editor = ui.geteditor()
1096 1096 editor = util.expandpath(editor)
1097 1097 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1098 1098 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1099 1099 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1100 1100 _(" No commit editor set and can't find %s in PATH\n"
1101 1101 " (specify a commit editor in your configuration"
1102 1102 " file)\n"), not cmdpath and editor == 'vi' and editor)
1103 1103 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1104 1104 _(" Can't find editor '%s' in PATH\n"
1105 1105 " (specify a commit editor in your configuration"
1106 1106 " file)\n"), not cmdpath and editor)
1107 1107 if not cmdpath and editor != 'vi':
1108 1108 problems += 1
1109 1109
1110 1110 # check username
1111 1111 username = None
1112 1112 err = None
1113 1113 try:
1114 1114 username = ui.username()
1115 1115 except error.Abort as e:
1116 1116 err = e
1117 1117 problems += 1
1118 1118
1119 1119 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1120 1120 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1121 1121 " (specify a username in your configuration file)\n"), err)
1122 1122
1123 1123 fm.condwrite(not problems, '',
1124 1124 _("no problems detected\n"))
1125 1125 if not problems:
1126 1126 fm.data(problems=problems)
1127 1127 fm.condwrite(problems, 'problems',
1128 1128 _("%d problems detected,"
1129 1129 " please check your install!\n"), problems)
1130 1130 fm.end()
1131 1131
1132 1132 return problems
1133 1133
1134 1134 @command('debugknown', [], _('REPO ID...'), norepo=True)
1135 1135 def debugknown(ui, repopath, *ids, **opts):
1136 1136 """test whether node ids are known to a repo
1137 1137
1138 1138 Every ID must be a full-length hex node id string. Returns a list of 0s
1139 1139 and 1s indicating unknown/known.
1140 1140 """
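    # Hedged example (assumed): querying a peer with two full hex node ids
    # (placeholders below) prints one digit per id, e.g. "10" if only the
    # first one is known:
    #
    #   hg debugknown http://example.com/repo <40-hex-id> <40-hex-id>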
1141 1141 opts = pycompat.byteskwargs(opts)
1142 1142 repo = hg.peer(ui, opts, repopath)
1143 1143 if not repo.capable('known'):
1144 1144 raise error.Abort("known() not supported by target repository")
1145 1145 flags = repo.known([bin(s) for s in ids])
1146 1146 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1147 1147
1148 1148 @command('debuglabelcomplete', [], _('LABEL...'))
1149 1149 def debuglabelcomplete(ui, repo, *args):
1150 1150 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1151 1151 debugnamecomplete(ui, repo, *args)
1152 1152
1153 1153 @command('debuglocks',
1154 1154 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1155 1155 ('W', 'force-wlock', None,
1156 1156 _('free the working state lock (DANGEROUS)'))],
1157 1157 _('[OPTION]...'))
1158 1158 def debuglocks(ui, repo, **opts):
1159 1159 """show or modify state of locks
1160 1160
1161 1161 By default, this command will show which locks are held. This
1162 1162 includes the user and process holding the lock, the amount of time
1163 1163 the lock has been held, and the machine name where the process is
1164 1164 running if it's not local.
1165 1165
1166 1166 Locks protect the integrity of Mercurial's data, so should be
1167 1167 treated with care. System crashes or other interruptions may cause
1168 1168 locks to not be properly released, though Mercurial will usually
1169 1169 detect and remove such stale locks automatically.
1170 1170
1171 1171 However, detecting stale locks may not always be possible (for
1172 1172 instance, on a shared filesystem). Removing locks may also be
1173 1173 blocked by filesystem permissions.
1174 1174
1175 1175 Returns 0 if no locks are held.
1176 1176
1177 1177 """
1178 1178
1179 1179 if opts.get(r'force_lock'):
1180 1180 repo.svfs.unlink('lock')
1181 1181 if opts.get(r'force_wlock'):
1182 1182 repo.vfs.unlink('wlock')
1183 1183     if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1184 1184 return 0
1185 1185
1186 1186 now = time.time()
1187 1187 held = 0
1188 1188
1189 1189 def report(vfs, name, method):
1190 1190 # this causes stale locks to get reaped for more accurate reporting
1191 1191 try:
1192 1192 l = method(False)
1193 1193 except error.LockHeld:
1194 1194 l = None
1195 1195
1196 1196 if l:
1197 1197 l.release()
1198 1198 else:
1199 1199 try:
1200 1200 stat = vfs.lstat(name)
1201 1201 age = now - stat.st_mtime
1202 1202 user = util.username(stat.st_uid)
1203 1203 locker = vfs.readlock(name)
1204 1204 if ":" in locker:
1205 1205 host, pid = locker.split(':')
1206 1206 if host == socket.gethostname():
1207 1207 locker = 'user %s, process %s' % (user, pid)
1208 1208 else:
1209 1209 locker = 'user %s, process %s, host %s' \
1210 1210 % (user, pid, host)
1211 1211 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1212 1212 return 1
1213 1213 except OSError as e:
1214 1214 if e.errno != errno.ENOENT:
1215 1215 raise
1216 1216
1217 1217 ui.write(("%-6s free\n") % (name + ":"))
1218 1218 return 0
1219 1219
1220 1220 held += report(repo.svfs, "lock", repo.lock)
1221 1221 held += report(repo.vfs, "wlock", repo.wlock)
1222 1222
1223 1223 return held
1224 1224
1225 1225 @command('debugmergestate', [], '')
1226 1226 def debugmergestate(ui, repo, *args):
1227 1227 """print merge state
1228 1228
1229 1229 Use --verbose to print out information about whether v1 or v2 merge state
1230 1230 was chosen."""
1231 1231 def _hashornull(h):
1232 1232 if h == nullhex:
1233 1233 return 'null'
1234 1234 else:
1235 1235 return h
1236 1236
1237 1237 def printrecords(version):
1238 1238 ui.write(('* version %s records\n') % version)
1239 1239 if version == 1:
1240 1240 records = v1records
1241 1241 else:
1242 1242 records = v2records
1243 1243
1244 1244 for rtype, record in records:
1245 1245 # pretty print some record types
1246 1246 if rtype == 'L':
1247 1247 ui.write(('local: %s\n') % record)
1248 1248 elif rtype == 'O':
1249 1249 ui.write(('other: %s\n') % record)
1250 1250 elif rtype == 'm':
1251 1251 driver, mdstate = record.split('\0', 1)
1252 1252 ui.write(('merge driver: %s (state "%s")\n')
1253 1253 % (driver, mdstate))
1254 1254 elif rtype in 'FDC':
1255 1255 r = record.split('\0')
1256 1256 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1257 1257 if version == 1:
1258 1258 onode = 'not stored in v1 format'
1259 1259 flags = r[7]
1260 1260 else:
1261 1261 onode, flags = r[7:9]
1262 1262 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1263 1263 % (f, rtype, state, _hashornull(hash)))
1264 1264 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1265 1265 ui.write((' ancestor path: %s (node %s)\n')
1266 1266 % (afile, _hashornull(anode)))
1267 1267 ui.write((' other path: %s (node %s)\n')
1268 1268 % (ofile, _hashornull(onode)))
1269 1269 elif rtype == 'f':
1270 1270 filename, rawextras = record.split('\0', 1)
1271 1271 extras = rawextras.split('\0')
1272 1272 i = 0
1273 1273 extrastrings = []
1274 1274 while i < len(extras):
1275 1275 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1276 1276 i += 2
1277 1277
1278 1278 ui.write(('file extras: %s (%s)\n')
1279 1279 % (filename, ', '.join(extrastrings)))
1280 1280 elif rtype == 'l':
1281 1281 labels = record.split('\0', 2)
1282 1282 labels = [l for l in labels if len(l) > 0]
1283 1283 ui.write(('labels:\n'))
1284 1284 ui.write((' local: %s\n' % labels[0]))
1285 1285 ui.write((' other: %s\n' % labels[1]))
1286 1286 if len(labels) > 2:
1287 1287 ui.write((' base: %s\n' % labels[2]))
1288 1288 else:
1289 1289 ui.write(('unrecognized entry: %s\t%s\n')
1290 1290 % (rtype, record.replace('\0', '\t')))
1291 1291
1292 1292 # Avoid mergestate.read() since it may raise an exception for unsupported
1293 1293 # merge state records. We shouldn't be doing this, but this is OK since this
1294 1294 # command is pretty low-level.
1295 1295 ms = mergemod.mergestate(repo)
1296 1296
1297 1297 # sort so that reasonable information is on top
1298 1298 v1records = ms._readrecordsv1()
1299 1299 v2records = ms._readrecordsv2()
1300 1300 order = 'LOml'
1301 1301 def key(r):
1302 1302 idx = order.find(r[0])
1303 1303 if idx == -1:
1304 1304 return (1, r[1])
1305 1305 else:
1306 1306 return (0, idx)
1307 1307 v1records.sort(key=key)
1308 1308 v2records.sort(key=key)
1309 1309
1310 1310 if not v1records and not v2records:
1311 1311 ui.write(('no merge state found\n'))
1312 1312 elif not v2records:
1313 1313 ui.note(('no version 2 merge state\n'))
1314 1314 printrecords(1)
1315 1315 elif ms._v1v2match(v1records, v2records):
1316 1316 ui.note(('v1 and v2 states match: using v2\n'))
1317 1317 printrecords(2)
1318 1318 else:
1319 1319 ui.note(('v1 and v2 states mismatch: using v1\n'))
1320 1320 printrecords(1)
1321 1321 if ui.verbose:
1322 1322 printrecords(2)
1323 1323
1324 1324 @command('debugnamecomplete', [], _('NAME...'))
1325 1325 def debugnamecomplete(ui, repo, *args):
1326 1326 '''complete "names" - tags, open branch names, bookmark names'''
1327 1327
1328 1328 names = set()
1329 1329 # since we previously only listed open branches, we will handle that
1330 1330 # specially (after this for loop)
1331 1331 for name, ns in repo.names.iteritems():
1332 1332 if name != 'branches':
1333 1333 names.update(ns.listnames(repo))
1334 1334 names.update(tag for (tag, heads, tip, closed)
1335 1335 in repo.branchmap().iterbranches() if not closed)
1336 1336 completions = set()
1337 1337 if not args:
1338 1338 args = ['']
1339 1339 for a in args:
1340 1340 completions.update(n for n in names if n.startswith(a))
1341 1341 ui.write('\n'.join(sorted(completions)))
1342 1342 ui.write('\n')
1343 1343
1344 1344 @command('debugobsolete',
1345 1345 [('', 'flags', 0, _('markers flag')),
1346 1346 ('', 'record-parents', False,
1347 1347 _('record parent information for the precursor')),
1348 1348 ('r', 'rev', [], _('display markers relevant to REV')),
1349 1349 ('', 'exclusive', False, _('restrict display to markers only '
1350 1350 'relevant to REV')),
1351 1351 ('', 'index', False, _('display index of the marker')),
1352 1352 ('', 'delete', [], _('delete markers specified by indices')),
1353 1353 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1354 1354 _('[OBSOLETED [REPLACEMENT ...]]'))
1355 1355 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1356 1356 """create arbitrary obsolete marker
1357 1357
1358 1358 With no arguments, displays the list of obsolescence markers."""
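    # Hedged examples (assumed): mark one changeset as superseded by another
    # (both given as full hex node ids, placeholders below), then list markers
    # relevant to the working directory parent with their indices:
    #
    #   hg debugobsolete <precursor-hex> <successor-hex>
    #   hg debugobsolete --rev . --index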
1359 1359
1360 1360 opts = pycompat.byteskwargs(opts)
1361 1361
1362 1362 def parsenodeid(s):
1363 1363 try:
1364 1364 # We do not use revsingle/revrange functions here to accept
1365 1365 # arbitrary node identifiers, possibly not present in the
1366 1366 # local repository.
1367 1367 n = bin(s)
1368 1368 if len(n) != len(nullid):
1369 1369 raise TypeError()
1370 1370 return n
1371 1371 except TypeError:
1372 1372 raise error.Abort('changeset references must be full hexadecimal '
1373 1373 'node identifiers')
1374 1374
1375 1375 if opts.get('delete'):
1376 1376 indices = []
1377 1377 for v in opts.get('delete'):
1378 1378 try:
1379 1379 indices.append(int(v))
1380 1380 except ValueError:
1381 1381 raise error.Abort(_('invalid index value: %r') % v,
1382 1382 hint=_('use integers for indices'))
1383 1383
1384 1384 if repo.currenttransaction():
1385 1385 raise error.Abort(_('cannot delete obsmarkers in the middle '
1386 1386 'of transaction.'))
1387 1387
1388 1388 with repo.lock():
1389 1389 n = repair.deleteobsmarkers(repo.obsstore, indices)
1390 1390 ui.write(_('deleted %i obsolescence markers\n') % n)
1391 1391
1392 1392 return
1393 1393
1394 1394 if precursor is not None:
1395 1395 if opts['rev']:
1396 1396 raise error.Abort('cannot select revision when creating marker')
1397 1397 metadata = {}
1398 1398 metadata['user'] = opts['user'] or ui.username()
1399 1399 succs = tuple(parsenodeid(succ) for succ in successors)
1400 1400 l = repo.lock()
1401 1401 try:
1402 1402 tr = repo.transaction('debugobsolete')
1403 1403 try:
1404 1404 date = opts.get('date')
1405 1405 if date:
1406 1406 date = util.parsedate(date)
1407 1407 else:
1408 1408 date = None
1409 1409 prec = parsenodeid(precursor)
1410 1410 parents = None
1411 1411 if opts['record_parents']:
1412 1412 if prec not in repo.unfiltered():
1413 1413 raise error.Abort('cannot used --record-parents on '
1414 1414 'unknown changesets')
1415 1415 parents = repo.unfiltered()[prec].parents()
1416 1416 parents = tuple(p.node() for p in parents)
1417 1417 repo.obsstore.create(tr, prec, succs, opts['flags'],
1418 1418 parents=parents, date=date,
1419 1419 metadata=metadata, ui=ui)
1420 1420 tr.close()
1421 1421 except ValueError as exc:
1422 1422 raise error.Abort(_('bad obsmarker input: %s') % exc)
1423 1423 finally:
1424 1424 tr.release()
1425 1425 finally:
1426 1426 l.release()
1427 1427 else:
1428 1428 if opts['rev']:
1429 1429 revs = scmutil.revrange(repo, opts['rev'])
1430 1430 nodes = [repo[r].node() for r in revs]
1431 1431 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1432 1432 exclusive=opts['exclusive']))
1433 1433 markers.sort(key=lambda x: x._data)
1434 1434 else:
1435 1435 markers = obsolete.getmarkers(repo)
1436 1436
1437 1437 markerstoiter = markers
1438 1438 isrelevant = lambda m: True
1439 1439 if opts.get('rev') and opts.get('index'):
1440 1440 markerstoiter = obsolete.getmarkers(repo)
1441 1441 markerset = set(markers)
1442 1442 isrelevant = lambda m: m in markerset
1443 1443
1444 1444 fm = ui.formatter('debugobsolete', opts)
1445 1445 for i, m in enumerate(markerstoiter):
1446 1446 if not isrelevant(m):
1447 1447 # marker can be irrelevant when we're iterating over a set
1448 1448 # of markers (markerstoiter) which is bigger than the set
1449 1449 # of markers we want to display (markers)
1450 1450 # this can happen if both --index and --rev options are
1451 1451 # provided and thus we need to iterate over all of the markers
1452 1452 # to get the correct indices, but only display the ones that
1453 1453 # are relevant to --rev value
1454 1454 continue
1455 1455 fm.startitem()
1456 1456 ind = i if opts.get('index') else None
1457 1457 cmdutil.showmarker(fm, m, index=ind)
1458 1458 fm.end()
1459 1459
1460 1460 @command('debugpathcomplete',
1461 1461 [('f', 'full', None, _('complete an entire path')),
1462 1462 ('n', 'normal', None, _('show only normal files')),
1463 1463 ('a', 'added', None, _('show only added files')),
1464 1464 ('r', 'removed', None, _('show only removed files'))],
1465 1465 _('FILESPEC...'))
1466 1466 def debugpathcomplete(ui, repo, *specs, **opts):
1467 1467 '''complete part or all of a tracked path
1468 1468
1469 1469 This command supports shells that offer path name completion. It
1470 1470 currently completes only files already known to the dirstate.
1471 1471
1472 1472 Completion extends only to the next path segment unless
1473 1473 --full is specified, in which case entire paths are used.'''
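    # Hedged example (assumed): completing a prefix against the dirstate,
    #
    #   hg debugpathcomplete -n mercurial/de
    #
    # would list tracked ("normal") entries under that prefix, one path or
    # next path segment per line.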
1474 1474
1475 1475 def complete(path, acceptable):
1476 1476 dirstate = repo.dirstate
1477 1477 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1478 1478 rootdir = repo.root + pycompat.ossep
1479 1479 if spec != repo.root and not spec.startswith(rootdir):
1480 1480 return [], []
1481 1481 if os.path.isdir(spec):
1482 1482 spec += '/'
1483 1483 spec = spec[len(rootdir):]
1484 1484 fixpaths = pycompat.ossep != '/'
1485 1485 if fixpaths:
1486 1486 spec = spec.replace(pycompat.ossep, '/')
1487 1487 speclen = len(spec)
1488 1488 fullpaths = opts[r'full']
1489 1489 files, dirs = set(), set()
1490 1490 adddir, addfile = dirs.add, files.add
1491 1491 for f, st in dirstate.iteritems():
1492 1492 if f.startswith(spec) and st[0] in acceptable:
1493 1493 if fixpaths:
1494 1494 f = f.replace('/', pycompat.ossep)
1495 1495 if fullpaths:
1496 1496 addfile(f)
1497 1497 continue
1498 1498 s = f.find(pycompat.ossep, speclen)
1499 1499 if s >= 0:
1500 1500 adddir(f[:s])
1501 1501 else:
1502 1502 addfile(f)
1503 1503 return files, dirs
1504 1504
1505 1505 acceptable = ''
1506 1506 if opts[r'normal']:
1507 1507 acceptable += 'nm'
1508 1508 if opts[r'added']:
1509 1509 acceptable += 'a'
1510 1510 if opts[r'removed']:
1511 1511 acceptable += 'r'
1512 1512 cwd = repo.getcwd()
1513 1513 if not specs:
1514 1514 specs = ['.']
1515 1515
1516 1516 files, dirs = set(), set()
1517 1517 for spec in specs:
1518 1518 f, d = complete(spec, acceptable or 'nmar')
1519 1519 files.update(f)
1520 1520 dirs.update(d)
1521 1521 files.update(dirs)
1522 1522 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1523 1523 ui.write('\n')
1524 1524
1525 1525 @command('debugpickmergetool',
1526 1526 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1527 1527 ('', 'changedelete', None, _('emulate merging change and delete')),
1528 1528 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1529 1529 _('[PATTERN]...'),
1530 1530 inferrepo=True)
1531 1531 def debugpickmergetool(ui, repo, *pats, **opts):
1532 1532 """examine which merge tool is chosen for specified file
1533 1533
1534 1534 As described in :hg:`help merge-tools`, Mercurial examines the
1535 1535 configurations below, in this order, to decide which merge tool is
1536 1536 chosen for the specified file.
1537 1537
1538 1538 1. ``--tool`` option
1539 1539 2. ``HGMERGE`` environment variable
1540 1540 3. configurations in ``merge-patterns`` section
1541 1541 4. configuration of ``ui.merge``
1542 1542 5. configurations in ``merge-tools`` section
1543 1543 6. ``hgmerge`` tool (for historical reasons only)
1544 1544 7. default tool for fallback (``:merge`` or ``:prompt``)
1545 1545
1546 1546 This command writes out the examination result in the style below::
1547 1547
1548 1548 FILE = MERGETOOL
1549 1549
1550 1550 By default, all files known in the first parent context of the
1551 1551 working directory are examined. Use file patterns and/or -I/-X
1552 1552 options to limit target files. -r/--rev is also useful to examine
1553 1553 files in another context without actually updating to it.
1554 1554
1555 1555 With --debug, this command also shows warning messages emitted
1556 1556 while matching against ``merge-patterns`` and so on. It is
1557 1557 recommended to use this option with explicit file patterns and/or
1558 1558 -I/-X options, because it increases the amount of output per file
1559 1559 according to the configurations in hgrc.
1560 1560
1561 1561 With -v/--verbose, this command first shows the configurations
1562 1562 below (only those that are actually specified).
1563 1563
1564 1564 - ``--tool`` option
1565 1565 - ``HGMERGE`` environment variable
1566 1566 - configuration of ``ui.merge``
1567 1567
1568 1568 If the merge tool is chosen before matching against
1569 1569 ``merge-patterns``, this command can't show any helpful
1570 1570 information, even with --debug. In such a case, the information
1571 1571 above is useful for understanding why a merge tool was chosen.
1572 1572 """
1573 1573 opts = pycompat.byteskwargs(opts)
1574 1574 overrides = {}
1575 1575 if opts['tool']:
1576 1576 overrides[('ui', 'forcemerge')] = opts['tool']
1577 1577 ui.note(('with --tool %r\n') % (opts['tool']))
1578 1578
1579 1579 with ui.configoverride(overrides, 'debugmergepatterns'):
1580 1580 hgmerge = encoding.environ.get("HGMERGE")
1581 1581 if hgmerge is not None:
1582 1582 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1583 1583 uimerge = ui.config("ui", "merge")
1584 1584 if uimerge:
1585 1585 ui.note(('with ui.merge=%r\n') % (uimerge))
1586 1586
1587 1587 ctx = scmutil.revsingle(repo, opts.get('rev'))
1588 1588 m = scmutil.match(ctx, pats, opts)
1589 1589 changedelete = opts['changedelete']
1590 1590 for path in ctx.walk(m):
1591 1591 fctx = ctx[path]
1592 1592 try:
1593 1593 if not ui.debugflag:
1594 1594 ui.pushbuffer(error=True)
1595 1595 tool, toolpath = filemerge._picktool(repo, ui, path,
1596 1596 fctx.isbinary(),
1597 1597 'l' in fctx.flags(),
1598 1598 changedelete)
1599 1599 finally:
1600 1600 if not ui.debugflag:
1601 1601 ui.popbuffer()
1602 1602 ui.write(('%s = %s\n') % (path, tool))
1603 1603
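# A minimal sketch (not the real filemerge._picktool logic) of the precedence
# order documented above: the first configured source wins, and an internal
# tool is used as the final fallback. The function name and argument layout
# are hypothetical.
def _toolprecedencesketch(toolopt=None, hgmerge=None, patterntool=None,
                          uimerge=None, mergetoolsdefault=None,
                          changedelete=False):
    sources = [
        ('--tool', toolopt),
        ('HGMERGE', hgmerge),
        ('merge-patterns', patterntool),
        ('ui.merge', uimerge),
        ('merge-tools', mergetoolsdefault),
    ]
    for name, value in sources:
        if value:
            return name, value
    # step 7 in the docstring: fall back to an internal merge tool
    return 'fallback', ':prompt' if changedelete else ':merge'

# _toolprecedencesketch(hgmerge='vimdiff') -> ('HGMERGE', 'vimdiff')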
1604 1604 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1605 1605 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1606 1606 '''access the pushkey key/value protocol
1607 1607
1608 1608 With two args, list the keys in the given namespace.
1609 1609
1610 1610 With five args, set a key to new if it currently is set to old.
1611 1611 Reports success or failure.
1612 1612 '''
1613 1613
1614 1614 target = hg.peer(ui, {}, repopath)
1615 1615 if keyinfo:
1616 1616 key, old, new = keyinfo
1617 1617 r = target.pushkey(namespace, key, old, new)
1618 1618 ui.status(str(r) + '\n')
1619 1619 return not r
1620 1620 else:
1621 1621 for k, v in sorted(target.listkeys(namespace).iteritems()):
1622 1622 ui.write("%s\t%s\n" % (util.escapestr(k),
1623 1623 util.escapestr(v)))
1624 1624
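# Example invocations of the command above, using the real "bookmarks"
# pushkey namespace (the repository path, bookmark name, and node are
# placeholders):
#   hg debugpushkey /path/to/repo bookmarks
#       lists the bookmark name/node pairs exposed by the peer
#   hg debugpushkey /path/to/repo bookmarks mybook '' <newnodehex>
#       sets bookmark "mybook" to <newnodehex> if it is currently unset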
1625 1625 @command('debugpvec', [], _('A B'))
1626 1626 def debugpvec(ui, repo, a, b=None):
1627 1627 ca = scmutil.revsingle(repo, a)
1628 1628 cb = scmutil.revsingle(repo, b)
1629 1629 pa = pvec.ctxpvec(ca)
1630 1630 pb = pvec.ctxpvec(cb)
1631 1631 if pa == pb:
1632 1632 rel = "="
1633 1633 elif pa > pb:
1634 1634 rel = ">"
1635 1635 elif pa < pb:
1636 1636 rel = "<"
1637 1637 elif pa | pb:
1638 1638 rel = "|"
1639 1639 ui.write(_("a: %s\n") % pa)
1640 1640 ui.write(_("b: %s\n") % pb)
1641 1641 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1642 1642 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1643 1643 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1644 1644 pa.distance(pb), rel))
1645 1645
1646 1646 @command('debugrebuilddirstate|debugrebuildstate',
1647 1647 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1648 1648 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1649 1649 'the working copy parent')),
1650 1650 ],
1651 1651 _('[-r REV]'))
1652 1652 def debugrebuilddirstate(ui, repo, rev, **opts):
1653 1653 """rebuild the dirstate as it would look like for the given revision
1654 1654
1655 1655 If no revision is specified, the first parent of the working directory will be used.
1656 1656
1657 1657 The dirstate will be set to the files of the given revision.
1658 1658 The actual working directory content or existing dirstate
1659 1659 information such as adds or removes is not considered.
1660 1660
1661 1661 ``minimal`` will only rebuild the dirstate status for files that claim to be
1662 1662 tracked but are not in the parent manifest, or that exist in the parent
1663 1663 manifest but are not in the dirstate. It will not change adds, removes, or
1664 1664 modified files that are in the working copy parent.
1665 1665
1666 1666 One use of this command is to make the next :hg:`status` invocation
1667 1667 check the actual file content.
1668 1668 """
1669 1669 ctx = scmutil.revsingle(repo, rev)
1670 1670 with repo.wlock():
1671 1671 dirstate = repo.dirstate
1672 1672 changedfiles = None
1673 1673 # See command doc for what minimal does.
1674 1674 if opts.get(r'minimal'):
1675 1675 manifestfiles = set(ctx.manifest().keys())
1676 1676 dirstatefiles = set(dirstate)
1677 1677 manifestonly = manifestfiles - dirstatefiles
1678 1678 dsonly = dirstatefiles - manifestfiles
1679 1679 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1680 1680 changedfiles = manifestonly | dsnotadded
1681 1681
1682 1682 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1683 1683
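# A toy illustration (not Mercurial code) of the --minimal set arithmetic
# above: only files tracked in the dirstate but missing from the parent
# manifest, or present in the manifest but missing from the dirstate, are
# rebuilt. The helper name and sample data are hypothetical.
def _minimalchangedfiles(manifestfiles, dirstate):
    manifestonly = set(manifestfiles) - set(dirstate)
    dsonly = set(dirstate) - set(manifestfiles)
    dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
    return manifestonly | dsnotadded

# _minimalchangedfiles({'a', 'b'}, {'b': 'n', 'c': 'a', 'd': 'r'})
# returns {'a', 'd'}: 'c' is left alone because it is a pending add.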
1684 1684 @command('debugrebuildfncache', [], '')
1685 1685 def debugrebuildfncache(ui, repo):
1686 1686 """rebuild the fncache file"""
1687 1687 repair.rebuildfncache(ui, repo)
1688 1688
1689 1689 @command('debugrename',
1690 1690 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1691 1691 _('[-r REV] FILE'))
1692 1692 def debugrename(ui, repo, file1, *pats, **opts):
1693 1693 """dump rename information"""
1694 1694
1695 1695 opts = pycompat.byteskwargs(opts)
1696 1696 ctx = scmutil.revsingle(repo, opts.get('rev'))
1697 1697 m = scmutil.match(ctx, (file1,) + pats, opts)
1698 1698 for abs in ctx.walk(m):
1699 1699 fctx = ctx[abs]
1700 1700 o = fctx.filelog().renamed(fctx.filenode())
1701 1701 rel = m.rel(abs)
1702 1702 if o:
1703 1703 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1704 1704 else:
1705 1705 ui.write(_("%s not renamed\n") % rel)
1706 1706
1707 1707 @command('debugrevlog', cmdutil.debugrevlogopts +
1708 1708 [('d', 'dump', False, _('dump index data'))],
1709 1709 _('-c|-m|FILE'),
1710 1710 optionalrepo=True)
1711 1711 def debugrevlog(ui, repo, file_=None, **opts):
1712 1712 """show data and statistics about a revlog"""
1713 1713 opts = pycompat.byteskwargs(opts)
1714 1714 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1715 1715
1716 1716 if opts.get("dump"):
1717 1717 numrevs = len(r)
1718 1718 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1719 1719 " rawsize totalsize compression heads chainlen\n"))
1720 1720 ts = 0
1721 1721 heads = set()
1722 1722
1723 1723 for rev in xrange(numrevs):
1724 1724 dbase = r.deltaparent(rev)
1725 1725 if dbase == -1:
1726 1726 dbase = rev
1727 1727 cbase = r.chainbase(rev)
1728 1728 clen = r.chainlen(rev)
1729 1729 p1, p2 = r.parentrevs(rev)
1730 1730 rs = r.rawsize(rev)
1731 1731 ts = ts + rs
1732 1732 heads -= set(r.parentrevs(rev))
1733 1733 heads.add(rev)
1734 1734 try:
1735 1735 compression = ts / r.end(rev)
1736 1736 except ZeroDivisionError:
1737 1737 compression = 0
1738 1738 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1739 1739 "%11d %5d %8d\n" %
1740 1740 (rev, p1, p2, r.start(rev), r.end(rev),
1741 1741 r.start(dbase), r.start(cbase),
1742 1742 r.start(p1), r.start(p2),
1743 1743 rs, ts, compression, len(heads), clen))
1744 1744 return 0
1745 1745
1746 1746 v = r.version
1747 1747 format = v & 0xFFFF
1748 1748 flags = []
1749 1749 gdelta = False
1750 1750 if v & revlog.FLAG_INLINE_DATA:
1751 1751 flags.append('inline')
1752 1752 if v & revlog.FLAG_GENERALDELTA:
1753 1753 gdelta = True
1754 1754 flags.append('generaldelta')
1755 1755 if not flags:
1756 1756 flags = ['(none)']
1757 1757
1758 1758 nummerges = 0
1759 1759 numfull = 0
1760 1760 numprev = 0
1761 1761 nump1 = 0
1762 1762 nump2 = 0
1763 1763 numother = 0
1764 1764 nump1prev = 0
1765 1765 nump2prev = 0
1766 1766 chainlengths = []
1767 1767 chainbases = []
1768 1768 chainspans = []
1769 1769
1770 1770 datasize = [None, 0, 0]
1771 1771 fullsize = [None, 0, 0]
1772 1772 deltasize = [None, 0, 0]
1773 1773 chunktypecounts = {}
1774 1774 chunktypesizes = {}
1775 1775
1776 1776 def addsize(size, l):
1777 1777 if l[0] is None or size < l[0]:
1778 1778 l[0] = size
1779 1779 if size > l[1]:
1780 1780 l[1] = size
1781 1781 l[2] += size
1782 1782
1783 1783 numrevs = len(r)
1784 1784 for rev in xrange(numrevs):
1785 1785 p1, p2 = r.parentrevs(rev)
1786 1786 delta = r.deltaparent(rev)
1787 1787 if format > 0:
1788 1788 addsize(r.rawsize(rev), datasize)
1789 1789 if p2 != nullrev:
1790 1790 nummerges += 1
1791 1791 size = r.length(rev)
1792 1792 if delta == nullrev:
1793 1793 chainlengths.append(0)
1794 1794 chainbases.append(r.start(rev))
1795 1795 chainspans.append(size)
1796 1796 numfull += 1
1797 1797 addsize(size, fullsize)
1798 1798 else:
1799 1799 chainlengths.append(chainlengths[delta] + 1)
1800 1800 baseaddr = chainbases[delta]
1801 1801 revaddr = r.start(rev)
1802 1802 chainbases.append(baseaddr)
1803 1803 chainspans.append((revaddr - baseaddr) + size)
1804 1804 addsize(size, deltasize)
1805 1805 if delta == rev - 1:
1806 1806 numprev += 1
1807 1807 if delta == p1:
1808 1808 nump1prev += 1
1809 1809 elif delta == p2:
1810 1810 nump2prev += 1
1811 1811 elif delta == p1:
1812 1812 nump1 += 1
1813 1813 elif delta == p2:
1814 1814 nump2 += 1
1815 1815 elif delta != nullrev:
1816 1816 numother += 1
1817 1817
1818 1818 # Obtain data on the raw chunks in the revlog.
1819 1819 segment = r._getsegmentforrevs(rev, rev)[1]
1820 1820 if segment:
1821 chunktype = segment[0]
1821 chunktype = bytes(segment[0:1])
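# on Python 3, indexing the raw segment would return an int rather than a
# byte string; taking a one-byte slice and passing it to bytes() keeps
# chunktype a byte string on both Python 2 and 3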
1822 1822 else:
1823 1823 chunktype = 'empty'
1824 1824
1825 1825 if chunktype not in chunktypecounts:
1826 1826 chunktypecounts[chunktype] = 0
1827 1827 chunktypesizes[chunktype] = 0
1828 1828
1829 1829 chunktypecounts[chunktype] += 1
1830 1830 chunktypesizes[chunktype] += size
1831 1831
1832 1832 # Adjust size min value for empty cases
1833 1833 for size in (datasize, fullsize, deltasize):
1834 1834 if size[0] is None:
1835 1835 size[0] = 0
1836 1836
1837 1837 numdeltas = numrevs - numfull
1838 1838 numoprev = numprev - nump1prev - nump2prev
1839 1839 totalrawsize = datasize[2]
1840 1840 datasize[2] /= numrevs
1841 1841 fulltotal = fullsize[2]
1842 1842 fullsize[2] /= numfull
1843 1843 deltatotal = deltasize[2]
1844 1844 if numrevs - numfull > 0:
1845 1845 deltasize[2] /= numrevs - numfull
1846 1846 totalsize = fulltotal + deltatotal
1847 1847 avgchainlen = sum(chainlengths) / numrevs
1848 1848 maxchainlen = max(chainlengths)
1849 1849 maxchainspan = max(chainspans)
1850 1850 compratio = 1
1851 1851 if totalsize:
1852 1852 compratio = totalrawsize / totalsize
1853 1853
1854 1854 basedfmtstr = '%%%dd\n'
1855 1855 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1856 1856
1857 1857 def dfmtstr(max):
1858 1858 return basedfmtstr % len(str(max))
1859 1859 def pcfmtstr(max, padding=0):
1860 1860 return basepcfmtstr % (len(str(max)), ' ' * padding)
1861 1861
1862 1862 def pcfmt(value, total):
1863 1863 if total:
1864 1864 return (value, 100 * float(value) / total)
1865 1865 else:
1866 1866 return value, 100.0
1867 1867
1868 1868 ui.write(('format : %d\n') % format)
1869 1869 ui.write(('flags : %s\n') % ', '.join(flags))
1870 1870
1871 1871 ui.write('\n')
1872 1872 fmt = pcfmtstr(totalsize)
1873 1873 fmt2 = dfmtstr(totalsize)
1874 1874 ui.write(('revisions : ') + fmt2 % numrevs)
1875 1875 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1876 1876 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1877 1877 ui.write(('revisions : ') + fmt2 % numrevs)
1878 1878 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1879 1879 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1880 1880 ui.write(('revision size : ') + fmt2 % totalsize)
1881 1881 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1882 1882 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1883 1883
1884 1884 def fmtchunktype(chunktype):
1885 1885 if chunktype == 'empty':
1886 1886 return ' %s : ' % chunktype
1887 1887 elif chunktype in string.ascii_letters:
1888 1888 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1889 1889 else:
1890 1890 return ' 0x%s : ' % hex(chunktype)
1891 1891
1892 1892 ui.write('\n')
1893 1893 ui.write(('chunks : ') + fmt2 % numrevs)
1894 1894 for chunktype in sorted(chunktypecounts):
1895 1895 ui.write(fmtchunktype(chunktype))
1896 1896 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1897 1897 ui.write(('chunks size : ') + fmt2 % totalsize)
1898 1898 for chunktype in sorted(chunktypecounts):
1899 1899 ui.write(fmtchunktype(chunktype))
1900 1900 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1901 1901
1902 1902 ui.write('\n')
1903 1903 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1904 1904 ui.write(('avg chain length : ') + fmt % avgchainlen)
1905 1905 ui.write(('max chain length : ') + fmt % maxchainlen)
1906 1906 ui.write(('max chain reach : ') + fmt % maxchainspan)
1907 1907 ui.write(('compression ratio : ') + fmt % compratio)
1908 1908
1909 1909 if format > 0:
1910 1910 ui.write('\n')
1911 1911 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1912 1912 % tuple(datasize))
1913 1913 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1914 1914 % tuple(fullsize))
1915 1915 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1916 1916 % tuple(deltasize))
1917 1917
1918 1918 if numdeltas > 0:
1919 1919 ui.write('\n')
1920 1920 fmt = pcfmtstr(numdeltas)
1921 1921 fmt2 = pcfmtstr(numdeltas, 4)
1922 1922 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1923 1923 if numprev > 0:
1924 1924 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1925 1925 numprev))
1926 1926 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1927 1927 numprev))
1928 1928 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1929 1929 numprev))
1930 1930 if gdelta:
1931 1931 ui.write(('deltas against p1 : ')
1932 1932 + fmt % pcfmt(nump1, numdeltas))
1933 1933 ui.write(('deltas against p2 : ')
1934 1934 + fmt % pcfmt(nump2, numdeltas))
1935 1935 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1936 1936 numdeltas))
1937 1937
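# A small self-contained sketch (toy data, not revlog internals) of the chain
# statistics gathered above: a revision either stores a full snapshot (delta
# parent -1) or a delta on top of an earlier revision, and its chain length
# is the number of deltas that must be applied to reconstruct it.
def _chainlengthsketch(deltaparents):
    chainlengths = []
    for rev, delta in enumerate(deltaparents):
        if delta == -1:            # full snapshot starts a new chain
            chainlengths.append(0)
        else:                      # one more delta on top of the delta parent
            chainlengths.append(chainlengths[delta] + 1)
    return chainlengths

# _chainlengthsketch([-1, 0, 1, -1, 3]) -> [0, 1, 2, 0, 1]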
1938 1938 @command('debugrevspec',
1939 1939 [('', 'optimize', None,
1940 1940 _('print parsed tree after optimizing (DEPRECATED)')),
1941 1941 ('', 'show-revs', True, _('print list of result revisions (default)')),
1942 1942 ('s', 'show-set', None, _('print internal representation of result set')),
1943 1943 ('p', 'show-stage', [],
1944 1944 _('print parsed tree at the given stage'), _('NAME')),
1945 1945 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1946 1946 ('', 'verify-optimized', False, _('verify optimized result')),
1947 1947 ],
1948 1948 ('REVSPEC'))
1949 1949 def debugrevspec(ui, repo, expr, **opts):
1950 1950 """parse and apply a revision specification
1951 1951
1952 1952 Use the -p/--show-stage option to print the parsed tree at the given stages.
1953 1953 Use -p all to print the tree at every stage.
1954 1954
1955 1955 Use the --no-show-revs option with -s or -p to print only the set
1956 1956 representation or the parsed tree, respectively.
1957 1957
1958 1958 Use --verify-optimized to compare the optimized result with the unoptimized
1959 1959 one. Returns 1 if the optimized result differs.
1960 1960 """
1961 1961 opts = pycompat.byteskwargs(opts)
1962 1962 stages = [
1963 1963 ('parsed', lambda tree: tree),
1964 1964 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1965 1965 ('concatenated', revsetlang.foldconcat),
1966 1966 ('analyzed', revsetlang.analyze),
1967 1967 ('optimized', revsetlang.optimize),
1968 1968 ]
1969 1969 if opts['no_optimized']:
1970 1970 stages = stages[:-1]
1971 1971 if opts['verify_optimized'] and opts['no_optimized']:
1972 1972 raise error.Abort(_('cannot use --verify-optimized with '
1973 1973 '--no-optimized'))
1974 1974 stagenames = set(n for n, f in stages)
1975 1975
1976 1976 showalways = set()
1977 1977 showchanged = set()
1978 1978 if ui.verbose and not opts['show_stage']:
1979 1979 # show parsed tree by --verbose (deprecated)
1980 1980 showalways.add('parsed')
1981 1981 showchanged.update(['expanded', 'concatenated'])
1982 1982 if opts['optimize']:
1983 1983 showalways.add('optimized')
1984 1984 if opts['show_stage'] and opts['optimize']:
1985 1985 raise error.Abort(_('cannot use --optimize with --show-stage'))
1986 1986 if opts['show_stage'] == ['all']:
1987 1987 showalways.update(stagenames)
1988 1988 else:
1989 1989 for n in opts['show_stage']:
1990 1990 if n not in stagenames:
1991 1991 raise error.Abort(_('invalid stage name: %s') % n)
1992 1992 showalways.update(opts['show_stage'])
1993 1993
1994 1994 treebystage = {}
1995 1995 printedtree = None
1996 1996 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1997 1997 for n, f in stages:
1998 1998 treebystage[n] = tree = f(tree)
1999 1999 if n in showalways or (n in showchanged and tree != printedtree):
2000 2000 if opts['show_stage'] or n != 'parsed':
2001 2001 ui.write(("* %s:\n") % n)
2002 2002 ui.write(revsetlang.prettyformat(tree), "\n")
2003 2003 printedtree = tree
2004 2004
2005 2005 if opts['verify_optimized']:
2006 2006 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2007 2007 brevs = revset.makematcher(treebystage['optimized'])(repo)
2008 2008 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2009 2009 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2010 2010 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2011 2011 arevs = list(arevs)
2012 2012 brevs = list(brevs)
2013 2013 if arevs == brevs:
2014 2014 return 0
2015 2015 ui.write(('--- analyzed\n'), label='diff.file_a')
2016 2016 ui.write(('+++ optimized\n'), label='diff.file_b')
2017 2017 sm = difflib.SequenceMatcher(None, arevs, brevs)
2018 2018 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2019 2019 if tag in ('delete', 'replace'):
2020 2020 for c in arevs[alo:ahi]:
2021 2021 ui.write('-%s\n' % c, label='diff.deleted')
2022 2022 if tag in ('insert', 'replace'):
2023 2023 for c in brevs[blo:bhi]:
2024 2024 ui.write('+%s\n' % c, label='diff.inserted')
2025 2025 if tag == 'equal':
2026 2026 for c in arevs[alo:ahi]:
2027 2027 ui.write(' %s\n' % c)
2028 2028 return 1
2029 2029
2030 2030 func = revset.makematcher(tree)
2031 2031 revs = func(repo)
2032 2032 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2033 2033 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2034 2034 if not opts['show_revs']:
2035 2035 return
2036 2036 for c in revs:
2037 2037 ui.write("%s\n" % c)
2038 2038
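# Example invocations of the command above (the revset expressions are only
# illustrative):
#   hg debugrevspec -p all 'ancestors(.) and not merge()'
#       prints the parsed tree after every stage, then the matching revisions
#   hg debugrevspec --verify-optimized 'heads(all())'
#       evaluates the optimized and unoptimized trees and returns 1 if their
#       results differ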
2039 2039 @command('debugsetparents', [], _('REV1 [REV2]'))
2040 2040 def debugsetparents(ui, repo, rev1, rev2=None):
2041 2041 """manually set the parents of the current working directory
2042 2042
2043 2043 This is useful for writing repository conversion tools, but should
2044 2044 be used with care. For example, neither the working directory nor the
2045 2045 dirstate is updated, so file status may be incorrect after running this
2046 2046 command.
2047 2047
2048 2048 Returns 0 on success.
2049 2049 """
2050 2050
2051 2051 r1 = scmutil.revsingle(repo, rev1).node()
2052 2052 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2053 2053
2054 2054 with repo.wlock():
2055 2055 repo.setparents(r1, r2)
2056 2056
2057 2057 @command('debugsub',
2058 2058 [('r', 'rev', '',
2059 2059 _('revision to check'), _('REV'))],
2060 2060 _('[-r REV] [REV]'))
2061 2061 def debugsub(ui, repo, rev=None):
2062 2062 ctx = scmutil.revsingle(repo, rev, None)
2063 2063 for k, v in sorted(ctx.substate.items()):
2064 2064 ui.write(('path %s\n') % k)
2065 2065 ui.write((' source %s\n') % v[0])
2066 2066 ui.write((' revision %s\n') % v[1])
2067 2067
2068 2068 @command('debugsuccessorssets',
2069 2069 [],
2070 2070 _('[REV]'))
2071 2071 def debugsuccessorssets(ui, repo, *revs):
2072 2072 """show set of successors for revision
2073 2073
2074 2074 A successors set of changeset A is a consistent group of revisions that
2075 2075 succeed A. It contains non-obsolete changesets only.
2076 2076
2077 2077 In most cases a changeset A has a single successors set containing a single
2078 2078 successor (changeset A replaced by A').
2079 2079
2080 2080 A changeset that is made obsolete with no successors is called "pruned".
2081 2081 Such changesets have no successors sets at all.
2082 2082
2083 2083 A changeset that has been "split" will have a successors set containing
2084 2084 more than one successor.
2085 2085
2086 2086 A changeset that has been rewritten in multiple different ways is called
2087 2087 "divergent". Such changesets have multiple successor sets (each of which
2088 2088 may also be split, i.e. have multiple successors).
2089 2089
2090 2090 Results are displayed as follows::
2091 2091
2092 2092 <rev1>
2093 2093 <successors-1A>
2094 2094 <rev2>
2095 2095 <successors-2A>
2096 2096 <successors-2B1> <successors-2B2> <successors-2B3>
2097 2097
2098 2098 Here rev2 has two possible (i.e. divergent) successors sets. The first
2099 2099 holds one element, whereas the second holds three (i.e. the changeset has
2100 2100 been split).
2101 2101 """
2102 2102 # passed to successorssets caching computation from one call to another
2103 2103 cache = {}
2104 2104 ctx2str = str
2105 2105 node2str = short
2106 2106 if ui.debug():
2107 2107 def ctx2str(ctx):
2108 2108 return ctx.hex()
2109 2109 node2str = hex
2110 2110 for rev in scmutil.revrange(repo, revs):
2111 2111 ctx = repo[rev]
2112 2112 ui.write('%s\n'% ctx2str(ctx))
2113 2113 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2114 2114 if succsset:
2115 2115 ui.write(' ')
2116 2116 ui.write(node2str(succsset[0]))
2117 2117 for node in succsset[1:]:
2118 2118 ui.write(' ')
2119 2119 ui.write(node2str(node))
2120 2120 ui.write('\n')
2121 2121
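# A toy rendering sketch (it does not call obsolete.successorssets) of the
# output layout documented above: one line per revision, then one indented
# line per successors set, with split successors sharing a line. The helper
# name and sample values are hypothetical.
def _rendersuccessorssets(revstr, succssets):
    lines = [revstr]
    for succsset in succssets:
        lines.append(' ' + ' '.join(succsset))
    return '\n'.join(lines)

# _rendersuccessorssets('rev2', [['succ-2A'], ['succ-2B1', 'succ-2B2',
# 'succ-2B3']]) loosely mirrors the divergent rev2 example in the docstring.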
2122 2122 @command('debugtemplate',
2123 2123 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2124 2124 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2125 2125 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2126 2126 optionalrepo=True)
2127 2127 def debugtemplate(ui, repo, tmpl, **opts):
2128 2128 """parse and apply a template
2129 2129
2130 2130 If -r/--rev is given, the template is processed as a log template and
2131 2131 applied to the given changesets. Otherwise, it is processed as a generic
2132 2132 template.
2133 2133
2134 2134 Use --verbose to print the parsed tree.
2135 2135 """
2136 2136 revs = None
2137 2137 if opts[r'rev']:
2138 2138 if repo is None:
2139 2139 raise error.RepoError(_('there is no Mercurial repository here '
2140 2140 '(.hg not found)'))
2141 2141 revs = scmutil.revrange(repo, opts[r'rev'])
2142 2142
2143 2143 props = {}
2144 2144 for d in opts[r'define']:
2145 2145 try:
2146 2146 k, v = (e.strip() for e in d.split('=', 1))
2147 2147 if not k or k == 'ui':
2148 2148 raise ValueError
2149 2149 props[k] = v
2150 2150 except ValueError:
2151 2151 raise error.Abort(_('malformed keyword definition: %s') % d)
2152 2152
2153 2153 if ui.verbose:
2154 2154 aliases = ui.configitems('templatealias')
2155 2155 tree = templater.parse(tmpl)
2156 2156 ui.note(templater.prettyformat(tree), '\n')
2157 2157 newtree = templater.expandaliases(tree, aliases)
2158 2158 if newtree != tree:
2159 2159 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2160 2160
2161 2161 if revs is None:
2162 2162 t = formatter.maketemplater(ui, tmpl)
2163 2163 props['ui'] = ui
2164 2164 ui.write(t.render(props))
2165 2165 else:
2166 2166 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2167 2167 for r in revs:
2168 2168 displayer.show(repo[r], **pycompat.strkwargs(props))
2169 2169 displayer.close()
2170 2170
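# Example invocations of the command above (the template keywords shown are
# standard ones; the -D property name is arbitrary):
#   hg debugtemplate -D greeting=hello '{greeting}, world\n'
#   hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'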
2171 2171 @command('debugupdatecaches', [])
2172 2172 def debugupdatecaches(ui, repo, *pats, **opts):
2173 2173 """warm all known caches in the repository"""
2174 2174 with repo.wlock():
2175 2175 with repo.lock():
2176 2176 repo.updatecaches()
2177 2177
2178 2178 @command('debugupgraderepo', [
2179 2179 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2180 2180 ('', 'run', False, _('performs an upgrade')),
2181 2181 ])
2182 2182 def debugupgraderepo(ui, repo, run=False, optimize=None):
2183 2183 """upgrade a repository to use different features
2184 2184
2185 2185 If no arguments are specified, the repository is evaluated for upgrade
2186 2186 and a list of problems and potential optimizations is printed.
2187 2187
2188 2188 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2189 2189 can be influenced via additional arguments. More details will be provided
2190 2190 by the command output when run without ``--run``.
2191 2191
2192 2192 During the upgrade, the repository will be locked and no writes will be
2193 2193 allowed.
2194 2194
2195 2195 At the end of the upgrade, the repository may not be readable while new
2196 2196 repository data is swapped in. This window will be as long as it takes to
2197 2197 rename some directories inside the ``.hg`` directory. On most machines, this
2198 2198 should complete almost instantaneously and the chances of a consumer being
2199 2199 unable to access the repository should be low.
2200 2200 """
2201 2201 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2202 2202
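# Example invocations of the command above:
#   hg debugupgraderepo          # only report problems and possible optimizations
#   hg debugupgraderepo --run    # actually perform the upgrade
# Optimization names accepted by -o/--optimize are listed by the first form
# and depend on the repository and Mercurial version.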
2203 2203 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2204 2204 inferrepo=True)
2205 2205 def debugwalk(ui, repo, *pats, **opts):
2206 2206 """show how files match on given patterns"""
2207 2207 opts = pycompat.byteskwargs(opts)
2208 2208 m = scmutil.match(repo[None], pats, opts)
2209 2209 ui.write(('matcher: %r\n' % m))
2210 2210 items = list(repo[None].walk(m))
2211 2211 if not items:
2212 2212 return
2213 2213 f = lambda fn: fn
2214 2214 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2215 2215 f = lambda fn: util.normpath(fn)
2216 2216 fmt = 'f %%-%ds %%-%ds %%s' % (
2217 2217 max([len(abs) for abs in items]),
2218 2218 max([len(m.rel(abs)) for abs in items]))
2219 2219 for abs in items:
2220 2220 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2221 2221 ui.write("%s\n" % line.rstrip())
2222 2222
2223 2223 @command('debugwireargs',
2224 2224 [('', 'three', '', 'three'),
2225 2225 ('', 'four', '', 'four'),
2226 2226 ('', 'five', '', 'five'),
2227 2227 ] + cmdutil.remoteopts,
2228 2228 _('REPO [OPTIONS]... [ONE [TWO]]'),
2229 2229 norepo=True)
2230 2230 def debugwireargs(ui, repopath, *vals, **opts):
2231 2231 opts = pycompat.byteskwargs(opts)
2232 2232 repo = hg.peer(ui, opts, repopath)
2233 2233 for opt in cmdutil.remoteopts:
2234 2234 del opts[opt[1]]
2235 2235 args = {}
2236 2236 for k, v in opts.iteritems():
2237 2237 if v:
2238 2238 args[k] = v
2239 2239 # run twice to check that we don't mess up the stream for the next command
2240 2240 res1 = repo.debugwireargs(*vals, **args)
2241 2241 res2 = repo.debugwireargs(*vals, **args)
2242 2242 ui.write("%s\n" % res1)
2243 2243 if res1 != res2:
2244 2244 ui.warn("%s\n" % res2)