debugssl: allow a URL to be specified without a local repository...
Matt Harbison
r34031:3c306636 stable
@@ -1,2311 +1,2311 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import ssl
17 17 import string
18 18 import sys
19 19 import tempfile
20 20 import time
21 21
22 22 from .i18n import _
23 23 from .node import (
24 24 bin,
25 25 hex,
26 26 nullhex,
27 27 nullid,
28 28 nullrev,
29 29 short,
30 30 )
31 31 from . import (
32 32 bundle2,
33 33 changegroup,
34 34 cmdutil,
35 35 color,
36 36 context,
37 37 dagparser,
38 38 dagutil,
39 39 encoding,
40 40 error,
41 41 exchange,
42 42 extensions,
43 43 filemerge,
44 44 fileset,
45 45 formatter,
46 46 hg,
47 47 localrepo,
48 48 lock as lockmod,
49 49 merge as mergemod,
50 50 obsolete,
51 51 obsutil,
52 52 phases,
53 53 policy,
54 54 pvec,
55 55 pycompat,
56 56 registrar,
57 57 repair,
58 58 revlog,
59 59 revset,
60 60 revsetlang,
61 61 scmutil,
62 62 setdiscovery,
63 63 simplemerge,
64 64 smartset,
65 65 sslutil,
66 66 streamclone,
67 67 templater,
68 68 treediscovery,
69 69 upgrade,
70 70 util,
71 71 vfs as vfsmod,
72 72 )
73 73
74 74 release = lockmod.release
75 75
76 76 command = registrar.command()
77 77
78 78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
79 79 def debugancestor(ui, repo, *args):
80 80 """find the ancestor revision of two revisions in a given index"""
81 81 if len(args) == 3:
82 82 index, rev1, rev2 = args
83 83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
84 84 lookup = r.lookup
85 85 elif len(args) == 2:
86 86 if not repo:
87 87 raise error.Abort(_('there is no Mercurial repository here '
88 88 '(.hg not found)'))
89 89 rev1, rev2 = args
90 90 r = repo.changelog
91 91 lookup = repo.lookup
92 92 else:
93 93 raise error.Abort(_('either two or three arguments required'))
94 94 a = r.ancestor(lookup(rev1), lookup(rev2))
95 95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
96 96
97 97 @command('debugapplystreamclonebundle', [], 'FILE')
98 98 def debugapplystreamclonebundle(ui, repo, fname):
99 99 """apply a stream clone bundle file"""
100 100 f = hg.openpath(ui, fname)
101 101 gen = exchange.readbundle(ui, f, fname)
102 102 gen.apply(repo)
103 103
104 104 @command('debugbuilddag',
105 105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
106 106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
107 107 ('n', 'new-file', None, _('add new file at each rev'))],
108 108 _('[OPTION]... [TEXT]'))
109 109 def debugbuilddag(ui, repo, text=None,
110 110 mergeable_file=False,
111 111 overwritten_file=False,
112 112 new_file=False):
113 113 """builds a repo with a given DAG from scratch in the current empty repo
114 114
115 115 The description of the DAG is read from stdin if not given on the
116 116 command line.
117 117
118 118 Elements:
119 119
120 120 - "+n" is a linear run of n nodes based on the current default parent
121 121 - "." is a single node based on the current default parent
122 122 - "$" resets the default parent to null (implied at the start);
123 123 otherwise the default parent is always the last node created
124 124 - "<p" sets the default parent to the backref p
125 125 - "*p" is a fork at parent p, which is a backref
126 126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
127 127 - "/p2" is a merge of the preceding node and p2
128 128 - ":tag" defines a local tag for the preceding node
129 129 - "@branch" sets the named branch for subsequent nodes
130 130 - "#...\\n" is a comment up to the end of the line
131 131
132 132 Whitespace between the above elements is ignored.
133 133
134 134 A backref is either
135 135
136 136 - a number n, which references the node curr-n, where curr is the current
137 137 node, or
138 138 - the name of a local tag you placed earlier using ":tag", or
139 139 - empty to denote the default parent.
140 140
141 141 All string-valued elements are either strictly alphanumeric, or must
142 142 be enclosed in double quotes ("..."), with "\\" as escape character.
143 143 """
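# A small illustrative example (not part of the original help text, derived
# only from the grammar documented above): the text '+2:f +3 <f +2' creates
# two linear nodes and tags the second one "f", stacks three more nodes on top
# of it, resets the default parent back to "f", and then grows a second
# two-node branch from there. In an empty repository this could be run as:
#
#   hg debugbuilddag '+2:f +3 <f +2'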
144 144
145 145 if text is None:
146 146 ui.status(_("reading DAG from stdin\n"))
147 147 text = ui.fin.read()
148 148
149 149 cl = repo.changelog
150 150 if len(cl) > 0:
151 151 raise error.Abort(_('repository is not empty'))
152 152
153 153 # determine number of revs in DAG
154 154 total = 0
155 155 for type, data in dagparser.parsedag(text):
156 156 if type == 'n':
157 157 total += 1
158 158
159 159 if mergeable_file:
160 160 linesperrev = 2
161 161 # make a file with k lines per rev
162 162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
163 163 initialmergedlines.append("")
164 164
165 165 tags = []
166 166
167 167 wlock = lock = tr = None
168 168 try:
169 169 wlock = repo.wlock()
170 170 lock = repo.lock()
171 171 tr = repo.transaction("builddag")
172 172
173 173 at = -1
174 174 atbranch = 'default'
175 175 nodeids = []
176 176 id = 0
177 177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
178 178 for type, data in dagparser.parsedag(text):
179 179 if type == 'n':
180 180 ui.note(('node %s\n' % str(data)))
181 181 id, ps = data
182 182
183 183 files = []
184 184 fctxs = {}
185 185
186 186 p2 = None
187 187 if mergeable_file:
188 188 fn = "mf"
189 189 p1 = repo[ps[0]]
190 190 if len(ps) > 1:
191 191 p2 = repo[ps[1]]
192 192 pa = p1.ancestor(p2)
193 193 base, local, other = [x[fn].data() for x in (pa, p1,
194 194 p2)]
195 195 m3 = simplemerge.Merge3Text(base, local, other)
196 196 ml = [l.strip() for l in m3.merge_lines()]
197 197 ml.append("")
198 198 elif at > 0:
199 199 ml = p1[fn].data().split("\n")
200 200 else:
201 201 ml = initialmergedlines
202 202 ml[id * linesperrev] += " r%i" % id
203 203 mergedtext = "\n".join(ml)
204 204 files.append(fn)
205 205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
206 206
207 207 if overwritten_file:
208 208 fn = "of"
209 209 files.append(fn)
210 210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
211 211
212 212 if new_file:
213 213 fn = "nf%i" % id
214 214 files.append(fn)
215 215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
216 216 if len(ps) > 1:
217 217 if not p2:
218 218 p2 = repo[ps[1]]
219 219 for fn in p2:
220 220 if fn.startswith("nf"):
221 221 files.append(fn)
222 222 fctxs[fn] = p2[fn]
223 223
224 224 def fctxfn(repo, cx, path):
225 225 return fctxs.get(path)
226 226
227 227 if len(ps) == 0 or ps[0] < 0:
228 228 pars = [None, None]
229 229 elif len(ps) == 1:
230 230 pars = [nodeids[ps[0]], None]
231 231 else:
232 232 pars = [nodeids[p] for p in ps]
233 233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
234 234 date=(id, 0),
235 235 user="debugbuilddag",
236 236 extra={'branch': atbranch})
237 237 nodeid = repo.commitctx(cx)
238 238 nodeids.append(nodeid)
239 239 at = id
240 240 elif type == 'l':
241 241 id, name = data
242 242 ui.note(('tag %s\n' % name))
243 243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
244 244 elif type == 'a':
245 245 ui.note(('branch %s\n' % data))
246 246 atbranch = data
247 247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
248 248 tr.close()
249 249
250 250 if tags:
251 251 repo.vfs.write("localtags", "".join(tags))
252 252 finally:
253 253 ui.progress(_('building'), None)
254 254 release(tr, lock, wlock)
255 255
256 256 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
257 257 indent_string = ' ' * indent
258 258 if all:
259 259 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
260 260 % indent_string)
261 261
262 262 def showchunks(named):
263 263 ui.write("\n%s%s\n" % (indent_string, named))
264 264 chain = None
265 265 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
266 266 node = chunkdata['node']
267 267 p1 = chunkdata['p1']
268 268 p2 = chunkdata['p2']
269 269 cs = chunkdata['cs']
270 270 deltabase = chunkdata['deltabase']
271 271 delta = chunkdata['delta']
272 272 ui.write("%s%s %s %s %s %s %s\n" %
273 273 (indent_string, hex(node), hex(p1), hex(p2),
274 274 hex(cs), hex(deltabase), len(delta)))
275 275 chain = node
276 276
277 277 chunkdata = gen.changelogheader()
278 278 showchunks("changelog")
279 279 chunkdata = gen.manifestheader()
280 280 showchunks("manifest")
281 281 for chunkdata in iter(gen.filelogheader, {}):
282 282 fname = chunkdata['filename']
283 283 showchunks(fname)
284 284 else:
285 285 if isinstance(gen, bundle2.unbundle20):
286 286 raise error.Abort(_('use debugbundle2 for this file'))
287 287 chunkdata = gen.changelogheader()
288 288 chain = None
289 289 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
290 290 node = chunkdata['node']
291 291 ui.write("%s%s\n" % (indent_string, hex(node)))
292 292 chain = node
293 293
294 294 def _debugobsmarkers(ui, part, indent=0, **opts):
295 295 """display version and markers contained in 'data'"""
296 296 opts = pycompat.byteskwargs(opts)
297 297 data = part.read()
298 298 indent_string = ' ' * indent
299 299 try:
300 300 version, markers = obsolete._readmarkers(data)
301 301 except error.UnknownVersion as exc:
302 302 msg = "%sunsupported version: %s (%d bytes)\n"
303 303 msg %= indent_string, exc.version, len(data)
304 304 ui.write(msg)
305 305 else:
306 306 msg = "%sversion: %s (%d bytes)\n"
307 307 msg %= indent_string, version, len(data)
308 308 ui.write(msg)
309 309 fm = ui.formatter('debugobsolete', opts)
310 310 for rawmarker in sorted(markers):
311 311 m = obsutil.marker(None, rawmarker)
312 312 fm.startitem()
313 313 fm.plain(indent_string)
314 314 cmdutil.showmarker(fm, m)
315 315 fm.end()
316 316
317 317 def _debugphaseheads(ui, data, indent=0):
318 318 """display the phase heads contained in 'data'"""
319 319 indent_string = ' ' * indent
320 320 headsbyphase = bundle2._readphaseheads(data)
321 321 for phase in phases.allphases:
322 322 for head in headsbyphase[phase]:
323 323 ui.write(indent_string)
324 324 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
325 325
326 326 def _debugbundle2(ui, gen, all=None, **opts):
327 327 """lists the contents of a bundle2"""
328 328 if not isinstance(gen, bundle2.unbundle20):
329 329 raise error.Abort(_('not a bundle2 file'))
330 330 ui.write(('Stream params: %s\n' % repr(gen.params)))
331 331 parttypes = opts.get(r'part_type', [])
332 332 for part in gen.iterparts():
333 333 if parttypes and part.type not in parttypes:
334 334 continue
335 335 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
336 336 if part.type == 'changegroup':
337 337 version = part.params.get('version', '01')
338 338 cg = changegroup.getunbundler(version, part, 'UN')
339 339 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
340 340 if part.type == 'obsmarkers':
341 341 _debugobsmarkers(ui, part, indent=4, **opts)
342 342 if part.type == 'phase-heads':
343 343 _debugphaseheads(ui, part, indent=4)
344 344
345 345 @command('debugbundle',
346 346 [('a', 'all', None, _('show all details')),
347 347 ('', 'part-type', [], _('show only the named part type')),
348 348 ('', 'spec', None, _('print the bundlespec of the bundle'))],
349 349 _('FILE'),
350 350 norepo=True)
351 351 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
352 352 """lists the contents of a bundle"""
353 353 with hg.openpath(ui, bundlepath) as f:
354 354 if spec:
355 355 spec = exchange.getbundlespec(ui, f)
356 356 ui.write('%s\n' % spec)
357 357 return
358 358
359 359 gen = exchange.readbundle(ui, f, bundlepath)
360 360 if isinstance(gen, bundle2.unbundle20):
361 361 return _debugbundle2(ui, gen, all=all, **opts)
362 362 _debugchangegroup(ui, gen, all=all, **opts)
363 363
364 364 @command('debugcheckstate', [], '')
365 365 def debugcheckstate(ui, repo):
366 366 """validate the correctness of the current dirstate"""
367 367 parent1, parent2 = repo.dirstate.parents()
368 368 m1 = repo[parent1].manifest()
369 369 m2 = repo[parent2].manifest()
370 370 errors = 0
371 371 for f in repo.dirstate:
372 372 state = repo.dirstate[f]
373 373 if state in "nr" and f not in m1:
374 374 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
375 375 errors += 1
376 376 if state in "a" and f in m1:
377 377 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
378 378 errors += 1
379 379 if state in "m" and f not in m1 and f not in m2:
380 380 ui.warn(_("%s in state %s, but not in either manifest\n") %
381 381 (f, state))
382 382 errors += 1
383 383 for f in m1:
384 384 state = repo.dirstate[f]
385 385 if state not in "nrm":
386 386 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
387 387 errors += 1
388 388 if errors:
389 389 error = _(".hg/dirstate inconsistent with current parent's manifest")
390 390 raise error.Abort(error)
391 391
392 392 @command('debugcolor',
393 393 [('', 'style', None, _('show all configured styles'))],
394 394 'hg debugcolor')
395 395 def debugcolor(ui, repo, **opts):
396 396 """show available colors, effects or styles"""
397 397 ui.write(('color mode: %s\n') % ui._colormode)
398 398 if opts.get(r'style'):
399 399 return _debugdisplaystyle(ui)
400 400 else:
401 401 return _debugdisplaycolor(ui)
402 402
403 403 def _debugdisplaycolor(ui):
404 404 ui = ui.copy()
405 405 ui._styles.clear()
406 406 for effect in color._activeeffects(ui).keys():
407 407 ui._styles[effect] = effect
408 408 if ui._terminfoparams:
409 409 for k, v in ui.configitems('color'):
410 410 if k.startswith('color.'):
411 411 ui._styles[k] = k[6:]
412 412 elif k.startswith('terminfo.'):
413 413 ui._styles[k] = k[9:]
414 414 ui.write(_('available colors:\n'))
415 415 # sort labels with '_' after the others to group the '_background' entries.
416 416 items = sorted(ui._styles.items(),
417 417 key=lambda i: ('_' in i[0], i[0], i[1]))
418 418 for colorname, label in items:
419 419 ui.write(('%s\n') % colorname, label=label)
420 420
421 421 def _debugdisplaystyle(ui):
422 422 ui.write(_('available styles:\n'))
423 423 width = max(len(s) for s in ui._styles)
424 424 for label, effects in sorted(ui._styles.items()):
425 425 ui.write('%s' % label, label=label)
426 426 if effects:
427 427 # 50
428 428 ui.write(': ')
429 429 ui.write(' ' * (max(0, width - len(label))))
430 430 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
431 431 ui.write('\n')
432 432
433 433 @command('debugcreatestreamclonebundle', [], 'FILE')
434 434 def debugcreatestreamclonebundle(ui, repo, fname):
435 435 """create a stream clone bundle file
436 436
437 437 Stream bundles are special bundles that are essentially archives of
438 438 revlog files. They are commonly used for cloning very quickly.
439 439 """
440 440 # TODO we may want to turn this into an abort when this functionality
441 441 # is moved into `hg bundle`.
442 442 if phases.hassecret(repo):
443 443 ui.warn(_('(warning: stream clone bundle will contain secret '
444 444 'revisions)\n'))
445 445
446 446 requirements, gen = streamclone.generatebundlev1(repo)
447 447 changegroup.writechunks(ui, gen, fname)
448 448
449 449 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
450 450
451 451 @command('debugdag',
452 452 [('t', 'tags', None, _('use tags as labels')),
453 453 ('b', 'branches', None, _('annotate with branch names')),
454 454 ('', 'dots', None, _('use dots for runs')),
455 455 ('s', 'spaces', None, _('separate elements by spaces'))],
456 456 _('[OPTION]... [FILE [REV]...]'),
457 457 optionalrepo=True)
458 458 def debugdag(ui, repo, file_=None, *revs, **opts):
459 459 """format the changelog or an index DAG as a concise textual description
460 460
461 461 If you pass a revlog index, the revlog's DAG is emitted. If you list
462 462 revision numbers, they get labeled in the output as rN.
463 463
464 464 Otherwise, the changelog DAG of the current repo is emitted.
465 465 """
466 466 spaces = opts.get(r'spaces')
467 467 dots = opts.get(r'dots')
468 468 if file_:
469 469 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
470 470 file_)
471 471 revs = set((int(r) for r in revs))
472 472 def events():
473 473 for r in rlog:
474 474 yield 'n', (r, list(p for p in rlog.parentrevs(r)
475 475 if p != -1))
476 476 if r in revs:
477 477 yield 'l', (r, "r%i" % r)
478 478 elif repo:
479 479 cl = repo.changelog
480 480 tags = opts.get(r'tags')
481 481 branches = opts.get(r'branches')
482 482 if tags:
483 483 labels = {}
484 484 for l, n in repo.tags().items():
485 485 labels.setdefault(cl.rev(n), []).append(l)
486 486 def events():
487 487 b = "default"
488 488 for r in cl:
489 489 if branches:
490 490 newb = cl.read(cl.node(r))[5]['branch']
491 491 if newb != b:
492 492 yield 'a', newb
493 493 b = newb
494 494 yield 'n', (r, list(p for p in cl.parentrevs(r)
495 495 if p != -1))
496 496 if tags:
497 497 ls = labels.get(r)
498 498 if ls:
499 499 for l in ls:
500 500 yield 'l', (r, l)
501 501 else:
502 502 raise error.Abort(_('need repo for changelog dag'))
503 503
504 504 for line in dagparser.dagtextlines(events(),
505 505 addspaces=spaces,
506 506 wraplabels=True,
507 507 wrapannotations=True,
508 508 wrapnonlinear=dots,
509 509 usedots=dots,
510 510 maxlinewidth=70):
511 511 ui.write(line)
512 512 ui.write("\n")
513 513
514 514 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
515 515 def debugdata(ui, repo, file_, rev=None, **opts):
516 516 """dump the contents of a data file revision"""
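# Illustrative invocations (hypothetical, not from the original file):
# "hg debugdata -c 0" dumps the raw changelog entry for revision 0, and
# "hg debugdata FILE REV" dumps revision REV of FILE's revlog.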
517 517 opts = pycompat.byteskwargs(opts)
518 518 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
519 519 if rev is not None:
520 520 raise error.CommandError('debugdata', _('invalid arguments'))
521 521 file_, rev = None, file_
522 522 elif rev is None:
523 523 raise error.CommandError('debugdata', _('invalid arguments'))
524 524 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
525 525 try:
526 526 ui.write(r.revision(r.lookup(rev), raw=True))
527 527 except KeyError:
528 528 raise error.Abort(_('invalid revision identifier %s') % rev)
529 529
530 530 @command('debugdate',
531 531 [('e', 'extended', None, _('try extended date formats'))],
532 532 _('[-e] DATE [RANGE]'),
533 533 norepo=True, optionalrepo=True)
534 534 def debugdate(ui, date, range=None, **opts):
535 535 """parse and display a date"""
536 536 if opts[r"extended"]:
537 537 d = util.parsedate(date, util.extendeddateformats)
538 538 else:
539 539 d = util.parsedate(date)
540 540 ui.write(("internal: %s %s\n") % d)
541 541 ui.write(("standard: %s\n") % util.datestr(d))
542 542 if range:
543 543 m = util.matchdate(range)
544 544 ui.write(("match: %s\n") % m(d[0]))
545 545
546 546 @command('debugdeltachain',
547 547 cmdutil.debugrevlogopts + cmdutil.formatteropts,
548 548 _('-c|-m|FILE'),
549 549 optionalrepo=True)
550 550 def debugdeltachain(ui, repo, file_=None, **opts):
551 551 """dump information about delta chains in a revlog
552 552
553 553 Output can be templatized. Available template keywords are:
554 554
555 555 :``rev``: revision number
556 556 :``chainid``: delta chain identifier (numbered by unique base)
557 557 :``chainlen``: delta chain length to this revision
558 558 :``prevrev``: previous revision in delta chain
559 559 :``deltatype``: role of delta / how it was computed
560 560 :``compsize``: compressed size of revision
561 561 :``uncompsize``: uncompressed size of revision
562 562 :``chainsize``: total size of compressed revisions in chain
563 563 :``chainratio``: total chain size divided by uncompressed revision size
564 564 (new delta chains typically start at ratio 2.00)
565 565 :``lindist``: linear distance from base revision in delta chain to end
566 566 of this revision
567 567 :``extradist``: total size of revisions not part of this delta chain from
568 568 base of delta chain to end of this revision; a measurement
569 569 of how much extra data we need to read/seek across to read
570 570 the delta chain for this revision
571 571 :``extraratio``: extradist divided by chainsize; another representation of
572 572 how much unrelated data is needed to load this delta chain
573 573 """
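# A hedged worked example of the metrics documented above (all numbers are
# invented): if a revision's delta chain compresses to chainsize=400 bytes
# while the revision's full text is uncompsize=1000 bytes, then
# chainratio = 400 / 1000 = 0.40. If reading that chain means scanning
# lindist=700 bytes of revlog data from the chain base to the end of this
# revision, then extradist = 700 - 400 = 300 bytes of unrelated data and
# extraratio = 300 / 400 = 0.75.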
574 574 opts = pycompat.byteskwargs(opts)
575 575 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
576 576 index = r.index
577 577 generaldelta = r.version & revlog.FLAG_GENERALDELTA
578 578
579 579 def revinfo(rev):
580 580 e = index[rev]
581 581 compsize = e[1]
582 582 uncompsize = e[2]
583 583 chainsize = 0
584 584
585 585 if generaldelta:
586 586 if e[3] == e[5]:
587 587 deltatype = 'p1'
588 588 elif e[3] == e[6]:
589 589 deltatype = 'p2'
590 590 elif e[3] == rev - 1:
591 591 deltatype = 'prev'
592 592 elif e[3] == rev:
593 593 deltatype = 'base'
594 594 else:
595 595 deltatype = 'other'
596 596 else:
597 597 if e[3] == rev:
598 598 deltatype = 'base'
599 599 else:
600 600 deltatype = 'prev'
601 601
602 602 chain = r._deltachain(rev)[0]
603 603 for iterrev in chain:
604 604 e = index[iterrev]
605 605 chainsize += e[1]
606 606
607 607 return compsize, uncompsize, deltatype, chain, chainsize
608 608
609 609 fm = ui.formatter('debugdeltachain', opts)
610 610
611 611 fm.plain(' rev chain# chainlen prev delta '
612 612 'size rawsize chainsize ratio lindist extradist '
613 613 'extraratio\n')
614 614
615 615 chainbases = {}
616 616 for rev in r:
617 617 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
618 618 chainbase = chain[0]
619 619 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
620 620 basestart = r.start(chainbase)
621 621 revstart = r.start(rev)
622 622 lineardist = revstart + comp - basestart
623 623 extradist = lineardist - chainsize
624 624 try:
625 625 prevrev = chain[-2]
626 626 except IndexError:
627 627 prevrev = -1
628 628
629 629 chainratio = float(chainsize) / float(uncomp)
630 630 extraratio = float(extradist) / float(chainsize)
631 631
632 632 fm.startitem()
633 633 fm.write('rev chainid chainlen prevrev deltatype compsize '
634 634 'uncompsize chainsize chainratio lindist extradist '
635 635 'extraratio',
636 636 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
637 637 rev, chainid, len(chain), prevrev, deltatype, comp,
638 638 uncomp, chainsize, chainratio, lineardist, extradist,
639 639 extraratio,
640 640 rev=rev, chainid=chainid, chainlen=len(chain),
641 641 prevrev=prevrev, deltatype=deltatype, compsize=comp,
642 642 uncompsize=uncomp, chainsize=chainsize,
643 643 chainratio=chainratio, lindist=lineardist,
644 644 extradist=extradist, extraratio=extraratio)
645 645
646 646 fm.end()
647 647
648 648 @command('debugdirstate|debugstate',
649 649 [('', 'nodates', None, _('do not display the saved mtime')),
650 650 ('', 'datesort', None, _('sort by saved mtime'))],
651 651 _('[OPTION]...'))
652 652 def debugstate(ui, repo, **opts):
653 653 """show the contents of the current dirstate"""
654 654
655 655 nodates = opts.get(r'nodates')
656 656 datesort = opts.get(r'datesort')
657 657
658 658 timestr = ""
659 659 if datesort:
660 660 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
661 661 else:
662 662 keyfunc = None # sort by filename
663 663 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
664 664 if ent[3] == -1:
665 665 timestr = 'unset '
666 666 elif nodates:
667 667 timestr = 'set '
668 668 else:
669 669 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
670 670 time.localtime(ent[3]))
671 671 if ent[1] & 0o20000:
672 672 mode = 'lnk'
673 673 else:
674 674 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
675 675 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
676 676 for f in repo.dirstate.copies():
677 677 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
678 678
679 679 @command('debugdiscovery',
680 680 [('', 'old', None, _('use old-style discovery')),
681 681 ('', 'nonheads', None,
682 682 _('use old-style discovery with non-heads included')),
683 683 ] + cmdutil.remoteopts,
684 684 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
685 685 def debugdiscovery(ui, repo, remoteurl="default", **opts):
686 686 """runs the changeset discovery protocol in isolation"""
687 687 opts = pycompat.byteskwargs(opts)
688 688 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
689 689 opts.get('branch'))
690 690 remote = hg.peer(repo, opts, remoteurl)
691 691 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
692 692
693 693 # make sure tests are repeatable
694 694 random.seed(12323)
695 695
696 696 def doit(localheads, remoteheads, remote=remote):
697 697 if opts.get('old'):
698 698 if localheads:
699 699 raise error.Abort('cannot use localheads with old style '
700 700 'discovery')
701 701 if not util.safehasattr(remote, 'branches'):
702 702 # enable in-client legacy support
703 703 remote = localrepo.locallegacypeer(remote.local())
704 704 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
705 705 force=True)
706 706 common = set(common)
707 707 if not opts.get('nonheads'):
708 708 ui.write(("unpruned common: %s\n") %
709 709 " ".join(sorted(short(n) for n in common)))
710 710 dag = dagutil.revlogdag(repo.changelog)
711 711 all = dag.ancestorset(dag.internalizeall(common))
712 712 common = dag.externalizeall(dag.headsetofconnecteds(all))
713 713 else:
714 714 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
715 715 common = set(common)
716 716 rheads = set(hds)
717 717 lheads = set(repo.heads())
718 718 ui.write(("common heads: %s\n") %
719 719 " ".join(sorted(short(n) for n in common)))
720 720 if lheads <= common:
721 721 ui.write(("local is subset\n"))
722 722 elif rheads <= common:
723 723 ui.write(("remote is subset\n"))
724 724
725 725 serverlogs = opts.get('serverlog')
726 726 if serverlogs:
727 727 for filename in serverlogs:
728 728 with open(filename, 'r') as logfile:
729 729 line = logfile.readline()
730 730 while line:
731 731 parts = line.strip().split(';')
732 732 op = parts[1]
733 733 if op == 'cg':
734 734 pass
735 735 elif op == 'cgss':
736 736 doit(parts[2].split(' '), parts[3].split(' '))
737 737 elif op == 'unb':
738 738 doit(parts[3].split(' '), parts[2].split(' '))
739 739 line = logfile.readline()
740 740 else:
741 741 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
742 742 opts.get('remote_head'))
743 743 localrevs = opts.get('local_head')
744 744 doit(localrevs, remoterevs)
745 745
746 746 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
747 747 def debugextensions(ui, **opts):
748 748 '''show information about active extensions'''
749 749 opts = pycompat.byteskwargs(opts)
750 750 exts = extensions.extensions(ui)
751 751 hgver = util.version()
752 752 fm = ui.formatter('debugextensions', opts)
753 753 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
754 754 isinternal = extensions.ismoduleinternal(extmod)
755 755 extsource = pycompat.fsencode(extmod.__file__)
756 756 if isinternal:
757 757 exttestedwith = [] # never expose magic string to users
758 758 else:
759 759 exttestedwith = getattr(extmod, 'testedwith', '').split()
760 760 extbuglink = getattr(extmod, 'buglink', None)
761 761
762 762 fm.startitem()
763 763
764 764 if ui.quiet or ui.verbose:
765 765 fm.write('name', '%s\n', extname)
766 766 else:
767 767 fm.write('name', '%s', extname)
768 768 if isinternal or hgver in exttestedwith:
769 769 fm.plain('\n')
770 770 elif not exttestedwith:
771 771 fm.plain(_(' (untested!)\n'))
772 772 else:
773 773 lasttestedversion = exttestedwith[-1]
774 774 fm.plain(' (%s!)\n' % lasttestedversion)
775 775
776 776 fm.condwrite(ui.verbose and extsource, 'source',
777 777 _(' location: %s\n'), extsource or "")
778 778
779 779 if ui.verbose:
780 780 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
781 781 fm.data(bundled=isinternal)
782 782
783 783 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
784 784 _(' tested with: %s\n'),
785 785 fm.formatlist(exttestedwith, name='ver'))
786 786
787 787 fm.condwrite(ui.verbose and extbuglink, 'buglink',
788 788 _(' bug reporting: %s\n'), extbuglink or "")
789 789
790 790 fm.end()
791 791
792 792 @command('debugfileset',
793 793 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
794 794 _('[-r REV] FILESPEC'))
795 795 def debugfileset(ui, repo, expr, **opts):
796 796 '''parse and apply a fileset specification'''
797 797 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
798 798 if ui.verbose:
799 799 tree = fileset.parse(expr)
800 800 ui.note(fileset.prettyformat(tree), "\n")
801 801
802 802 for f in ctx.getfileset(expr):
803 803 ui.write("%s\n" % f)
804 804
805 805 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
806 806 def debugfsinfo(ui, path="."):
807 807 """show information detected about current filesystem"""
808 808 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
809 809 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
810 810 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
811 811 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
812 812 casesensitive = '(unknown)'
813 813 try:
814 814 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
815 815 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
816 816 except OSError:
817 817 pass
818 818 ui.write(('case-sensitive: %s\n') % casesensitive)
819 819
820 820 @command('debuggetbundle',
821 821 [('H', 'head', [], _('id of head node'), _('ID')),
822 822 ('C', 'common', [], _('id of common node'), _('ID')),
823 823 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
824 824 _('REPO FILE [-H|-C ID]...'),
825 825 norepo=True)
826 826 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
827 827 """retrieves a bundle from a repo
828 828
829 829 Every ID must be a full-length hex node id string. Saves the bundle to the
830 830 given file.
831 831 """
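# An illustrative invocation (hypothetical; the URL and the head id below are
# invented placeholders, and the head id must be a full 40-character hex node):
#
#   hg debuggetbundle http://example.org/repo out.hg \
#       -H 0123456789abcdef0123456789abcdef01234567 -t bundle2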
832 832 opts = pycompat.byteskwargs(opts)
833 833 repo = hg.peer(ui, opts, repopath)
834 834 if not repo.capable('getbundle'):
835 835 raise error.Abort("getbundle() not supported by target repository")
836 836 args = {}
837 837 if common:
838 838 args[r'common'] = [bin(s) for s in common]
839 839 if head:
840 840 args[r'heads'] = [bin(s) for s in head]
841 841 # TODO: get desired bundlecaps from command line.
842 842 args[r'bundlecaps'] = None
843 843 bundle = repo.getbundle('debug', **args)
844 844
845 845 bundletype = opts.get('type', 'bzip2').lower()
846 846 btypes = {'none': 'HG10UN',
847 847 'bzip2': 'HG10BZ',
848 848 'gzip': 'HG10GZ',
849 849 'bundle2': 'HG20'}
850 850 bundletype = btypes.get(bundletype)
851 851 if bundletype not in bundle2.bundletypes:
852 852 raise error.Abort(_('unknown bundle type specified with --type'))
853 853 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
854 854
855 855 @command('debugignore', [], '[FILE]')
856 856 def debugignore(ui, repo, *files, **opts):
857 857 """display the combined ignore pattern and information about ignored files
858 858
859 859 With no argument display the combined ignore pattern.
860 860
861 861 Given space separated file names, show whether each given file is
862 862 ignored and, if so, the ignore rule (file and line number) that matched it.
863 863 """
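# An illustrative session (hypothetical file name and ignore rule, but the
# output format matches the code below):
#
#   $ hg debugignore foo.pyc
#   foo.pyc is ignored
#   (ignore rule in .hgignore, line 1: '\.pyc$')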
864 864 ignore = repo.dirstate._ignore
865 865 if not files:
866 866 # Show all the patterns
867 867 ui.write("%s\n" % repr(ignore))
868 868 else:
869 869 m = scmutil.match(repo[None], pats=files)
870 870 for f in m.files():
871 871 nf = util.normpath(f)
872 872 ignored = None
873 873 ignoredata = None
874 874 if nf != '.':
875 875 if ignore(nf):
876 876 ignored = nf
877 877 ignoredata = repo.dirstate._ignorefileandline(nf)
878 878 else:
879 879 for p in util.finddirs(nf):
880 880 if ignore(p):
881 881 ignored = p
882 882 ignoredata = repo.dirstate._ignorefileandline(p)
883 883 break
884 884 if ignored:
885 885 if ignored == nf:
886 886 ui.write(_("%s is ignored\n") % m.uipath(f))
887 887 else:
888 888 ui.write(_("%s is ignored because of "
889 889 "containing folder %s\n")
890 890 % (m.uipath(f), ignored))
891 891 ignorefile, lineno, line = ignoredata
892 892 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
893 893 % (ignorefile, lineno, line))
894 894 else:
895 895 ui.write(_("%s is not ignored\n") % m.uipath(f))
896 896
897 897 @command('debugindex', cmdutil.debugrevlogopts +
898 898 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
899 899 _('[-f FORMAT] -c|-m|FILE'),
900 900 optionalrepo=True)
901 901 def debugindex(ui, repo, file_=None, **opts):
902 902 """dump the contents of an index file"""
903 903 opts = pycompat.byteskwargs(opts)
904 904 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
905 905 format = opts.get('format', 0)
906 906 if format not in (0, 1):
907 907 raise error.Abort(_("unknown format %d") % format)
908 908
909 909 generaldelta = r.version & revlog.FLAG_GENERALDELTA
910 910 if generaldelta:
911 911 basehdr = ' delta'
912 912 else:
913 913 basehdr = ' base'
914 914
915 915 if ui.debugflag:
916 916 shortfn = hex
917 917 else:
918 918 shortfn = short
919 919
920 920 # There might not be anything in r, so have a sane default
921 921 idlen = 12
922 922 for i in r:
923 923 idlen = len(shortfn(r.node(i)))
924 924 break
925 925
926 926 if format == 0:
927 927 ui.write((" rev offset length " + basehdr + " linkrev"
928 928 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
929 929 elif format == 1:
930 930 ui.write((" rev flag offset length"
931 931 " size " + basehdr + " link p1 p2"
932 932 " %s\n") % "nodeid".rjust(idlen))
933 933
934 934 for i in r:
935 935 node = r.node(i)
936 936 if generaldelta:
937 937 base = r.deltaparent(i)
938 938 else:
939 939 base = r.chainbase(i)
940 940 if format == 0:
941 941 try:
942 942 pp = r.parents(node)
943 943 except Exception:
944 944 pp = [nullid, nullid]
945 945 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
946 946 i, r.start(i), r.length(i), base, r.linkrev(i),
947 947 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
948 948 elif format == 1:
949 949 pr = r.parentrevs(i)
950 950 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
951 951 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
952 952 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
953 953
954 954 @command('debugindexdot', cmdutil.debugrevlogopts,
955 955 _('-c|-m|FILE'), optionalrepo=True)
956 956 def debugindexdot(ui, repo, file_=None, **opts):
957 957 """dump an index DAG as a graphviz dot file"""
958 958 opts = pycompat.byteskwargs(opts)
959 959 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
960 960 ui.write(("digraph G {\n"))
961 961 for i in r:
962 962 node = r.node(i)
963 963 pp = r.parents(node)
964 964 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
965 965 if pp[1] != nullid:
966 966 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
967 967 ui.write("}\n")
968 968
969 969 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
970 970 def debuginstall(ui, **opts):
971 971 '''test Mercurial installation
972 972
973 973 Returns 0 on success.
974 974 '''
975 975 opts = pycompat.byteskwargs(opts)
976 976
977 977 def writetemp(contents):
978 978 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
979 979 f = os.fdopen(fd, pycompat.sysstr("wb"))
980 980 f.write(contents)
981 981 f.close()
982 982 return name
983 983
984 984 problems = 0
985 985
986 986 fm = ui.formatter('debuginstall', opts)
987 987 fm.startitem()
988 988
989 989 # encoding
990 990 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
991 991 err = None
992 992 try:
993 993 encoding.fromlocal("test")
994 994 except error.Abort as inst:
995 995 err = inst
996 996 problems += 1
997 997 fm.condwrite(err, 'encodingerror', _(" %s\n"
998 998 " (check that your locale is properly set)\n"), err)
999 999
1000 1000 # Python
1001 1001 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1002 1002 pycompat.sysexecutable)
1003 1003 fm.write('pythonver', _("checking Python version (%s)\n"),
1004 1004 ("%d.%d.%d" % sys.version_info[:3]))
1005 1005 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1006 1006 os.path.dirname(pycompat.fsencode(os.__file__)))
1007 1007
1008 1008 security = set(sslutil.supportedprotocols)
1009 1009 if sslutil.hassni:
1010 1010 security.add('sni')
1011 1011
1012 1012 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1013 1013 fm.formatlist(sorted(security), name='protocol',
1014 1014 fmt='%s', sep=','))
1015 1015
1016 1016 # These are warnings, not errors. So don't increment problem count. This
1017 1017 # may change in the future.
1018 1018 if 'tls1.2' not in security:
1019 1019 fm.plain(_(' TLS 1.2 not supported by Python install; '
1020 1020 'network connections lack modern security\n'))
1021 1021 if 'sni' not in security:
1022 1022 fm.plain(_(' SNI not supported by Python install; may have '
1023 1023 'connectivity issues with some servers\n'))
1024 1024
1025 1025 # TODO print CA cert info
1026 1026
1027 1027 # hg version
1028 1028 hgver = util.version()
1029 1029 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1030 1030 hgver.split('+')[0])
1031 1031 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1032 1032 '+'.join(hgver.split('+')[1:]))
1033 1033
1034 1034 # compiled modules
1035 1035 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1036 1036 policy.policy)
1037 1037 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1038 1038 os.path.dirname(pycompat.fsencode(__file__)))
1039 1039
1040 1040 if policy.policy in ('c', 'allow'):
1041 1041 err = None
1042 1042 try:
1043 1043 from .cext import (
1044 1044 base85,
1045 1045 bdiff,
1046 1046 mpatch,
1047 1047 osutil,
1048 1048 )
1049 1049 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1050 1050 except Exception as inst:
1051 1051 err = inst
1052 1052 problems += 1
1053 1053 fm.condwrite(err, 'extensionserror', " %s\n", err)
1054 1054
1055 1055 compengines = util.compengines._engines.values()
1056 1056 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1057 1057 fm.formatlist(sorted(e.name() for e in compengines),
1058 1058 name='compengine', fmt='%s', sep=', '))
1059 1059 fm.write('compenginesavail', _('checking available compression engines '
1060 1060 '(%s)\n'),
1061 1061 fm.formatlist(sorted(e.name() for e in compengines
1062 1062 if e.available()),
1063 1063 name='compengine', fmt='%s', sep=', '))
1064 1064 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1065 1065 fm.write('compenginesserver', _('checking available compression engines '
1066 1066 'for wire protocol (%s)\n'),
1067 1067 fm.formatlist([e.name() for e in wirecompengines
1068 1068 if e.wireprotosupport()],
1069 1069 name='compengine', fmt='%s', sep=', '))
1070 1070
1071 1071 # templates
1072 1072 p = templater.templatepaths()
1073 1073 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1074 1074 fm.condwrite(not p, '', _(" no template directories found\n"))
1075 1075 if p:
1076 1076 m = templater.templatepath("map-cmdline.default")
1077 1077 if m:
1078 1078 # template found, check if it is working
1079 1079 err = None
1080 1080 try:
1081 1081 templater.templater.frommapfile(m)
1082 1082 except Exception as inst:
1083 1083 err = inst
1084 1084 p = None
1085 1085 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1086 1086 else:
1087 1087 p = None
1088 1088 fm.condwrite(p, 'defaulttemplate',
1089 1089 _("checking default template (%s)\n"), m)
1090 1090 fm.condwrite(not m, 'defaulttemplatenotfound',
1091 1091 _(" template '%s' not found\n"), "default")
1092 1092 if not p:
1093 1093 problems += 1
1094 1094 fm.condwrite(not p, '',
1095 1095 _(" (templates seem to have been installed incorrectly)\n"))
1096 1096
1097 1097 # editor
1098 1098 editor = ui.geteditor()
1099 1099 editor = util.expandpath(editor)
1100 1100 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1101 1101 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1102 1102 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1103 1103 _(" No commit editor set and can't find %s in PATH\n"
1104 1104 " (specify a commit editor in your configuration"
1105 1105 " file)\n"), not cmdpath and editor == 'vi' and editor)
1106 1106 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1107 1107 _(" Can't find editor '%s' in PATH\n"
1108 1108 " (specify a commit editor in your configuration"
1109 1109 " file)\n"), not cmdpath and editor)
1110 1110 if not cmdpath and editor != 'vi':
1111 1111 problems += 1
1112 1112
1113 1113 # check username
1114 1114 username = None
1115 1115 err = None
1116 1116 try:
1117 1117 username = ui.username()
1118 1118 except error.Abort as e:
1119 1119 err = e
1120 1120 problems += 1
1121 1121
1122 1122 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1123 1123 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1124 1124 " (specify a username in your configuration file)\n"), err)
1125 1125
1126 1126 fm.condwrite(not problems, '',
1127 1127 _("no problems detected\n"))
1128 1128 if not problems:
1129 1129 fm.data(problems=problems)
1130 1130 fm.condwrite(problems, 'problems',
1131 1131 _("%d problems detected,"
1132 1132 " please check your install!\n"), problems)
1133 1133 fm.end()
1134 1134
1135 1135 return problems
1136 1136
1137 1137 @command('debugknown', [], _('REPO ID...'), norepo=True)
1138 1138 def debugknown(ui, repopath, *ids, **opts):
1139 1139 """test whether node ids are known to a repo
1140 1140
1141 1141 Every ID must be a full-length hex node id string. Returns a list of 0s
1142 1142 and 1s indicating unknown/known.
1143 1143 """
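# An illustrative session (hypothetical URL and node id; each id must be the
# full 40-character hex form, and the output is one "1" or "0" per id):
#
#   $ hg debugknown http://example.org/repo 0123456789abcdef0123456789abcdef01234567
#   0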
1144 1144 opts = pycompat.byteskwargs(opts)
1145 1145 repo = hg.peer(ui, opts, repopath)
1146 1146 if not repo.capable('known'):
1147 1147 raise error.Abort("known() not supported by target repository")
1148 1148 flags = repo.known([bin(s) for s in ids])
1149 1149 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1150 1150
1151 1151 @command('debuglabelcomplete', [], _('LABEL...'))
1152 1152 def debuglabelcomplete(ui, repo, *args):
1153 1153 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1154 1154 debugnamecomplete(ui, repo, *args)
1155 1155
1156 1156 @command('debuglocks',
1157 1157 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1158 1158 ('W', 'force-wlock', None,
1159 1159 _('free the working state lock (DANGEROUS)'))],
1160 1160 _('[OPTION]...'))
1161 1161 def debuglocks(ui, repo, **opts):
1162 1162 """show or modify state of locks
1163 1163
1164 1164 By default, this command will show which locks are held. This
1165 1165 includes the user and process holding the lock, the amount of time
1166 1166 the lock has been held, and the machine name where the process is
1167 1167 running if it's not local.
1168 1168
1169 1169 Locks protect the integrity of Mercurial's data, so should be
1170 1170 treated with care. System crashes or other interruptions may cause
1171 1171 locks to not be properly released, though Mercurial will usually
1172 1172 detect and remove such stale locks automatically.
1173 1173
1174 1174 However, detecting stale locks may not always be possible (for
1175 1175 instance, on a shared filesystem). Removing locks may also be
1176 1176 blocked by filesystem permissions.
1177 1177
1178 1178 Returns 0 if no locks are held.
1179 1179
1180 1180 """
1181 1181
1182 1182 if opts.get(r'force_lock'):
1183 1183 repo.svfs.unlink('lock')
1184 1184 if opts.get(r'force_wlock'):
1185 1185 repo.vfs.unlink('wlock')
1186 1186 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1187 1187 return 0
1188 1188
1189 1189 now = time.time()
1190 1190 held = 0
1191 1191
1192 1192 def report(vfs, name, method):
1193 1193 # this causes stale locks to get reaped for more accurate reporting
1194 1194 try:
1195 1195 l = method(False)
1196 1196 except error.LockHeld:
1197 1197 l = None
1198 1198
1199 1199 if l:
1200 1200 l.release()
1201 1201 else:
1202 1202 try:
1203 1203 stat = vfs.lstat(name)
1204 1204 age = now - stat.st_mtime
1205 1205 user = util.username(stat.st_uid)
1206 1206 locker = vfs.readlock(name)
1207 1207 if ":" in locker:
1208 1208 host, pid = locker.split(':')
1209 1209 if host == socket.gethostname():
1210 1210 locker = 'user %s, process %s' % (user, pid)
1211 1211 else:
1212 1212 locker = 'user %s, process %s, host %s' \
1213 1213 % (user, pid, host)
1214 1214 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1215 1215 return 1
1216 1216 except OSError as e:
1217 1217 if e.errno != errno.ENOENT:
1218 1218 raise
1219 1219
1220 1220 ui.write(("%-6s free\n") % (name + ":"))
1221 1221 return 0
1222 1222
1223 1223 held += report(repo.svfs, "lock", repo.lock)
1224 1224 held += report(repo.vfs, "wlock", repo.wlock)
1225 1225
1226 1226 return held
1227 1227
1228 1228 @command('debugmergestate', [], '')
1229 1229 def debugmergestate(ui, repo, *args):
1230 1230 """print merge state
1231 1231
1232 1232 Use --verbose to print out information about whether v1 or v2 merge state
1233 1233 was chosen."""
1234 1234 def _hashornull(h):
1235 1235 if h == nullhex:
1236 1236 return 'null'
1237 1237 else:
1238 1238 return h
1239 1239
1240 1240 def printrecords(version):
1241 1241 ui.write(('* version %s records\n') % version)
1242 1242 if version == 1:
1243 1243 records = v1records
1244 1244 else:
1245 1245 records = v2records
1246 1246
1247 1247 for rtype, record in records:
1248 1248 # pretty print some record types
1249 1249 if rtype == 'L':
1250 1250 ui.write(('local: %s\n') % record)
1251 1251 elif rtype == 'O':
1252 1252 ui.write(('other: %s\n') % record)
1253 1253 elif rtype == 'm':
1254 1254 driver, mdstate = record.split('\0', 1)
1255 1255 ui.write(('merge driver: %s (state "%s")\n')
1256 1256 % (driver, mdstate))
1257 1257 elif rtype in 'FDC':
1258 1258 r = record.split('\0')
1259 1259 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1260 1260 if version == 1:
1261 1261 onode = 'not stored in v1 format'
1262 1262 flags = r[7]
1263 1263 else:
1264 1264 onode, flags = r[7:9]
1265 1265 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1266 1266 % (f, rtype, state, _hashornull(hash)))
1267 1267 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1268 1268 ui.write((' ancestor path: %s (node %s)\n')
1269 1269 % (afile, _hashornull(anode)))
1270 1270 ui.write((' other path: %s (node %s)\n')
1271 1271 % (ofile, _hashornull(onode)))
1272 1272 elif rtype == 'f':
1273 1273 filename, rawextras = record.split('\0', 1)
1274 1274 extras = rawextras.split('\0')
1275 1275 i = 0
1276 1276 extrastrings = []
1277 1277 while i < len(extras):
1278 1278 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1279 1279 i += 2
1280 1280
1281 1281 ui.write(('file extras: %s (%s)\n')
1282 1282 % (filename, ', '.join(extrastrings)))
1283 1283 elif rtype == 'l':
1284 1284 labels = record.split('\0', 2)
1285 1285 labels = [l for l in labels if len(l) > 0]
1286 1286 ui.write(('labels:\n'))
1287 1287 ui.write((' local: %s\n' % labels[0]))
1288 1288 ui.write((' other: %s\n' % labels[1]))
1289 1289 if len(labels) > 2:
1290 1290 ui.write((' base: %s\n' % labels[2]))
1291 1291 else:
1292 1292 ui.write(('unrecognized entry: %s\t%s\n')
1293 1293 % (rtype, record.replace('\0', '\t')))
1294 1294
1295 1295 # Avoid mergestate.read() since it may raise an exception for unsupported
1296 1296 # merge state records. We shouldn't be doing this, but this is OK since this
1297 1297 # command is pretty low-level.
1298 1298 ms = mergemod.mergestate(repo)
1299 1299
1300 1300 # sort so that reasonable information is on top
1301 1301 v1records = ms._readrecordsv1()
1302 1302 v2records = ms._readrecordsv2()
1303 1303 order = 'LOml'
1304 1304 def key(r):
1305 1305 idx = order.find(r[0])
1306 1306 if idx == -1:
1307 1307 return (1, r[1])
1308 1308 else:
1309 1309 return (0, idx)
1310 1310 v1records.sort(key=key)
1311 1311 v2records.sort(key=key)
1312 1312
1313 1313 if not v1records and not v2records:
1314 1314 ui.write(('no merge state found\n'))
1315 1315 elif not v2records:
1316 1316 ui.note(('no version 2 merge state\n'))
1317 1317 printrecords(1)
1318 1318 elif ms._v1v2match(v1records, v2records):
1319 1319 ui.note(('v1 and v2 states match: using v2\n'))
1320 1320 printrecords(2)
1321 1321 else:
1322 1322 ui.note(('v1 and v2 states mismatch: using v1\n'))
1323 1323 printrecords(1)
1324 1324 if ui.verbose:
1325 1325 printrecords(2)
1326 1326
1327 1327 @command('debugnamecomplete', [], _('NAME...'))
1328 1328 def debugnamecomplete(ui, repo, *args):
1329 1329 '''complete "names" - tags, open branch names, bookmark names'''
1330 1330
1331 1331 names = set()
1332 1332 # since we previously only listed open branches, we will handle that
1333 1333 # specially (after this for loop)
1334 1334 for name, ns in repo.names.iteritems():
1335 1335 if name != 'branches':
1336 1336 names.update(ns.listnames(repo))
1337 1337 names.update(tag for (tag, heads, tip, closed)
1338 1338 in repo.branchmap().iterbranches() if not closed)
1339 1339 completions = set()
1340 1340 if not args:
1341 1341 args = ['']
1342 1342 for a in args:
1343 1343 completions.update(n for n in names if n.startswith(a))
1344 1344 ui.write('\n'.join(sorted(completions)))
1345 1345 ui.write('\n')
1346 1346
1347 1347 @command('debugobsolete',
1348 1348 [('', 'flags', 0, _('markers flag')),
1349 1349 ('', 'record-parents', False,
1350 1350 _('record parent information for the precursor')),
1351 1351 ('r', 'rev', [], _('display markers relevant to REV')),
1352 1352 ('', 'exclusive', False, _('restrict display to markers only '
1353 1353 'relevant to REV')),
1354 1354 ('', 'index', False, _('display index of the marker')),
1355 1355 ('', 'delete', [], _('delete markers specified by indices')),
1356 1356 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1357 1357 _('[OBSOLETED [REPLACEMENT ...]]'))
1358 1358 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1359 1359 """create arbitrary obsolete marker
1360 1360
1361 1361 With no arguments, displays the list of obsolescence markers."""
1362 1362
1363 1363 opts = pycompat.byteskwargs(opts)
1364 1364
1365 1365 def parsenodeid(s):
1366 1366 try:
1367 1367 # We do not use revsingle/revrange functions here to accept
1368 1368 # arbitrary node identifiers, possibly not present in the
1369 1369 # local repository.
1370 1370 n = bin(s)
1371 1371 if len(n) != len(nullid):
1372 1372 raise TypeError()
1373 1373 return n
1374 1374 except TypeError:
1375 1375 raise error.Abort('changeset references must be full hexadecimal '
1376 1376 'node identifiers')
1377 1377
1378 1378 if opts.get('delete'):
1379 1379 indices = []
1380 1380 for v in opts.get('delete'):
1381 1381 try:
1382 1382 indices.append(int(v))
1383 1383 except ValueError:
1384 1384 raise error.Abort(_('invalid index value: %r') % v,
1385 1385 hint=_('use integers for indices'))
1386 1386
1387 1387 if repo.currenttransaction():
1388 1388 raise error.Abort(_('cannot delete obsmarkers in the middle '
1389 1389 'of a transaction.'))
1390 1390
1391 1391 with repo.lock():
1392 1392 n = repair.deleteobsmarkers(repo.obsstore, indices)
1393 1393 ui.write(_('deleted %i obsolescence markers\n') % n)
1394 1394
1395 1395 return
1396 1396
1397 1397 if precursor is not None:
1398 1398 if opts['rev']:
1399 1399 raise error.Abort('cannot select revision when creating marker')
1400 1400 metadata = {}
1401 1401 metadata['user'] = opts['user'] or ui.username()
1402 1402 succs = tuple(parsenodeid(succ) for succ in successors)
1403 1403 l = repo.lock()
1404 1404 try:
1405 1405 tr = repo.transaction('debugobsolete')
1406 1406 try:
1407 1407 date = opts.get('date')
1408 1408 if date:
1409 1409 date = util.parsedate(date)
1410 1410 else:
1411 1411 date = None
1412 1412 prec = parsenodeid(precursor)
1413 1413 parents = None
1414 1414 if opts['record_parents']:
1415 1415 if prec not in repo.unfiltered():
1416 1416 raise error.Abort('cannot use --record-parents on '
1417 1417 'unknown changesets')
1418 1418 parents = repo.unfiltered()[prec].parents()
1419 1419 parents = tuple(p.node() for p in parents)
1420 1420 repo.obsstore.create(tr, prec, succs, opts['flags'],
1421 1421 parents=parents, date=date,
1422 1422 metadata=metadata, ui=ui)
1423 1423 tr.close()
1424 1424 except ValueError as exc:
1425 1425 raise error.Abort(_('bad obsmarker input: %s') % exc)
1426 1426 finally:
1427 1427 tr.release()
1428 1428 finally:
1429 1429 l.release()
1430 1430 else:
1431 1431 if opts['rev']:
1432 1432 revs = scmutil.revrange(repo, opts['rev'])
1433 1433 nodes = [repo[r].node() for r in revs]
1434 1434 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1435 1435 exclusive=opts['exclusive']))
1436 1436 markers.sort(key=lambda x: x._data)
1437 1437 else:
1438 1438 markers = obsutil.getmarkers(repo)
1439 1439
1440 1440 markerstoiter = markers
1441 1441 isrelevant = lambda m: True
1442 1442 if opts.get('rev') and opts.get('index'):
1443 1443 markerstoiter = obsutil.getmarkers(repo)
1444 1444 markerset = set(markers)
1445 1445 isrelevant = lambda m: m in markerset
1446 1446
1447 1447 fm = ui.formatter('debugobsolete', opts)
1448 1448 for i, m in enumerate(markerstoiter):
1449 1449 if not isrelevant(m):
1450 1450 # marker can be irrelevant when we're iterating over a set
1451 1451 # of markers (markerstoiter) which is bigger than the set
1452 1452 # of markers we want to display (markers)
1453 1453 # this can happen if both --index and --rev options are
1454 1454 # provided and thus we need to iterate over all of the markers
1455 1455 # to get the correct indices, but only display the ones that
1456 1456 # are relevant to --rev value
1457 1457 continue
1458 1458 fm.startitem()
1459 1459 ind = i if opts.get('index') else None
1460 1460 cmdutil.showmarker(fm, m, index=ind)
1461 1461 fm.end()
1462 1462
1463 1463 @command('debugpathcomplete',
1464 1464 [('f', 'full', None, _('complete an entire path')),
1465 1465 ('n', 'normal', None, _('show only normal files')),
1466 1466 ('a', 'added', None, _('show only added files')),
1467 1467 ('r', 'removed', None, _('show only removed files'))],
1468 1468 _('FILESPEC...'))
1469 1469 def debugpathcomplete(ui, repo, *specs, **opts):
1470 1470 '''complete part or all of a tracked path
1471 1471
1472 1472 This command supports shells that offer path name completion. It
1473 1473 currently completes only files already known to the dirstate.
1474 1474
1475 1475 Completion extends only to the next path segment unless
1476 1476 --full is specified, in which case entire paths are used.'''
1477 1477
1478 1478 def complete(path, acceptable):
1479 1479 dirstate = repo.dirstate
1480 1480 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1481 1481 rootdir = repo.root + pycompat.ossep
1482 1482 if spec != repo.root and not spec.startswith(rootdir):
1483 1483 return [], []
1484 1484 if os.path.isdir(spec):
1485 1485 spec += '/'
1486 1486 spec = spec[len(rootdir):]
1487 1487 fixpaths = pycompat.ossep != '/'
1488 1488 if fixpaths:
1489 1489 spec = spec.replace(pycompat.ossep, '/')
1490 1490 speclen = len(spec)
1491 1491 fullpaths = opts[r'full']
1492 1492 files, dirs = set(), set()
1493 1493 adddir, addfile = dirs.add, files.add
1494 1494 for f, st in dirstate.iteritems():
1495 1495 if f.startswith(spec) and st[0] in acceptable:
1496 1496 if fixpaths:
1497 1497 f = f.replace('/', pycompat.ossep)
1498 1498 if fullpaths:
1499 1499 addfile(f)
1500 1500 continue
1501 1501 s = f.find(pycompat.ossep, speclen)
1502 1502 if s >= 0:
1503 1503 adddir(f[:s])
1504 1504 else:
1505 1505 addfile(f)
1506 1506 return files, dirs
1507 1507
1508 1508 acceptable = ''
1509 1509 if opts[r'normal']:
1510 1510 acceptable += 'nm'
1511 1511 if opts[r'added']:
1512 1512 acceptable += 'a'
1513 1513 if opts[r'removed']:
1514 1514 acceptable += 'r'
1515 1515 cwd = repo.getcwd()
1516 1516 if not specs:
1517 1517 specs = ['.']
1518 1518
1519 1519 files, dirs = set(), set()
1520 1520 for spec in specs:
1521 1521 f, d = complete(spec, acceptable or 'nmar')
1522 1522 files.update(f)
1523 1523 dirs.update(d)
1524 1524 files.update(dirs)
1525 1525 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1526 1526 ui.write('\n')
1527 1527
1528 1528 @command('debugpickmergetool',
1529 1529 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1530 1530 ('', 'changedelete', None, _('emulate merging change and delete')),
1531 1531 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1532 1532 _('[PATTERN]...'),
1533 1533 inferrepo=True)
1534 1534 def debugpickmergetool(ui, repo, *pats, **opts):
1535 1535 """examine which merge tool is chosen for specified file
1536 1536
1537 1537 As described in :hg:`help merge-tools`, Mercurial examines the
1538 1538 configurations below in this order to decide which merge tool is
1539 1539 chosen for the specified file.
1540 1540
1541 1541 1. ``--tool`` option
1542 1542 2. ``HGMERGE`` environment variable
1543 1543 3. configurations in ``merge-patterns`` section
1544 1544 4. configuration of ``ui.merge``
1545 1545 5. configurations in ``merge-tools`` section
1546 1546 6. ``hgmerge`` tool (for historical reasons only)
1547 1547 7. default tool for fallback (``:merge`` or ``:prompt``)
1548 1548
1549 1549 This command writes out the examination result in the style below::
1550 1550
1551 1551 FILE = MERGETOOL
1552 1552
1553 1553 By default, all files known in the first parent context of the
1554 1554 working directory are examined. Use file patterns and/or -I/-X
1555 1555 options to limit target files. -r/--rev is also useful to examine
1556 1556 files in another context without actually updating to it.
1557 1557
1558 1558 With --debug, this command also shows warning messages while matching
1559 1559 against ``merge-patterns`` and so on. It is recommended to
1560 1560 use this option with explicit file patterns and/or -I/-X options,
1561 1561 because it increases the amount of output per file according
1562 1562 to the configurations in hgrc.
1563 1563
1564 1564 With -v/--verbose, this command first shows the configurations
1565 1565 below (only those that are specified).
1566 1566
1567 1567 - ``--tool`` option
1568 1568 - ``HGMERGE`` environment variable
1569 1569 - configuration of ``ui.merge``
1570 1570
1571 1571 If a merge tool is chosen before matching against
1572 1572 ``merge-patterns``, this command can't show any helpful
1573 1573 information, even with --debug. In such a case, the information
1574 1574 above is useful for understanding why a merge tool was chosen.
1575 1575 """
1576 1576 opts = pycompat.byteskwargs(opts)
1577 1577 overrides = {}
1578 1578 if opts['tool']:
1579 1579 overrides[('ui', 'forcemerge')] = opts['tool']
1580 1580 ui.note(('with --tool %r\n') % (opts['tool']))
1581 1581
1582 1582 with ui.configoverride(overrides, 'debugmergepatterns'):
1583 1583 hgmerge = encoding.environ.get("HGMERGE")
1584 1584 if hgmerge is not None:
1585 1585 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1586 1586 uimerge = ui.config("ui", "merge")
1587 1587 if uimerge:
1588 1588 ui.note(('with ui.merge=%r\n') % (uimerge))
1589 1589
1590 1590 ctx = scmutil.revsingle(repo, opts.get('rev'))
1591 1591 m = scmutil.match(ctx, pats, opts)
1592 1592 changedelete = opts['changedelete']
1593 1593 for path in ctx.walk(m):
1594 1594 fctx = ctx[path]
1595 1595 try:
1596 1596 if not ui.debugflag:
1597 1597 ui.pushbuffer(error=True)
1598 1598 tool, toolpath = filemerge._picktool(repo, ui, path,
1599 1599 fctx.isbinary(),
1600 1600 'l' in fctx.flags(),
1601 1601 changedelete)
1602 1602 finally:
1603 1603 if not ui.debugflag:
1604 1604 ui.popbuffer()
1605 1605 ui.write(('%s = %s\n') % (path, tool))
1606 1606
1607 1607 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1608 1608 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1609 1609 '''access the pushkey key/value protocol
1610 1610
1611 1611 With two args, list the keys in the given namespace.
1612 1612
1613 1613 With five args, set a key to new if it currently is set to old.
1614 1614 Reports success or failure.
1615 1615 '''
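# Illustrative usage (hypothetical URL): "hg debugpushkey http://example.com/repo
# bookmarks" lists the remote bookmarks; adding KEY OLD NEW would attempt to
# update that key instead.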
1616 1616
1617 1617 target = hg.peer(ui, {}, repopath)
1618 1618 if keyinfo:
1619 1619 key, old, new = keyinfo
1620 1620 r = target.pushkey(namespace, key, old, new)
1621 1621 ui.status(str(r) + '\n')
1622 1622 return not r
1623 1623 else:
1624 1624 for k, v in sorted(target.listkeys(namespace).iteritems()):
1625 1625 ui.write("%s\t%s\n" % (util.escapestr(k),
1626 1626 util.escapestr(v)))
1627 1627
1628 1628 @command('debugpvec', [], _('A B'))
1629 1629 def debugpvec(ui, repo, a, b=None):
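# Compare the parent vectors ("pvecs") of two revisions: print both vectors,
# their depths, the depth delta, the Hamming distance, the pvec distance and
# the inferred relation (=, >, < or |).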
1630 1630 ca = scmutil.revsingle(repo, a)
1631 1631 cb = scmutil.revsingle(repo, b)
1632 1632 pa = pvec.ctxpvec(ca)
1633 1633 pb = pvec.ctxpvec(cb)
1634 1634 if pa == pb:
1635 1635 rel = "="
1636 1636 elif pa > pb:
1637 1637 rel = ">"
1638 1638 elif pa < pb:
1639 1639 rel = "<"
1640 1640 elif pa | pb:
1641 1641 rel = "|"
1642 1642 ui.write(_("a: %s\n") % pa)
1643 1643 ui.write(_("b: %s\n") % pb)
1644 1644 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1645 1645 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1646 1646 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1647 1647 pa.distance(pb), rel))
1648 1648
1649 1649 @command('debugrebuilddirstate|debugrebuildstate',
1650 1650 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1651 1651 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1652 1652 'the working copy parent')),
1653 1653 ],
1654 1654 _('[-r REV]'))
1655 1655 def debugrebuilddirstate(ui, repo, rev, **opts):
1656 1656 """rebuild the dirstate as it would look like for the given revision
1657 1657
1658 1658 If no revision is specified, the first parent of the working directory will be used.
1659 1659
1660 1660 The dirstate will be set to the files of the given revision.
1661 1661 The actual working directory content or existing dirstate
1662 1662 information such as adds or removes is not considered.
1663 1663
1664 1664 ``minimal`` will only rebuild the dirstate status for files that claim to be
1665 1665 tracked but are not in the parent manifest, or that exist in the parent
1666 1666 manifest but are not in the dirstate. It will not change adds, removes, or
1667 1667 modified files that are in the working copy parent.
1668 1668
1669 1669 One use of this command is to make the next :hg:`status` invocation
1670 1670 check the actual file content.
1671 1671 """
1672 1672 ctx = scmutil.revsingle(repo, rev)
1673 1673 with repo.wlock():
1674 1674 dirstate = repo.dirstate
1675 1675 changedfiles = None
1676 1676 # See command doc for what minimal does.
1677 1677 if opts.get(r'minimal'):
1678 1678 manifestfiles = set(ctx.manifest().keys())
1679 1679 dirstatefiles = set(dirstate)
1680 1680 manifestonly = manifestfiles - dirstatefiles
1681 1681 dsonly = dirstatefiles - manifestfiles
1682 1682 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1683 1683 changedfiles = manifestonly | dsnotadded
1684 1684
1685 1685 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1686 1686
1687 1687 @command('debugrebuildfncache', [], '')
1688 1688 def debugrebuildfncache(ui, repo):
1689 1689 """rebuild the fncache file"""
1690 1690 repair.rebuildfncache(ui, repo)
1691 1691
1692 1692 @command('debugrename',
1693 1693 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1694 1694 _('[-r REV] FILE'))
1695 1695 def debugrename(ui, repo, file1, *pats, **opts):
1696 1696 """dump rename information"""
1697 1697
1698 1698 opts = pycompat.byteskwargs(opts)
1699 1699 ctx = scmutil.revsingle(repo, opts.get('rev'))
1700 1700 m = scmutil.match(ctx, (file1,) + pats, opts)
1701 1701 for abs in ctx.walk(m):
1702 1702 fctx = ctx[abs]
1703 1703 o = fctx.filelog().renamed(fctx.filenode())
1704 1704 rel = m.rel(abs)
1705 1705 if o:
1706 1706 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1707 1707 else:
1708 1708 ui.write(_("%s not renamed\n") % rel)
1709 1709
1710 1710 @command('debugrevlog', cmdutil.debugrevlogopts +
1711 1711 [('d', 'dump', False, _('dump index data'))],
1712 1712 _('-c|-m|FILE'),
1713 1713 optionalrepo=True)
1714 1714 def debugrevlog(ui, repo, file_=None, **opts):
1715 1715 """show data and statistics about a revlog"""
1716 1716 opts = pycompat.byteskwargs(opts)
1717 1717 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1718 1718
1719 1719 if opts.get("dump"):
1720 1720 numrevs = len(r)
1721 1721 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1722 1722 " rawsize totalsize compression heads chainlen\n"))
1723 1723 ts = 0
1724 1724 heads = set()
1725 1725
1726 1726 for rev in xrange(numrevs):
1727 1727 dbase = r.deltaparent(rev)
1728 1728 if dbase == -1:
1729 1729 dbase = rev
1730 1730 cbase = r.chainbase(rev)
1731 1731 clen = r.chainlen(rev)
1732 1732 p1, p2 = r.parentrevs(rev)
1733 1733 rs = r.rawsize(rev)
1734 1734 ts = ts + rs
1735 1735 heads -= set(r.parentrevs(rev))
1736 1736 heads.add(rev)
1737 1737 try:
1738 1738 compression = ts / r.end(rev)
1739 1739 except ZeroDivisionError:
1740 1740 compression = 0
1741 1741 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1742 1742 "%11d %5d %8d\n" %
1743 1743 (rev, p1, p2, r.start(rev), r.end(rev),
1744 1744 r.start(dbase), r.start(cbase),
1745 1745 r.start(p1), r.start(p2),
1746 1746 rs, ts, compression, len(heads), clen))
1747 1747 return 0
1748 1748
1749 1749 v = r.version
1750 1750 format = v & 0xFFFF
1751 1751 flags = []
1752 1752 gdelta = False
1753 1753 if v & revlog.FLAG_INLINE_DATA:
1754 1754 flags.append('inline')
1755 1755 if v & revlog.FLAG_GENERALDELTA:
1756 1756 gdelta = True
1757 1757 flags.append('generaldelta')
1758 1758 if not flags:
1759 1759 flags = ['(none)']
1760 1760
1761 1761 nummerges = 0
1762 1762 numfull = 0
1763 1763 numprev = 0
1764 1764 nump1 = 0
1765 1765 nump2 = 0
1766 1766 numother = 0
1767 1767 nump1prev = 0
1768 1768 nump2prev = 0
1769 1769 chainlengths = []
1770 1770 chainbases = []
1771 1771 chainspans = []
1772 1772
1773 1773 datasize = [None, 0, 0]
1774 1774 fullsize = [None, 0, 0]
1775 1775 deltasize = [None, 0, 0]
1776 1776 chunktypecounts = {}
1777 1777 chunktypesizes = {}
1778 1778
1779 1779 def addsize(size, l):
1780 1780 if l[0] is None or size < l[0]:
1781 1781 l[0] = size
1782 1782 if size > l[1]:
1783 1783 l[1] = size
1784 1784 l[2] += size
1785 1785
1786 1786 numrevs = len(r)
1787 1787 for rev in xrange(numrevs):
1788 1788 p1, p2 = r.parentrevs(rev)
1789 1789 delta = r.deltaparent(rev)
1790 1790 if format > 0:
1791 1791 addsize(r.rawsize(rev), datasize)
1792 1792 if p2 != nullrev:
1793 1793 nummerges += 1
1794 1794 size = r.length(rev)
1795 1795 if delta == nullrev:
1796 1796 chainlengths.append(0)
1797 1797 chainbases.append(r.start(rev))
1798 1798 chainspans.append(size)
1799 1799 numfull += 1
1800 1800 addsize(size, fullsize)
1801 1801 else:
1802 1802 chainlengths.append(chainlengths[delta] + 1)
1803 1803 baseaddr = chainbases[delta]
1804 1804 revaddr = r.start(rev)
1805 1805 chainbases.append(baseaddr)
1806 1806 chainspans.append((revaddr - baseaddr) + size)
1807 1807 addsize(size, deltasize)
1808 1808 if delta == rev - 1:
1809 1809 numprev += 1
1810 1810 if delta == p1:
1811 1811 nump1prev += 1
1812 1812 elif delta == p2:
1813 1813 nump2prev += 1
1814 1814 elif delta == p1:
1815 1815 nump1 += 1
1816 1816 elif delta == p2:
1817 1817 nump2 += 1
1818 1818 elif delta != nullrev:
1819 1819 numother += 1
1820 1820
1821 1821 # Obtain data on the raw chunks in the revlog.
1822 1822 segment = r._getsegmentforrevs(rev, rev)[1]
1823 1823 if segment:
1824 1824 chunktype = bytes(segment[0:1])
1825 1825 else:
1826 1826 chunktype = 'empty'
1827 1827
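# The leading byte of a stored chunk identifies its compression: typically
# 'u' for data stored uncompressed and 'x' for zlib-compressed data (zlib
# streams begin with 0x78); zero-length chunks are counted as 'empty'.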
1828 1828 if chunktype not in chunktypecounts:
1829 1829 chunktypecounts[chunktype] = 0
1830 1830 chunktypesizes[chunktype] = 0
1831 1831
1832 1832 chunktypecounts[chunktype] += 1
1833 1833 chunktypesizes[chunktype] += size
1834 1834
1835 1835 # Adjust size min value for empty cases
1836 1836 for size in (datasize, fullsize, deltasize):
1837 1837 if size[0] is None:
1838 1838 size[0] = 0
1839 1839
1840 1840 numdeltas = numrevs - numfull
1841 1841 numoprev = numprev - nump1prev - nump2prev
1842 1842 totalrawsize = datasize[2]
1843 1843 datasize[2] /= numrevs
1844 1844 fulltotal = fullsize[2]
1845 1845 fullsize[2] /= numfull
1846 1846 deltatotal = deltasize[2]
1847 1847 if numrevs - numfull > 0:
1848 1848 deltasize[2] /= numrevs - numfull
1849 1849 totalsize = fulltotal + deltatotal
1850 1850 avgchainlen = sum(chainlengths) / numrevs
1851 1851 maxchainlen = max(chainlengths)
1852 1852 maxchainspan = max(chainspans)
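# compression ratio: total uncompressed (raw) size over total stored size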
1853 1853 compratio = 1
1854 1854 if totalsize:
1855 1855 compratio = totalrawsize / totalsize
1856 1856
1857 1857 basedfmtstr = '%%%dd\n'
1858 1858 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1859 1859
1860 1860 def dfmtstr(max):
1861 1861 return basedfmtstr % len(str(max))
1862 1862 def pcfmtstr(max, padding=0):
1863 1863 return basepcfmtstr % (len(str(max)), ' ' * padding)
1864 1864
1865 1865 def pcfmt(value, total):
1866 1866 if total:
1867 1867 return (value, 100 * float(value) / total)
1868 1868 else:
1869 1869 return value, 100.0
1870 1870
1871 1871 ui.write(('format : %d\n') % format)
1872 1872 ui.write(('flags : %s\n') % ', '.join(flags))
1873 1873
1874 1874 ui.write('\n')
1875 1875 fmt = pcfmtstr(totalsize)
1876 1876 fmt2 = dfmtstr(totalsize)
1877 1877 ui.write(('revisions : ') + fmt2 % numrevs)
1878 1878 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1879 1879 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1880 1880 ui.write(('revisions : ') + fmt2 % numrevs)
1881 1881 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1882 1882 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1883 1883 ui.write(('revision size : ') + fmt2 % totalsize)
1884 1884 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1885 1885 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1886 1886
1887 1887 def fmtchunktype(chunktype):
1888 1888 if chunktype == 'empty':
1889 1889 return ' %s : ' % chunktype
1890 1890 elif chunktype in pycompat.bytestr(string.ascii_letters):
1891 1891 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1892 1892 else:
1893 1893 return ' 0x%s : ' % hex(chunktype)
1894 1894
1895 1895 ui.write('\n')
1896 1896 ui.write(('chunks : ') + fmt2 % numrevs)
1897 1897 for chunktype in sorted(chunktypecounts):
1898 1898 ui.write(fmtchunktype(chunktype))
1899 1899 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1900 1900 ui.write(('chunks size : ') + fmt2 % totalsize)
1901 1901 for chunktype in sorted(chunktypecounts):
1902 1902 ui.write(fmtchunktype(chunktype))
1903 1903 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1904 1904
1905 1905 ui.write('\n')
1906 1906 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1907 1907 ui.write(('avg chain length : ') + fmt % avgchainlen)
1908 1908 ui.write(('max chain length : ') + fmt % maxchainlen)
1909 1909 ui.write(('max chain reach : ') + fmt % maxchainspan)
1910 1910 ui.write(('compression ratio : ') + fmt % compratio)
1911 1911
1912 1912 if format > 0:
1913 1913 ui.write('\n')
1914 1914 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1915 1915 % tuple(datasize))
1916 1916 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1917 1917 % tuple(fullsize))
1918 1918 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1919 1919 % tuple(deltasize))
1920 1920
1921 1921 if numdeltas > 0:
1922 1922 ui.write('\n')
1923 1923 fmt = pcfmtstr(numdeltas)
1924 1924 fmt2 = pcfmtstr(numdeltas, 4)
1925 1925 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1926 1926 if numprev > 0:
1927 1927 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1928 1928 numprev))
1929 1929 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1930 1930 numprev))
1931 1931 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1932 1932 numprev))
1933 1933 if gdelta:
1934 1934 ui.write(('deltas against p1 : ')
1935 1935 + fmt % pcfmt(nump1, numdeltas))
1936 1936 ui.write(('deltas against p2 : ')
1937 1937 + fmt % pcfmt(nump2, numdeltas))
1938 1938 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1939 1939 numdeltas))
1940 1940
1941 1941 @command('debugrevspec',
1942 1942 [('', 'optimize', None,
1943 1943 _('print parsed tree after optimizing (DEPRECATED)')),
1944 1944 ('', 'show-revs', True, _('print list of result revisions (default)')),
1945 1945 ('s', 'show-set', None, _('print internal representation of result set')),
1946 1946 ('p', 'show-stage', [],
1947 1947 _('print parsed tree at the given stage'), _('NAME')),
1948 1948 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1949 1949 ('', 'verify-optimized', False, _('verify optimized result')),
1950 1950 ],
1951 1951 ('REVSPEC'))
1952 1952 def debugrevspec(ui, repo, expr, **opts):
1953 1953 """parse and apply a revision specification
1954 1954
1955 1955 Use the -p/--show-stage option to print the parsed tree at the given stages.
1956 1956 Use -p all to print the tree at every stage.
1957 1957
1958 1958 Use the --no-show-revs option with -s or -p to print only the set
1959 1959 representation or the parsed tree, respectively.
1960 1960
1961 1961 Use --verify-optimized to compare the optimized result with the unoptimized
1962 1962 one. Returns 1 if the optimized result differs.
1963 1963 """
1964 1964 opts = pycompat.byteskwargs(opts)
1965 1965 aliases = ui.configitems('revsetalias')
1966 1966 stages = [
1967 1967 ('parsed', lambda tree: tree),
1968 1968 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
1969 1969 ui.warn)),
1970 1970 ('concatenated', revsetlang.foldconcat),
1971 1971 ('analyzed', revsetlang.analyze),
1972 1972 ('optimized', revsetlang.optimize),
1973 1973 ]
1974 1974 if opts['no_optimized']:
1975 1975 stages = stages[:-1]
1976 1976 if opts['verify_optimized'] and opts['no_optimized']:
1977 1977 raise error.Abort(_('cannot use --verify-optimized with '
1978 1978 '--no-optimized'))
1979 1979 stagenames = set(n for n, f in stages)
1980 1980
1981 1981 showalways = set()
1982 1982 showchanged = set()
1983 1983 if ui.verbose and not opts['show_stage']:
1984 1984 # show parsed tree by --verbose (deprecated)
1985 1985 showalways.add('parsed')
1986 1986 showchanged.update(['expanded', 'concatenated'])
1987 1987 if opts['optimize']:
1988 1988 showalways.add('optimized')
1989 1989 if opts['show_stage'] and opts['optimize']:
1990 1990 raise error.Abort(_('cannot use --optimize with --show-stage'))
1991 1991 if opts['show_stage'] == ['all']:
1992 1992 showalways.update(stagenames)
1993 1993 else:
1994 1994 for n in opts['show_stage']:
1995 1995 if n not in stagenames:
1996 1996 raise error.Abort(_('invalid stage name: %s') % n)
1997 1997 showalways.update(opts['show_stage'])
1998 1998
1999 1999 treebystage = {}
2000 2000 printedtree = None
2001 2001 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2002 2002 for n, f in stages:
2003 2003 treebystage[n] = tree = f(tree)
2004 2004 if n in showalways or (n in showchanged and tree != printedtree):
2005 2005 if opts['show_stage'] or n != 'parsed':
2006 2006 ui.write(("* %s:\n") % n)
2007 2007 ui.write(revsetlang.prettyformat(tree), "\n")
2008 2008 printedtree = tree
2009 2009
2010 2010 if opts['verify_optimized']:
2011 2011 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2012 2012 brevs = revset.makematcher(treebystage['optimized'])(repo)
2013 2013 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2014 2014 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2015 2015 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2016 2016 arevs = list(arevs)
2017 2017 brevs = list(brevs)
2018 2018 if arevs == brevs:
2019 2019 return 0
2020 2020 ui.write(('--- analyzed\n'), label='diff.file_a')
2021 2021 ui.write(('+++ optimized\n'), label='diff.file_b')
2022 2022 sm = difflib.SequenceMatcher(None, arevs, brevs)
2023 2023 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2024 2024 if tag in ('delete', 'replace'):
2025 2025 for c in arevs[alo:ahi]:
2026 2026 ui.write('-%s\n' % c, label='diff.deleted')
2027 2027 if tag in ('insert', 'replace'):
2028 2028 for c in brevs[blo:bhi]:
2029 2029 ui.write('+%s\n' % c, label='diff.inserted')
2030 2030 if tag == 'equal':
2031 2031 for c in arevs[alo:ahi]:
2032 2032 ui.write(' %s\n' % c)
2033 2033 return 1
2034 2034
2035 2035 func = revset.makematcher(tree)
2036 2036 revs = func(repo)
2037 2037 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2038 2038 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2039 2039 if not opts['show_revs']:
2040 2040 return
2041 2041 for c in revs:
2042 2042 ui.write("%s\n" % c)
2043 2043
2044 2044 @command('debugsetparents', [], _('REV1 [REV2]'))
2045 2045 def debugsetparents(ui, repo, rev1, rev2=None):
2046 2046 """manually set the parents of the current working directory
2047 2047
2048 2048 This is useful for writing repository conversion tools, but should
2049 2049 be used with care. For example, neither the working directory nor the
2050 2050 dirstate is updated, so file status may be incorrect after running this
2051 2051 command.
2052 2052
2053 2053 Returns 0 on success.
2054 2054 """
2055 2055
2056 2056 r1 = scmutil.revsingle(repo, rev1).node()
2057 2057 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2058 2058
2059 2059 with repo.wlock():
2060 2060 repo.setparents(r1, r2)
2061 2061
2062 2062 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2063 2063 def debugssl(ui, repo, source=None, **opts):
2064 2064 '''test a secure connection to a server
2065 2065
2066 2066 This builds the certificate chain for the server on Windows, installing the
2067 2067 missing intermediates and trusted root via Windows Update if necessary. It
2068 2068 does nothing on other platforms.
2069 2069
2070 2070 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2071 2071 that server is used. See :hg:`help urls` for more information.
2072 2072
2073 2073 If the update succeeds, retry the original operation. Otherwise, the cause
2074 2074 of the SSL error is likely another issue.
2075 2075 '''
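# Illustrative usage (hypothetical host), only meaningful on Windows per the
# check below:
#   hg debugssl https://hg.example.com/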
2076 2076 if pycompat.osname != 'nt':
2077 2077 raise error.Abort(_('certificate chain building is only possible on '
2078 2078 'Windows'))
2079 2079
2080 2080 if not source:
2081 if not repo:
2082 raise error.Abort(_("there is no Mercurial repository here, and no "
2083 "server specified"))
2081 2084 source = "default"
2082 elif not repo:
2083 raise error.Abort(_("there is no Mercurial repository here, and no "
2084 "server specified"))
2085 2085
2086 2086 source, branches = hg.parseurl(ui.expandpath(source))
2087 2087 url = util.url(source)
2088 2088 addr = None
2089 2089
2090 2090 if url.scheme == 'https':
2091 2091 addr = (url.host, url.port or 443)
2092 2092 elif url.scheme == 'ssh':
2093 2093 addr = (url.host, url.port or 22)
2094 2094 else:
2095 2095 raise error.Abort(_("only https and ssh connections are supported"))
2096 2096
2097 2097 from . import win32
2098 2098
2099 2099 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2100 2100 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2101 2101
2102 2102 try:
2103 2103 s.connect(addr)
2104 2104 cert = s.getpeercert(True)
2105 2105
2106 2106 ui.status(_('checking the certificate chain for %s\n') % url.host)
2107 2107
2108 2108 complete = win32.checkcertificatechain(cert, build=False)
2109 2109
2110 2110 if not complete:
2111 2111 ui.status(_('certificate chain is incomplete, updating... '))
2112 2112
2113 2113 if not win32.checkcertificatechain(cert):
2114 2114 ui.status(_('failed.\n'))
2115 2115 else:
2116 2116 ui.status(_('done.\n'))
2117 2117 else:
2118 2118 ui.status(_('full certificate chain is available\n'))
2119 2119 finally:
2120 2120 s.close()
2121 2121
2122 2122 @command('debugsub',
2123 2123 [('r', 'rev', '',
2124 2124 _('revision to check'), _('REV'))],
2125 2125 _('[-r REV] [REV]'))
2126 2126 def debugsub(ui, repo, rev=None):
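# Print the subrepository state of the given revision: one block per subrepo
# with its path, its source (from .hgsub) and its pinned revision (from
# .hgsubstate).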
2127 2127 ctx = scmutil.revsingle(repo, rev, None)
2128 2128 for k, v in sorted(ctx.substate.items()):
2129 2129 ui.write(('path %s\n') % k)
2130 2130 ui.write((' source %s\n') % v[0])
2131 2131 ui.write((' revision %s\n') % v[1])
2132 2132
2133 2133 @command('debugsuccessorssets',
2134 2134 [('', 'closest', False, _('return closest successors sets only'))],
2135 2135 _('[REV]'))
2136 2136 def debugsuccessorssets(ui, repo, *revs, **opts):
2137 2137 """show set of successors for revision
2138 2138
2139 2139 A successors set of changeset A is a consistent group of revisions that
2140 2140 succeed A. It contains non-obsolete changesets only unless the closest
2141 2141 successors sets are requested (see --closest).
2142 2142
2143 2143 In most cases a changeset A has a single successors set containing a single
2144 2144 successor (changeset A replaced by A').
2145 2145
2146 2146 A changeset that is made obsolete with no successors is called "pruned".
2147 2147 Such changesets have no successors sets at all.
2148 2148
2149 2149 A changeset that has been "split" will have a successors set containing
2150 2150 more than one successor.
2151 2151
2152 2152 A changeset that has been rewritten in multiple different ways is called
2153 2153 "divergent". Such changesets have multiple successor sets (each of which
2154 2154 may also be split, i.e. have multiple successors).
2155 2155
2156 2156 Results are displayed as follows::
2157 2157
2158 2158 <rev1>
2159 2159 <successors-1A>
2160 2160 <rev2>
2161 2161 <successors-2A>
2162 2162 <successors-2B1> <successors-2B2> <successors-2B3>
2163 2163
2164 2164 Here rev2 has two possible (i.e. divergent) successors sets. The first
2165 2165 holds one element, whereas the second holds three (i.e. the changeset has
2166 2166 been split).
2167 2167 """
2168 2168 # passed to successorssets caching computation from one call to another
2169 2169 cache = {}
2170 2170 ctx2str = str
2171 2171 node2str = short
2172 2172 if ui.debug():
2173 2173 def ctx2str(ctx):
2174 2174 return ctx.hex()
2175 2175 node2str = hex
2176 2176 for rev in scmutil.revrange(repo, revs):
2177 2177 ctx = repo[rev]
2178 2178 ui.write('%s\n'% ctx2str(ctx))
2179 2179 for succsset in obsutil.successorssets(repo, ctx.node(),
2180 2180 closest=opts['closest'],
2181 2181 cache=cache):
2182 2182 if succsset:
2183 2183 ui.write(' ')
2184 2184 ui.write(node2str(succsset[0]))
2185 2185 for node in succsset[1:]:
2186 2186 ui.write(' ')
2187 2187 ui.write(node2str(node))
2188 2188 ui.write('\n')
2189 2189
2190 2190 @command('debugtemplate',
2191 2191 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2192 2192 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2193 2193 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2194 2194 optionalrepo=True)
2195 2195 def debugtemplate(ui, repo, tmpl, **opts):
2196 2196 """parse and apply a template
2197 2197
2198 2198 If -r/--rev is given, the template is processed as a log template and
2199 2199 applied to the given changesets. Otherwise, it is processed as a generic
2200 2200 template.
2201 2201
2202 2202 Use --verbose to print the parsed tree.
2203 2203 """
2204 2204 revs = None
2205 2205 if opts[r'rev']:
2206 2206 if repo is None:
2207 2207 raise error.RepoError(_('there is no Mercurial repository here '
2208 2208 '(.hg not found)'))
2209 2209 revs = scmutil.revrange(repo, opts[r'rev'])
2210 2210
2211 2211 props = {}
2212 2212 for d in opts[r'define']:
2213 2213 try:
2214 2214 k, v = (e.strip() for e in d.split('=', 1))
2215 2215 if not k or k == 'ui':
2216 2216 raise ValueError
2217 2217 props[k] = v
2218 2218 except ValueError:
2219 2219 raise error.Abort(_('malformed keyword definition: %s') % d)
2220 2220
2221 2221 if ui.verbose:
2222 2222 aliases = ui.configitems('templatealias')
2223 2223 tree = templater.parse(tmpl)
2224 2224 ui.note(templater.prettyformat(tree), '\n')
2225 2225 newtree = templater.expandaliases(tree, aliases)
2226 2226 if newtree != tree:
2227 2227 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2228 2228
2229 2229 if revs is None:
2230 2230 t = formatter.maketemplater(ui, tmpl)
2231 2231 props['ui'] = ui
2232 2232 ui.write(t.render(props))
2233 2233 else:
2234 2234 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2235 2235 for r in revs:
2236 2236 displayer.show(repo[r], **pycompat.strkwargs(props))
2237 2237 displayer.close()
2238 2238
2239 2239 @command('debugupdatecaches', [])
2240 2240 def debugupdatecaches(ui, repo, *pats, **opts):
2241 2241 """warm all known caches in the repository"""
2242 2242 with repo.wlock(), repo.lock():
2243 2243 repo.updatecaches()
2244 2244
2245 2245 @command('debugupgraderepo', [
2246 2246 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2247 2247 ('', 'run', False, _('performs an upgrade')),
2248 2248 ])
2249 2249 def debugupgraderepo(ui, repo, run=False, optimize=None):
2250 2250 """upgrade a repository to use different features
2251 2251
2252 2252 If no arguments are specified, the repository is evaluated for upgrade
2253 2253 and a list of problems and potential optimizations is printed.
2254 2254
2255 2255 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2256 2256 can be influenced via additional arguments. More details will be provided
2257 2257 by the command output when run without ``--run``.
2258 2258
2259 2259 During the upgrade, the repository will be locked and no writes will be
2260 2260 allowed.
2261 2261
2262 2262 At the end of the upgrade, the repository may not be readable while new
2263 2263 repository data is swapped in. This window will be as long as it takes to
2264 2264 rename some directories inside the ``.hg`` directory. On most machines, this
2265 2265 should complete almost instantaneously and the chances of a consumer being
2266 2266 unable to access the repository should be low.
2267 2267 """
2268 2268 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2269 2269
2270 2270 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2271 2271 inferrepo=True)
2272 2272 def debugwalk(ui, repo, *pats, **opts):
2273 2273 """show how files match on given patterns"""
2274 2274 opts = pycompat.byteskwargs(opts)
2275 2275 m = scmutil.match(repo[None], pats, opts)
2276 2276 ui.write(('matcher: %r\n' % m))
2277 2277 items = list(repo[None].walk(m))
2278 2278 if not items:
2279 2279 return
2280 2280 f = lambda fn: fn
2281 2281 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2282 2282 f = lambda fn: util.normpath(fn)
2283 2283 fmt = 'f %%-%ds %%-%ds %%s' % (
2284 2284 max([len(abs) for abs in items]),
2285 2285 max([len(m.rel(abs)) for abs in items]))
2286 2286 for abs in items:
2287 2287 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2288 2288 ui.write("%s\n" % line.rstrip())
2289 2289
2290 2290 @command('debugwireargs',
2291 2291 [('', 'three', '', 'three'),
2292 2292 ('', 'four', '', 'four'),
2293 2293 ('', 'five', '', 'five'),
2294 2294 ] + cmdutil.remoteopts,
2295 2295 _('REPO [OPTIONS]... [ONE [TWO]]'),
2296 2296 norepo=True)
2297 2297 def debugwireargs(ui, repopath, *vals, **opts):
2298 2298 opts = pycompat.byteskwargs(opts)
2299 2299 repo = hg.peer(ui, opts, repopath)
2300 2300 for opt in cmdutil.remoteopts:
2301 2301 del opts[opt[1]]
2302 2302 args = {}
2303 2303 for k, v in opts.iteritems():
2304 2304 if v:
2305 2305 args[k] = v
2306 2306 # run twice to check that we don't mess up the stream for the next command
2307 2307 res1 = repo.debugwireargs(*vals, **args)
2308 2308 res2 = repo.debugwireargs(*vals, **args)
2309 2309 ui.write("%s\n" % res1)
2310 2310 if res1 != res2:
2311 2311 ui.warn("%s\n" % res2)