##// END OF EJS Templates
py3: use '%d' for integers instead of '%s' in byte-string formatting
Author: Pulkit Goyal
Changeset r36417:a24c57f1 on branch 'default'
parent child Browse files
Show More
@@ -1,2500 +1,2500
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import socket
18 18 import ssl
19 19 import string
20 20 import sys
21 21 import tempfile
22 22 import time
23 23
24 24 from .i18n import _
25 25 from .node import (
26 26 bin,
27 27 hex,
28 28 nullhex,
29 29 nullid,
30 30 nullrev,
31 31 short,
32 32 )
33 33 from . import (
34 34 bundle2,
35 35 changegroup,
36 36 cmdutil,
37 37 color,
38 38 context,
39 39 dagparser,
40 40 dagutil,
41 41 encoding,
42 42 error,
43 43 exchange,
44 44 extensions,
45 45 filemerge,
46 46 fileset,
47 47 formatter,
48 48 hg,
49 49 localrepo,
50 50 lock as lockmod,
51 51 logcmdutil,
52 52 merge as mergemod,
53 53 obsolete,
54 54 obsutil,
55 55 phases,
56 56 policy,
57 57 pvec,
58 58 pycompat,
59 59 registrar,
60 60 repair,
61 61 revlog,
62 62 revset,
63 63 revsetlang,
64 64 scmutil,
65 65 setdiscovery,
66 66 simplemerge,
67 67 smartset,
68 68 sslutil,
69 69 streamclone,
70 70 templater,
71 71 treediscovery,
72 72 upgrade,
73 73 url as urlmod,
74 74 util,
75 75 vfs as vfsmod,
76 76 )
77 77
78 78 release = lockmod.release
79 79
80 80 command = registrar.command()
81 81
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(anc), hex(anc)))
100 100
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (local file or URL), parse its header, and
    # apply the stream directly onto the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
107 107
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense on a pristine repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, only counts nodes)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1                 # rev id of the last node committed
        atbranch = 'default'    # branch applied to subsequent nodes
        nodeids = []            # maps DAG id -> committed node, for backrefs
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        # second parse pass: actually create the commits
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" file content
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # touch the line "owned" by this rev so merges are
                    # non-trivial but conflict-free
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file fully rewritten by every revision
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # one brand-new file per revision
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the second parent's "nf*" files so the
                        # merge keeps them
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # resolve DAG backrefs to previously committed node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # ":tag" element: remember it for .hg/localtags
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # "@branch" element: applies to subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
262 262
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """display the contents of changegroup 'gen'

    With 'all', every delta of every revlog chunk (changelog, manifest,
    filelogs) is listed; otherwise only changelog node ids are printed.
    'indent' prefixes each output line, for nesting under bundle2 parts.
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # consume the deltas of the current sub-stream of 'gen'
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # the headers must be read in stream order: changelog, manifest,
        # then one header per filelog until the empty terminator
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
291 291
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # the markers cannot be decoded; report the version and move on
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output matches that command
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
314 314
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary 'data'"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phasename))
323 323
def _quasirepr(thing):
    """Return a repr-like rendering of 'thing' with deterministic order.

    Mapping types are printed with sorted keys so the output is stable
    regardless of dict iteration order; anything else falls back to repr().
    """
    maptypes = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, maptypes):
        items = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return '{%s}' % b', '.join(items)
    return pycompat.bytestr(repr(thing))
329 329
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # optional --part-type filter: only show the named part types
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        # known part types get a detailed, indented dump via their helper
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
348 348
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
        ('', 'part-type', [], _('show only the named part type')),
        ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec string, nothing else
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        # dispatch on bundle format: bundle2 vs plain changegroup
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
367 367
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    # bundle2 capabilities are nested: one block per key, values indented
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)
386 386
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and the reverse: every manifest entry must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Bug fix: the original bound the message to a local named 'error',
        # shadowing the 'error' module, so 'error.Abort' then raised
        # AttributeError on a bytes object instead of the intended Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
414 414
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # dispatch: --style lists style labels, otherwise list raw colors
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
425 425
def _debugdisplaycolor(ui):
    """list every available color, each rendered in its own color"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, also pick up user-defined color/terminfo keys
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
443 443
def _debugdisplaystyle(ui):
    """list every configured style label with its effects, column-aligned"""
    ui.write(_('available style:\n'))
    # the widest label determines the column where effects start
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write('\n')
455 455
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    # generate the v1 stream and write it chunk by chunk to 'fname'
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
473 473
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # standalone index file: emit its DAG, labeling the listed revs
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield ('n', (rev, parents)) for each node, plus 'l' labels
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: walk the changelog of the current repository
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map rev -> list of tag names for 'l' (label) events
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
536 536
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # with -c/-m/--dir the single positional argument is the revision
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit the stored data without flag processing
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
552 552
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    # d is an integer pair (unixtime, tzoffset): format with '%d', which is
    # also the py3-safe conversion for integers in byte strings (the pasted
    # diff carried both the old '%s %s' line and this corrected one; only
    # the '%d %d' form is kept).
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
568 568
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used below: e[1]=compressed size,
        # e[2]=uncompressed size, e[3]=delta base rev, e[5]/e[6]=parents
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify what the delta base is relative to this rev
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta, the base is always self or the
            # previous revision
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # chains are numbered by order of first appearance of their base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        # on-disk span from the chain base to the end of this revision
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # simulate the sparse read and measure how much data each
            # hunk would pull in
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
710 710
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # dirstate entry tuple: (state, mode, size, mtime)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # mtime of -1 means "unset"; pad to the strftime column width
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # S_IFLNK bit: entry is a symbolic link
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
742 742
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            # legacy (pre-setdiscovery) tree-walking protocol
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # prune the common set down to its heads
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            # current sampling-based discovery
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
794 794
# copy buffer size for debugdownload (4 KiB)
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    # without --output, stream straight to the ui
    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        # copy in fixed-size chunks until EOF
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # only close destinations we opened ourselves (never the ui)
        if output:
            dest.close()
818 818
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # normal verbosity: append a compatibility note on the same line
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
864 864
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    # in verbose mode, show the parsed tree before evaluating it
    if ui.verbose:
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
877 877
@command('debugformat',
         [] + cmdutil.formatteropts,
        _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, at least the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the 'name:' cell so the value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # plain output shows booleans as yes/no; strings pass through
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # templated/json output keeps the raw values
        formatvalue = pycompat.identity

    # header line
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels highlighting repo-vs-config or repo-vs-default drift
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
940 940
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # Human-readable boolean rendering.
        return 'yes' if flag else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway file; the probe may fail on
    # read-only or otherwise restricted filesystems, in which case we
    # report '(unknown)'.
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
957 957
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # Translate the hex IDs from the command line into binary nodes.
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # Map the user-facing compression names onto on-disk bundle headers.
    typename = opts.get('type', 'bzip2').lower()
    headertypes = {'none': 'HG10UN',
                   'bzip2': 'HG10BZ',
                   'gzip': 'HG10GZ',
                   'bundle2': 'HG20'}
    bundletype = headertypes.get(typename)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
992 992
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No arguments: just dump the combined matcher.
        ui.write("%s\n" % repr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # A file is also ignored when one of its containing
                # directories matches a pattern; walk them outward.
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        # Report which ignore file and line produced the match.
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1034 1034
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    # Two table layouts are supported: 0 (legacy) and 1 (adds flag/size
    # columns and uses parent revisions instead of parent nodes).
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # With generaldelta the delta base is an arbitrary parent rather than
    # the chain base, so label the column accordingly.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # --debug shows full 40-char hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails (e.g. a
                # corrupt entry); this is a debug command, keep dumping.
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1091 1091
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rl:
        node = rl.node(rev)
        p1, p2 = rl.parents(node)
        # Emit one edge per (non-null) parent, pointing parent -> child.
        ui.write("\t%d -> %d\n" % (rl.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rl.rev(p2), rev))
    ui.write("}\n")
1106 1106
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # Write 'contents' to a fresh temp file and return its path.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # Number of problems found; also the command's exit status.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    # Only attempt to import the C extensions when the policy allows them.
    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                # 'p' doubles as the "templates OK" flag below.
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = pycompat.shlexsplit(editor, posix=not pycompat.iswindows)[0]
    # On Windows the editor may be quoted; strip the surrounding quotes.
    if pycompat.iswindows and editorbin[0] == '"' and editorbin[-1] == '"':
        editorbin = editorbin[1:-1]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = util.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1282 1282
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Convert hex IDs to binary nodes and query the peer in one round trip.
    nodes = [bin(s) for s in ids]
    flags = peer.known(nodes)
    ui.write("%s\n" % "".join("1" if known else "0" for known in flags))
1296 1296
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept for old completion scripts: delegate everything.
    debugnamecomplete(ui, repo, *args)
1301 1301
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: unconditionally delete the lock files
    # and stop; no reporting is done in that mode.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock/--set-wlock: hold the requested lock(s) until the user
    # answers the prompt (or the process is interrupted); the finally
    # clause guarantees release in every case.
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Report the state of one lock file; returns 1 if held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock, so nobody holds it: release and
            # report it as free below.
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between the attempt above
                # and the lstat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1398 1398
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null'.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump all records of the requested merge-state format version
        # (1 or 2); v1records/v2records come from the enclosing scope.
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # File records: NUL-separated fields; v2 adds the 'other'
                # node and flags at the end.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # Per-file extras: filename then alternating key/value
                # fields, all NUL-separated.
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # Merge labels: local, other, and optionally base.
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types ('L', 'O', 'm', 'l') sort first, in that
        # order; everything else sorts after, by record payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1497 1497
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branches are handled separately below so that only *open* branch
    # names are offered (matching historical behavior).
    for kind, ns in repo.names.iteritems():
        if kind != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # No arguments means "complete everything" (empty prefix).
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1517 1517
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index, then stop.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting 'precursor' in favor
        # of the given successors (possibly none, i.e. pruned).
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to those
        # relevant to --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1633 1633
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Anything outside this repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # The dirstate stores paths with '/' separators; on platforms with
        # a different separator, translate in both directions.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        for f, st in dirstate.iteritems():
            if not (f.startswith(spec) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            # Without --full, stop at the next path segment boundary.
            s = f.find(pycompat.ossep, speclen)
            if s >= 0:
                dirs.add(f[:s])
            else:
                files.add(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags;
    # no flag at all means "everything" ('nmar').
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        matchedfiles, matcheddirs = complete(spec, acceptable or 'nmar')
        files.update(matchedfiles)
        dirs.update(matcheddirs)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1698 1698
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    def yesno(flag):
        return _('yes') if flag else _('no')

    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({('devel', 'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    islocal = peer.local() is not None
    pushable = peer.canpush()

    ui.write(_('url: %s\n') % peer.url())
    ui.write(_('local: %s\n') % yesno(islocal))
    ui.write(_('pushable: %s\n') % yesno(pushable))
1717 1717
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # Route --tool through a config override so _picktool() sees it the
    # same way a real merge would (as ui.forcemerge).
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v/--verbose, report the other tool-selection inputs
        # (HGMERGE and ui.merge) when they are set.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, capture (and discard in the finally
                # block) any messages emitted during tool selection.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1796 1796
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        # pycompat.bytestr() instead of str(): on Python 3 str() returns
        # unicode, which cannot be concatenated with the bytes literal
        # '\n' below (consistent with the rest of this file's py3 work).
        ui.status(pycompat.bytestr(r) + '\n')
        # pushkey reports success truthily; the command exit code is the
        # inverse (0 on success).
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))
1817 1817
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec comparison details for two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Defensive default: previously 'rel' was left unbound when none
        # of the comparisons above held, which would make the final
        # ui.write() crash with a NameError.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1838 1838
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None means "rebuild status information for every file"
        narrowto = None
        if opts.get(r'minimal'):
            # See command doc for what minimal does.
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            missingfromds = inmanifest - indirstate
            dsonly = indirstate - inmanifest
            dsnotadded = {f for f in dsonly if ds[f] != 'a'}
            narrowto = missingfromds | dsnotadded

        ds.rebuild(ctx.node(), ctx.manifest(), narrowto)
1876 1876
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the actual work is delegated to the repair module.
    repair.rebuildfncache(ui, repo)
1881 1881
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() reports the copy source of this file revision, or a
        # false value when the file was not renamed
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(path)
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
1899 1899
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # With -d/--dump, emit one raw index row per revision and return
    # without computing the aggregate statistics below.
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 marks a full snapshot; report the revision itself
                # as its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # running head count: a revision replaces its parents in the
            # head set
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode the revlog version/flags word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters accumulated over all revisions.
    nummerges = 0
    numfull = 0       # full snapshots (delta parent is nullrev)
    numprev = 0       # deltas against the immediately preceding revision
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # Each size accumulator is [min, max, total]; min starts as None so
    # the first sample always initializes it.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot: starts a new delta chain
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # delta: extend the parent's chain and classify the base
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derive totals and convert the [2] slots from totals to averages.
    # NOTE(review): these are '/=' divisions — integer averages on
    # Python 2, true division on Python 3.
    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: dfmtstr() right-aligns a bare number,
    # pcfmtstr() a number plus its percentage of a total.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total) for the pcfmtstr format.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render the chunk-type label: printable compression markers are
        # shown both as hex and as the character itself.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    # Per-revision sizes only exist for format > 0 (see the datasize
    # accumulation above).
    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2130 2130
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Processing pipeline: each stage transforms the tree produced by the
    # previous one.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # showalways: stages printed unconditionally; showchanged: stages
    # printed only when their tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping every intermediate tree for possible use
    # by --verify-optimized below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed (unoptimized) and the optimized tree
        # and, if the revision lists differ, print a unified-style diff
        # and return 1.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2233 2233
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # the second parent defaults to the null revision when not given
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
2251 2251
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary.  It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used.  If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation.  Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        # fall back to the repository's default path
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # imported here rather than at the top of the file: the iswindows
    # check above guarantees we are on Windows when this runs
    from . import win32

    # CERT_NONE: we only want to fetch the peer's certificate, not
    # validate it
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # first pass: check only (build=False); second pass below
        # attempts to actually build/repair the chain
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2313 2313
@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # dump the substate entries of the requested revision, sorted by path
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2324 2324
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # cache shared across successorssets() calls to avoid recomputation
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % bytes(ctx))
        succssets = obsutil.successorssets(repo, ctx.node(),
                                           closest=opts[r'closest'],
                                           cache=cache)
        for succsset in succssets:
            # one line per successors set, each node prefixed by a space
            for node in succsset:
                ui.write(' ')
                ui.write(short(node))
            ui.write('\n')
2377 2377
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # -r requires a repository even though the command itself does not
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE template keyword definitions.  An empty key
    # and the reserved name 'ui' are rejected.
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            # also reached when the argument contains no '=' at all
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # print the parsed tree, and the alias-expanded tree if different
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # generic template: render once with the -D properties only
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        ui.write(t.render(props))
    else:
        # log template: render once per requested revision
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2426 2426
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # take both the working-copy lock and the store lock so that
    # updatecaches() may safely (re)write any cache file
    with repo.wlock(), repo.lock():
        repo.updatecaches()
2432 2432
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # thin wrapper: evaluation and the optional in-place upgrade are
    # implemented in the upgrade module
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2457 2457
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % m))
    items = list(repo[None].walk(m))
    if not items:
        return
    # honor ui.slash on platforms whose native separator is not '/'
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # column widths sized to the longest absolute and relative paths
    abswidth = max(len(p) for p in items)
    relwidth = max(len(m.rel(p)) for p in items)
    fmt = 'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for p in items:
        flag = 'exact' if m.exact(p) else ''
        line = fmt % (p, display(m.rel(p)), flag)
        ui.write("%s\n" % line.rstrip())
2477 2477
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # echo arguments through the wire protocol's debugwireargs command
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # strip the generic remote options; only this command's own options
    # get forwarded, and only those with a truthy value
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    args = dict((k, v) for k, v in opts.iteritems() if v)
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
@@ -1,788 +1,788
1 1 # tags.py - read tag info from local repository
2 2 #
3 3 # Copyright 2009 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2009 Greg Ward <greg@gerg.ca>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 # Currently this module only deals with reading and caching tags.
10 10 # Eventually, it could take care of updating (adding/removing/moving)
11 11 # tags too.
12 12
13 13 from __future__ import absolute_import
14 14
15 15 import errno
16 16
17 17 from .node import (
18 18 bin,
19 19 hex,
20 20 nullid,
21 21 short,
22 22 )
23 23 from .i18n import _
24 24 from . import (
25 25 encoding,
26 26 error,
27 27 match as matchmod,
28 28 scmutil,
29 29 util,
30 30 )
31 31
32 32 # Tags computation can be expensive and caches exist to make it fast in
33 33 # the common case.
34 34 #
35 35 # The "hgtagsfnodes1" cache file caches the .hgtags filenode values for
36 36 # each revision in the repository. The file is effectively an array of
37 37 # fixed length records. Read the docs for "hgtagsfnodescache" for technical
38 38 # details.
39 39 #
40 40 # The .hgtags filenode cache grows in proportion to the length of the
41 41 # changelog. The file is truncated when the # changelog is stripped.
42 42 #
43 43 # The purpose of the filenode cache is to avoid the most expensive part
44 44 # of finding global tags, which is looking up the .hgtags filenode in the
45 45 # manifest for each head. This can take dozens or over 100ms for
46 46 # repositories with very large manifests. Multiplied by dozens or even
47 47 # hundreds of heads and there is a significant performance concern.
48 48 #
49 49 # There also exist a separate cache file for each repository filter.
50 50 # These "tags-*" files store information about the history of tags.
51 51 #
52 52 # The tags cache files consists of a cache validation line followed by
53 53 # a history of tags.
54 54 #
55 55 # The cache validation line has the format:
56 56 #
57 57 # <tiprev> <tipnode> [<filteredhash>]
58 58 #
59 59 # <tiprev> is an integer revision and <tipnode> is a 40 character hex
60 60 # node for that changeset. These redundantly identify the repository
61 61 # tip from the time the cache was written. In addition, <filteredhash>,
62 62 # if present, is a 40 character hex hash of the contents of the filtered
63 63 # revisions for this filter. If the set of filtered revs changes, the
64 64 # hash will change and invalidate the cache.
65 65 #
66 66 # The history part of the tags cache consists of lines of the form:
67 67 #
68 68 # <node> <tag>
69 69 #
70 70 # (This format is identical to that of .hgtags files.)
71 71 #
72 72 # <tag> is the tag name and <node> is the 40 character hex changeset
73 73 # the tag is associated with.
74 74 #
75 75 # Tags are written sorted by tag name.
76 76 #
77 77 # Tags associated with multiple changesets have an entry for each changeset.
78 78 # The most recent changeset (in terms of revlog ordering for the head
79 79 # setting it) for each tag is last.
80 80
def fnoderevs(ui, repo, revs):
    """return the list of '.hgtags' fnodes used in a set revisions

    This is returned as list of unique fnodes. We use a list instead of a set
    because order matters when it comes to tags."""
    unfiltered = repo.unfiltered()
    torevnode = unfiltered.changelog.node
    nodes = [torevnode(rev) for rev in revs]
    # feed nodes to the lookup newest-first: reversed order helps the cache
    fnodemap = _getfnodes(ui, repo, list(reversed(nodes)))
    return _filterfnodes(fnodemap, nodes)
92 92
def _nulltonone(value):
    """convert nullid to None

    For tag value, nullid means "deleted". This small utility function helps
    translating that to None."""
    return None if value == nullid else value
101 101
def difftags(ui, repo, oldfnodes, newfnodes):
    """list differences between tags expressed in two set of file-nodes

    The list contains entries in the form: (tagname, oldvalue, new value).
    None is used to expressed missing value:
        ('foo', None, 'abcd') is a new tag,
        ('bar', 'ef01', None) is a deletion,
        ('baz', 'abcd', 'ef01') is a tag movement.
    """
    if oldfnodes == newfnodes:
        # identical fnode lists produce identical tag maps; skip the work
        return []
    oldtags = _tagsfromfnodes(ui, repo, oldfnodes)
    newtags = _tagsfromfnodes(ui, repo, newfnodes)

    entries = []
    # pass 1: tags present in the new state (additions and movements);
    # popping from oldtags leaves only the deletions behind
    for tag, (newnode, newhist) in newtags.items():
        newnode = _nulltonone(newnode)
        oldnode, oldhist = oldtags.pop(tag, (None, None))
        oldnode = _nulltonone(oldnode)
        if oldnode != newnode:
            entries.append((tag, oldnode, newnode))
    # pass 2: whatever is left in oldtags disappeared entirely
    for tag, (oldnode, oldhist) in oldtags.items():
        oldnode = _nulltonone(oldnode)
        if oldnode is not None:
            entries.append((tag, oldnode, None))
    entries.sort()
    return entries
131 131
def writediff(fp, difflist):
    """write tags diff information to a file.

    Data are stored with a line based format:

        <action> <hex-node> <tag-name>\n

    Action are defined as follow:
       -R tag is removed,
       +A tag is added,
       -M tag is moved (old value),
       +M tag is moved (new value),

    Example:

         +A 875517b4806a848f942811a315a5bce30804ae85 t5

    See documentation of difftags output for details about the input.
    """
    for tag, old, new in difflist:
        # translate binary nodes to hex for the on-disk representation
        hexold = hex(old) if old is not None else None
        hexnew = hex(new) if new is not None else None
        if hexold is None:
            # no previous value: the tag is brand new
            fp.write('+A %s %s\n' % (hexnew, tag))
        elif hexnew is None:
            # no new value: the tag was deleted
            fp.write('-R %s %s\n' % (hexold, tag))
        else:
            # tag moved: record both the old and the new value
            fp.write('-M %s %s\n' % (hexold, tag))
            fp.write('+M %s %s\n' % (hexnew, tag))
169 169
def findglobaltags(ui, repo):
    '''Find global tags in a repo: return a tagsmap

    tagsmap: tag name to (node, hist) 2-tuples.

    The tags cache is read and updated as a side-effect of calling.
    '''
    (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
    if cachetags is not None:
        # cache was fully up to date: use it directly, no write needed
        assert not shouldwrite
        # XXX is this really 100% correct? are there oddball special
        # cases where a global tag should outrank a local tag but won't,
        # because cachetags does not contain rank info?
        alltags = {}
        _updatetags(cachetags, alltags)
        return alltags

    # sanity check: every head returned by the cache must still exist
    for head in reversed(heads): # oldest to newest
        assert head in repo.changelog.nodemap, \
               "tag cache returned bogus head %s" % short(head)
    fnodes = _filterfnodes(tagfnode, reversed(heads))
    alltags = _tagsfromfnodes(ui, repo, fnodes)

    # and update the cache (if necessary)
    if shouldwrite:
        _writetagcache(ui, repo, valid, alltags)
    return alltags
197 197
198 198 def _filterfnodes(tagfnode, nodes):
199 199 """return a list of unique fnodes
200 200
201 201 The order of this list matches the order of "nodes". Preserving this order
202 202 is important as reading tags in different order provides different
203 203 results."""
204 204 seen = set() # set of fnode
205 205 fnodes = []
206 206 for no in nodes: # oldest to newest
207 207 fnode = tagfnode.get(no)
208 208 if fnode and fnode not in seen:
209 209 seen.add(fnode)
210 210 fnodes.append(fnode)
211 211 return fnodes
212 212
def _tagsfromfnodes(ui, repo, fnodes):
    """return a tagsmap from a list of file-node

    tagsmap: tag name to (node, hist) 2-tuples.

    The order of the list matters."""
    collected = {}
    hgtagsfctx = None
    for fnode in fnodes:
        # reuse the previous filectx when possible; a fresh repo.filectx
        # lookup is only needed for the first entry
        if hgtagsfctx is None:
            hgtagsfctx = repo.filectx('.hgtags', fileid=fnode)
        else:
            hgtagsfctx = hgtagsfctx.filectx(fnode)
        perfile = _readtags(ui, repo, hgtagsfctx.data().splitlines(),
                            hgtagsfctx)
        _updatetags(perfile, collected)
    return collected
229 229
def readlocaltags(ui, repo, alltags, tagtypes):
    '''Read local tags in repo. Update alltags and tagtypes.'''
    try:
        data = repo.vfs.read("localtags")
    except IOError as inst:
        # a missing localtags file simply means "no local tags"
        if inst.errno != errno.ENOENT:
            raise
        return

    # localtags is in the local encoding; re-encode to UTF-8 on
    # input for consistency with the rest of this module.
    filetags = _readtags(
        ui, repo, data.splitlines(), "localtags",
        recode=encoding.fromlocal)

    # remove tags pointing to invalid nodes
    cl = repo.changelog
    for t in list(filetags):
        try:
            cl.rev(filetags[t][0])
        except (LookupError, ValueError):
            # stale entry (e.g. tagged changeset was stripped): drop it
            del filetags[t]

    _updatetags(filetags, alltags, 'local', tagtypes)
254 254
def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
    '''Read tag definitions from a file (or any source of lines).

    This function returns two sortdicts with similar information:

    - the first dict, bintaghist, contains the tag information as expected by
      the _readtags function, i.e. a mapping from tag name to (node, hist):
        - node is the node id from the last line read for that name,
        - hist is the list of node ids previously associated with it (in file
          order). All node ids are binary, not hex.

    - the second dict, hextaglines, is a mapping from tag name to a list of
      [hexnode, line number] pairs, ordered from the oldest to the newest node.

    When calcnodelines is False the hextaglines dict is not calculated (an
    empty dict is returned). This is done to improve this function's
    performance in cases where the line numbers are not needed.
    '''

    bintaghist = util.sortdict()
    hextaglines = util.sortdict()
    count = 0

    def dbg(msg):
        # 'count' is an integer line number: use %d, not %s (py3-safe)
        ui.debug("%s, line %d: %s\n" % (fn, count, msg))

    for nline, line in enumerate(lines):
        count += 1
        if not line:
            continue
        try:
            (nodehex, name) = line.split(" ", 1)
        except ValueError:
            # line has no separator at all: not a tag entry
            dbg("cannot parse entry")
            continue
        name = name.strip()
        if recode:
            name = recode(name)
        try:
            nodebin = bin(nodehex)
        except TypeError:
            dbg("node '%s' is not well formed" % nodehex)
            continue

        # update filetags
        if calcnodelines:
            # map tag name to a list of line numbers
            if name not in hextaglines:
                hextaglines[name] = []
            hextaglines[name].append([nodehex, nline])
            continue
        # map tag name to (node, hist)
        if name not in bintaghist:
            bintaghist[name] = []
        bintaghist[name].append(nodebin)
    return bintaghist, hextaglines
311 311
def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
    '''Read tag definitions from a file (or any source of lines).

    Returns a mapping from tag name to (node, hist).

    "node" is the node id from the last line read for that name. "hist"
    is the list of node ids previously associated with it (in file order).
    All node ids are binary, not hex.
    '''
    taghist, unusedlines = _readtaghist(ui, repo, lines, fn, recode=recode,
                                        calcnodelines=calcnodelines)
    # Replacing entries in a util.sortdict is much slower than inserting
    # fresh ones, which matters when there are thousands of tags -- so
    # build a brand new sortdict rather than mutating the one we got.
    result = util.sortdict()
    for tagname, nodehistory in taghist.items():
        # last node read wins; everything before it becomes the history
        result[tagname] = (nodehistory[-1], nodehistory[:-1])
    return result
330 330
def _updatetags(filetags, alltags, tagtype=None, tagtypes=None):
    """Incorporate the tag info read from one file into dictionnaries

    The first one, 'alltags', is a "tagmaps" (see 'findglobaltags' for details).

    The second one, 'tagtypes', is optional and will be updated to track the
    "tagtype" of entries in the tagmaps. When set, the 'tagtype' argument also
    needs to be set."""
    if tagtype is None:
        # tagtype and tagtypes must be provided (or omitted) together
        assert tagtypes is None

    for name, nodehist in filetags.iteritems():
        if name not in alltags:
            # first occurrence of this tag: take it as-is
            alltags[name] = nodehist
            if tagtype is not None:
                tagtypes[name] = tagtype
            continue

        # we prefer alltags[name] if:
        #  it supersedes us OR
        #  mutual supersedes and it has a higher rank
        # otherwise we win because we're tip-most
        anode, ahist = nodehist
        bnode, bhist = alltags[name]
        if (bnode != anode and anode in bhist and
            (bnode not in ahist or len(bhist) > len(ahist))):
            anode = bnode
        elif tagtype is not None:
            tagtypes[name] = tagtype
        # merge the two histories, preserving order and dropping duplicates
        ahist.extend([n for n in bhist if n not in ahist])
        alltags[name] = anode, ahist
362 362
363 363 def _filename(repo):
364 364 """name of a tagcache file for a given repo or repoview"""
365 365 filename = 'tags2'
366 366 if repo.filtername:
367 367 filename = '%s-%s' % (filename, repo.filtername)
368 368 return filename
369 369
def _readtagcache(ui, repo):
    '''Read the tag cache.

    Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).

    If the cache is completely up-to-date, "cachetags" is a dict of the
    form returned by _readtags() and "heads", "fnodes", and "validinfo" are
    None and "shouldwrite" is False.

    If the cache is not up to date, "cachetags" is None. "heads" is a list
    of all heads currently in the repository, ordered from tip to oldest.
    "validinfo" is a tuple describing cache validation info. This is used
    when writing the tags cache. "fnodes" is a mapping from head to .hgtags
    filenode. "shouldwrite" is True.

    If the cache is not up to date, the caller is responsible for reading tag
    info from each returned head. (See findglobaltags().)
    '''
    try:
        cachefile = repo.cachevfs(_filename(repo), 'r')
        # force reading the file for static-http
        cachelines = iter(cachefile)
    except IOError:
        cachefile = None

    # parse the validation line: "<tiprev> <tipnode> [<filteredhash>]"
    cacherev = None
    cachenode = None
    cachehash = None
    if cachefile:
        try:
            validline = next(cachelines)
            validline = validline.split()
            cacherev = int(validline[0])
            cachenode = bin(validline[1])
            if len(validline) > 2:
                cachehash = bin(validline[2])
        except Exception:
            # corruption of the cache, just recompute it.
            pass

    tipnode = repo.changelog.tip()
    tiprev = len(repo.changelog) - 1

    # Case 1 (common): tip is the same, so nothing has changed.
    # (Unchanged tip trivially means no changesets have been added.
    # But, thanks to localrepository.destroyed(), it also means none
    # have been destroyed by strip or rollback.)
    if (cacherev == tiprev
            and cachenode == tipnode
            and cachehash == scmutil.filteredhash(repo, tiprev)):
        tags = _readtags(ui, repo, cachelines, cachefile.name)
        cachefile.close()
        return (None, None, None, tags, False)
    if cachefile:
        cachefile.close() # ignore rest of file

    valid = (tiprev, tipnode, scmutil.filteredhash(repo, tiprev))

    repoheads = repo.heads()
    # Case 2 (uncommon): empty repo; get out quickly and don't bother
    # writing an empty cache.
    if repoheads == [nullid]:
        return ([], {}, valid, {}, False)

    # Case 3 (uncommon): cache file missing or empty.

    # Case 4 (uncommon): tip rev decreased. This should only happen
    # when we're called from localrepository.destroyed(). Refresh the
    # cache so future invocations will not see disappeared heads in the
    # cache.

    # Case 5 (common): tip has changed, so we've added/replaced heads.

    # As it happens, the code to handle cases 3, 4, 5 is the same.

    # N.B. in case 4 (nodes destroyed), "new head" really means "newly
    # exposed".
    if not len(repo.file('.hgtags')):
        # No tags have ever been committed, so we can avoid a
        # potentially expensive search.
        return ([], {}, valid, None, True)


    # Now we have to lookup the .hgtags filenode for every new head.
    # This is the most expensive part of finding tags, so performance
    # depends primarily on the size of newheads. Worst case: no cache
    # file, so newheads == repoheads.
    cachefnode = _getfnodes(ui, repo, repoheads)

    # Caller has to iterate over all heads, but can use the filenodes in
    # cachefnode to get to each .hgtags revision quickly.
    return (repoheads, cachefnode, valid, None, True)
462 462
def _getfnodes(ui, repo, nodes):
    """return .hgtags fnodes for a list of changeset nodes

    Return value is a {node: fnode} mapping. There will be no entry for nodes
    without a '.hgtags' file.
    """
    begin = util.timer()
    cache = hgtagsfnodescache(repo.unfiltered())
    fnodemap = {}
    for node in reversed(nodes):
        fnode = cache.getfnode(node)
        # nullid means "no .hgtags at that revision": leave it out entirely
        if fnode != nullid:
            fnodemap[node] = fnode

    # persist anything the lookups above computed
    cache.write()

    elapsed = util.timer() - begin
    ui.log('tagscache',
           '%d/%d cache hits/lookups in %0.4f seconds\n',
           cache.hitcount, cache.lookupcount, elapsed)
    return fnodemap
485 485
def _writetagcache(ui, repo, valid, cachetags):
    """Write the tags cache file for the repo's current filter.

    'valid' is the (tiprev, tipnode, filteredhash) validation tuple produced
    by _readtagcache(); 'cachetags' maps tag name to (node, hist).
    Writing is best-effort: any I/O failure is silently ignored."""
    filename = _filename(repo)
    try:
        cachefile = repo.cachevfs(filename, 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing .hg/cache/%s with %d tags\n',
           filename, len(cachetags))

    # validation line: "<tiprev> <tipnode> [<filteredhash>]"
    if valid[2]:
        cachefile.write('%d %s %s\n' % (valid[0], hex(valid[1]), hex(valid[2])))
    else:
        cachefile.write('%d %s\n' % (valid[0], hex(valid[1])))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module. If we converted
    # them local encoding on input, we would lose info writing them to
    # the cache.
    for (name, (node, hist)) in sorted(cachetags.iteritems()):
        # older nodes first, current value last (same layout as .hgtags)
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))

    try:
        cachefile.close()
    except (OSError, IOError):
        pass
514 514
def tag(repo, names, node, message, local, user, date, editor=False):
    '''tag a revision with one or more symbolic names.

    names is a list of strings or, when adding a single tag, names may be a
    string.

    if local is True, the tags are stored in a per-repository file.
    otherwise, they are stored in the .hgtags file, and a new
    changeset is committed with the change.

    keyword arguments:

    local: whether to store tags in non-version-controlled file
    (default False)

    message: commit message to use if committing

    user: name of user to use if committing

    date: date tuple to use if committing'''

    if not local:
        m = matchmod.exact(repo.root, '', ['.hgtags'])
        # refuse to proceed if .hgtags has uncommitted changes: the commit
        # performed by _tag() below would silently sweep them in
        if any(repo.status(match=m, unknown=True, ignored=True)):
            raise error.Abort(_('working copy of .hgtags is changed'),
                              hint=_('please commit .hgtags manually'))

    with repo.wlock():
        repo.tags() # instantiate the cache
        _tag(repo, names, node, message, local, user, date,
             editor=editor)
546 546
def _tag(repo, names, node, message, local, user, date, extra=None,
         editor=False):
    """Worker for tag(): append tag entries and, for global tags, commit
    the .hgtags change. Called by tag() with the wlock held."""
    if isinstance(names, str):
        # allow a single tag name to be passed as a bare string
        names = (names,)

    branches = repo.branchmap()
    for name in names:
        repo.hook('pretag', throw=True, node=hex(node), tag=name,
                  local=local)
        if name in branches:
            repo.ui.warn(_("warning: tag %s conflicts with existing"
                           " branch name\n") % name)

    def writetags(fp, names, munge, prevtags):
        # append entries at end of file; 'munge' optionally re-encodes names
        fp.seek(0, 2)  # seek to EOF
        if prevtags and prevtags[-1] != '\n':
            fp.write('\n')
        for name in names:
            if munge:
                m = munge(name)
            else:
                m = name

            if (repo._tagscache.tagtypes and
                name in repo._tagscache.tagtypes):
                # tag already exists: record its old value first so the
                # history of the tag is preserved in the file
                old = repo.tags().get(name, nullid)
                fp.write('%s %s\n' % (hex(old), m))
            fp.write('%s %s\n' % (hex(node), m))
        fp.close()

    prevtags = ''
    if local:
        # open for update if the file exists, otherwise create it
        try:
            fp = repo.vfs('localtags', 'r+')
        except IOError:
            fp = repo.vfs('localtags', 'a')
        else:
            prevtags = fp.read()

        # local tags are stored in the current charset
        writetags(fp, names, None, prevtags)
        for name in names:
            repo.hook('tag', node=hex(node), tag=name, local=local)
        return

    # global tag: same open-or-create dance on the tracked .hgtags file
    try:
        fp = repo.wvfs('.hgtags', 'rb+')
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        fp = repo.wvfs('.hgtags', 'ab')
    else:
        prevtags = fp.read()

    # committed tags are stored in UTF-8
    writetags(fp, names, encoding.fromlocal, prevtags)

    fp.close()

    repo.invalidatecaches()

    if '.hgtags' not in repo.dirstate:
        repo[None].add(['.hgtags'])

    m = matchmod.exact(repo.root, '', ['.hgtags'])
    tagnode = repo.commit(message, user, date, extra=extra, match=m,
                          editor=editor)

    for name in names:
        repo.hook('tag', node=hex(node), tag=name, local=local)

    return tagnode
619 619
# On-disk layout constants for the .hgtags filenode cache; see the
# hgtagsfnodescache class below for the full record format.
_fnodescachefile = 'hgtagsfnodes1'
_fnodesrecsize = 4 + 20 # changeset fragment + filenode
_fnodesmissingrec = '\xff' * 24  # all-ones sentinel marks an unset record
623 623
class hgtagsfnodescache(object):
    """Persistent cache mapping revisions to .hgtags filenodes.

    The cache is an array of records. Each item in the array corresponds to
    a changelog revision. Values in the array contain the first 4 bytes of
    the node hash and the 20 bytes .hgtags filenode for that revision.

    The first 4 bytes are present as a form of verification. Repository
    stripping and rewriting may change the node at a numeric revision in the
    changelog. The changeset fragment serves as a verifier to detect
    rewriting. This logic is shared with the rev branch cache (see
    branchmap.py).

    The instance holds in memory the full cache content but entries are
    only parsed on read.

    Use ``getfnode(node)`` to read an entry (missing entries are computed
    and populated on access) and ``setfnode(node, fnode)`` to update one;
    call ``write()`` to persist dirty entries.
    """
    def __init__(self, repo):
        # must be constructed on the unfiltered repo: records are indexed
        # by raw revision number
        assert repo.filtername is None

        self._repo = repo

        # Only for reporting purposes.
        self.lookupcount = 0
        self.hitcount = 0


        try:
            data = repo.cachevfs.read(_fnodescachefile)
        except (OSError, IOError):
            # missing/unreadable cache file: start from an empty buffer
            data = ""
        self._raw = bytearray(data)

        # The end state of self._raw is an array that is of the exact length
        # required to hold a record for every revision in the repository.
        # We truncate or extend the array as necessary. self._dirtyoffset is
        # defined to be the start offset at which we need to write the output
        # file. This offset is also adjusted when new entries are calculated
        # for array members.
        cllen = len(repo.changelog)
        wantedlen = cllen * _fnodesrecsize
        rawlen = len(self._raw)

        self._dirtyoffset = None

        if rawlen < wantedlen:
            # pad with "missing" sentinel records up to the wanted size
            self._dirtyoffset = rawlen
            self._raw.extend('\xff' * (wantedlen - rawlen))
        elif rawlen > wantedlen:
            # There's no easy way to truncate array instances. This seems
            # slightly less evil than copying a potentially large array slice.
            for i in range(rawlen - wantedlen):
                self._raw.pop()
            self._dirtyoffset = len(self._raw)

    def getfnode(self, node, computemissing=True):
        """Obtain the filenode of the .hgtags file at a specified revision.

        If the value is in the cache, the entry will be validated and returned.
        Otherwise, the filenode will be computed and returned unless
        "computemissing" is False, in which case None will be returned without
        any potentially expensive computation being performed.

        If an .hgtags does not exist at the specified revision, nullid is
        returned.
        """
        ctx = self._repo[node]
        rev = ctx.rev()

        self.lookupcount += 1

        # each record is 24 bytes: 4-byte node fragment + 20-byte fnode
        offset = rev * _fnodesrecsize
        record = '%s' % self._raw[offset:offset + _fnodesrecsize]
        properprefix = node[0:4]

        # Validate and return existing entry.
        if record != _fnodesmissingrec:
            fileprefix = record[0:4]

            # the stored node fragment must match, otherwise the changelog
            # was rewritten under us and the entry is stale
            if fileprefix == properprefix:
                self.hitcount += 1
                return record[4:]

            # Fall through.

        # If we get here, the entry is either missing or invalid.

        if not computemissing:
            return None

        # Populate missing entry.
        try:
            fnode = ctx.filenode('.hgtags')
        except error.LookupError:
            # No .hgtags file on this revision.
            fnode = nullid

        self._writeentry(offset, properprefix, fnode)
        return fnode

    def setfnode(self, node, fnode):
        """Set the .hgtags filenode for a given changeset."""
        assert len(fnode) == 20
        ctx = self._repo[node]

        # Do a lookup first to avoid writing if nothing has changed.
        if self.getfnode(ctx.node(), computemissing=False) == fnode:
            return

        self._writeentry(ctx.rev() * _fnodesrecsize, node[0:4], fnode)

    def _writeentry(self, offset, prefix, fnode):
        # Store one record in memory and mark the buffer dirty from here on.
        # Slices on array instances only accept other array.
        entry = bytearray(prefix + fnode)
        self._raw[offset:offset + _fnodesrecsize] = entry
        # self._dirtyoffset could be None.
        self._dirtyoffset = min(self._dirtyoffset or 0, offset or 0)

    def write(self):
        """Perform all necessary writes to cache file.

        This may no-op if no writes are needed or if a write lock could
        not be obtained.
        """
        if self._dirtyoffset is None:
            return

        data = self._raw[self._dirtyoffset:]
        if not data:
            return

        repo = self._repo

        # writing is opportunistic: never block waiting for the wlock
        try:
            lock = repo.wlock(wait=False)
        except error.LockError:
            repo.ui.log('tagscache', 'not writing .hg/cache/%s because '
                        'lock cannot be acquired\n' % (_fnodescachefile))
            return

        try:
            f = repo.cachevfs.open(_fnodescachefile, 'ab')
            try:
                # if the file has been truncated
                actualoffset = f.tell()
                if actualoffset < self._dirtyoffset:
                    self._dirtyoffset = actualoffset
                    data = self._raw[self._dirtyoffset:]
                f.seek(self._dirtyoffset)
                f.truncate()
                repo.ui.log('tagscache',
                            'writing %d bytes to cache/%s\n' % (
                            len(data), _fnodescachefile))
                f.write(data)
                self._dirtyoffset = None
            finally:
                f.close()
        except (IOError, OSError) as inst:
            # best-effort persistence: log and carry on
            repo.ui.log('tagscache',
                        "couldn't write cache/%s: %s\n" % (
                        _fnodescachefile, util.forcebytestr(inst)))
        finally:
            lock.release()
General Comments 0
You need to be logged in to leave comments. Login now