manifestcache: support multiple cache addition in one debug command run...
marmoute
r42124:1fe278aa default
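This changeset turns the -a/--add flag of hg debugmanifestfulltextcache into a list option, so a single invocation can warm several manifest nodes into the fulltext cache instead of just one, as the diff below shows. A hedged sketch of the resulting usage, where NODE1 and NODE2 stand in for full manifest node hashes:

    hg debugmanifestfulltextcache --add NODE1 --add NODE2
    hg debugmanifestfulltextcache     # with no flags, list the current cache contents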
@@ -1,3427 +1,3429 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 from .revlogutils import (
91 91 deltas as deltautil
92 92 )
93 93
94 94 release = lockmod.release
95 95
96 96 command = registrar.command()
97 97
98 98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 99 def debugancestor(ui, repo, *args):
100 100 """find the ancestor revision of two revisions in a given index"""
101 101 if len(args) == 3:
102 102 index, rev1, rev2 = args
103 103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 104 lookup = r.lookup
105 105 elif len(args) == 2:
106 106 if not repo:
107 107 raise error.Abort(_('there is no Mercurial repository here '
108 108 '(.hg not found)'))
109 109 rev1, rev2 = args
110 110 r = repo.changelog
111 111 lookup = repo.lookup
112 112 else:
113 113 raise error.Abort(_('either two or three arguments required'))
114 114 a = r.ancestor(lookup(rev1), lookup(rev2))
115 115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 116
117 117 @command('debugapplystreamclonebundle', [], 'FILE')
118 118 def debugapplystreamclonebundle(ui, repo, fname):
119 119 """apply a stream clone bundle file"""
120 120 f = hg.openpath(ui, fname)
121 121 gen = exchange.readbundle(ui, f, fname)
122 122 gen.apply(repo)
123 123
124 124 @command('debugbuilddag',
125 125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 127 ('n', 'new-file', None, _('add new file at each rev'))],
128 128 _('[OPTION]... [TEXT]'))
129 129 def debugbuilddag(ui, repo, text=None,
130 130 mergeable_file=False,
131 131 overwritten_file=False,
132 132 new_file=False):
133 133 """builds a repo with a given DAG from scratch in the current empty repo
134 134
135 135 The description of the DAG is read from stdin if not given on the
136 136 command line.
137 137
138 138 Elements:
139 139
140 140 - "+n" is a linear run of n nodes based on the current default parent
141 141 - "." is a single node based on the current default parent
142 142 - "$" resets the default parent to null (implied at the start);
143 143 otherwise the default parent is always the last node created
144 144 - "<p" sets the default parent to the backref p
145 145 - "*p" is a fork at parent p, which is a backref
146 146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 147 - "/p2" is a merge of the preceding node and p2
148 148 - ":tag" defines a local tag for the preceding node
149 149 - "@branch" sets the named branch for subsequent nodes
150 150 - "#...\\n" is a comment up to the end of the line
151 151
152 152 Whitespace between the above elements is ignored.
153 153
154 154 A backref is either
155 155
156 156 - a number n, which references the node curr-n, where curr is the current
157 157 node, or
158 158 - the name of a local tag you placed earlier using ":tag", or
159 159 - empty to denote the default parent.
160 160
161 161 All string-valued elements are either strictly alphanumeric, or must
162 162 be enclosed in double quotes ("..."), with "\\" as escape character.
163 163 """
164 164
165 165 if text is None:
166 166 ui.status(_("reading DAG from stdin\n"))
167 167 text = ui.fin.read()
168 168
169 169 cl = repo.changelog
170 170 if len(cl) > 0:
171 171 raise error.Abort(_('repository is not empty'))
172 172
173 173 # determine number of revs in DAG
174 174 total = 0
175 175 for type, data in dagparser.parsedag(text):
176 176 if type == 'n':
177 177 total += 1
178 178
179 179 if mergeable_file:
180 180 linesperrev = 2
181 181 # make a file with k lines per rev
182 182 initialmergedlines = ['%d' % i
183 183 for i in pycompat.xrange(0, total * linesperrev)]
184 184 initialmergedlines.append("")
185 185
186 186 tags = []
187 187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 188 total=total)
189 189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 190 at = -1
191 191 atbranch = 'default'
192 192 nodeids = []
193 193 id = 0
194 194 progress.update(id)
195 195 for type, data in dagparser.parsedag(text):
196 196 if type == 'n':
197 197 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 198 id, ps = data
199 199
200 200 files = []
201 201 filecontent = {}
202 202
203 203 p2 = None
204 204 if mergeable_file:
205 205 fn = "mf"
206 206 p1 = repo[ps[0]]
207 207 if len(ps) > 1:
208 208 p2 = repo[ps[1]]
209 209 pa = p1.ancestor(p2)
210 210 base, local, other = [x[fn].data() for x in (pa, p1,
211 211 p2)]
212 212 m3 = simplemerge.Merge3Text(base, local, other)
213 213 ml = [l.strip() for l in m3.merge_lines()]
214 214 ml.append("")
215 215 elif at > 0:
216 216 ml = p1[fn].data().split("\n")
217 217 else:
218 218 ml = initialmergedlines
219 219 ml[id * linesperrev] += " r%i" % id
220 220 mergedtext = "\n".join(ml)
221 221 files.append(fn)
222 222 filecontent[fn] = mergedtext
223 223
224 224 if overwritten_file:
225 225 fn = "of"
226 226 files.append(fn)
227 227 filecontent[fn] = "r%i\n" % id
228 228
229 229 if new_file:
230 230 fn = "nf%i" % id
231 231 files.append(fn)
232 232 filecontent[fn] = "r%i\n" % id
233 233 if len(ps) > 1:
234 234 if not p2:
235 235 p2 = repo[ps[1]]
236 236 for fn in p2:
237 237 if fn.startswith("nf"):
238 238 files.append(fn)
239 239 filecontent[fn] = p2[fn].data()
240 240
241 241 def fctxfn(repo, cx, path):
242 242 if path in filecontent:
243 243 return context.memfilectx(repo, cx, path,
244 244 filecontent[path])
245 245 return None
246 246
247 247 if len(ps) == 0 or ps[0] < 0:
248 248 pars = [None, None]
249 249 elif len(ps) == 1:
250 250 pars = [nodeids[ps[0]], None]
251 251 else:
252 252 pars = [nodeids[p] for p in ps]
253 253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 254 date=(id, 0),
255 255 user="debugbuilddag",
256 256 extra={'branch': atbranch})
257 257 nodeid = repo.commitctx(cx)
258 258 nodeids.append(nodeid)
259 259 at = id
260 260 elif type == 'l':
261 261 id, name = data
262 262 ui.note(('tag %s\n' % name))
263 263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 264 elif type == 'a':
265 265 ui.note(('branch %s\n' % data))
266 266 atbranch = data
267 267 progress.update(id)
268 268
269 269 if tags:
270 270 repo.vfs.write("localtags", "".join(tags))
271 271
272 272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 273 indent_string = ' ' * indent
274 274 if all:
275 275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 276 % indent_string)
277 277
278 278 def showchunks(named):
279 279 ui.write("\n%s%s\n" % (indent_string, named))
280 280 for deltadata in gen.deltaiter():
281 281 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 282 ui.write("%s%s %s %s %s %s %d\n" %
283 283 (indent_string, hex(node), hex(p1), hex(p2),
284 284 hex(cs), hex(deltabase), len(delta)))
285 285
286 286 chunkdata = gen.changelogheader()
287 287 showchunks("changelog")
288 288 chunkdata = gen.manifestheader()
289 289 showchunks("manifest")
290 290 for chunkdata in iter(gen.filelogheader, {}):
291 291 fname = chunkdata['filename']
292 292 showchunks(fname)
293 293 else:
294 294 if isinstance(gen, bundle2.unbundle20):
295 295 raise error.Abort(_('use debugbundle2 for this file'))
296 296 chunkdata = gen.changelogheader()
297 297 for deltadata in gen.deltaiter():
298 298 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 299 ui.write("%s%s\n" % (indent_string, hex(node)))
300 300
301 301 def _debugobsmarkers(ui, part, indent=0, **opts):
302 302 """display version and markers contained in 'data'"""
303 303 opts = pycompat.byteskwargs(opts)
304 304 data = part.read()
305 305 indent_string = ' ' * indent
306 306 try:
307 307 version, markers = obsolete._readmarkers(data)
308 308 except error.UnknownVersion as exc:
309 309 msg = "%sunsupported version: %s (%d bytes)\n"
310 310 msg %= indent_string, exc.version, len(data)
311 311 ui.write(msg)
312 312 else:
313 313 msg = "%sversion: %d (%d bytes)\n"
314 314 msg %= indent_string, version, len(data)
315 315 ui.write(msg)
316 316 fm = ui.formatter('debugobsolete', opts)
317 317 for rawmarker in sorted(markers):
318 318 m = obsutil.marker(None, rawmarker)
319 319 fm.startitem()
320 320 fm.plain(indent_string)
321 321 cmdutil.showmarker(fm, m)
322 322 fm.end()
323 323
324 324 def _debugphaseheads(ui, data, indent=0):
325 325 """display version and markers contained in 'data'"""
326 326 indent_string = ' ' * indent
327 327 headsbyphase = phases.binarydecode(data)
328 328 for phase in phases.allphases:
329 329 for head in headsbyphase[phase]:
330 330 ui.write(indent_string)
331 331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332 332
333 333 def _quasirepr(thing):
334 334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 335 return '{%s}' % (
336 336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 337 return pycompat.bytestr(repr(thing))
338 338
339 339 def _debugbundle2(ui, gen, all=None, **opts):
340 340 """lists the contents of a bundle2"""
341 341 if not isinstance(gen, bundle2.unbundle20):
342 342 raise error.Abort(_('not a bundle2 file'))
343 343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 344 parttypes = opts.get(r'part_type', [])
345 345 for part in gen.iterparts():
346 346 if parttypes and part.type not in parttypes:
347 347 continue
348 348 msg = '%s -- %s (mandatory: %r)\n'
349 349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 350 if part.type == 'changegroup':
351 351 version = part.params.get('version', '01')
352 352 cg = changegroup.getunbundler(version, part, 'UN')
353 353 if not ui.quiet:
354 354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 355 if part.type == 'obsmarkers':
356 356 if not ui.quiet:
357 357 _debugobsmarkers(ui, part, indent=4, **opts)
358 358 if part.type == 'phase-heads':
359 359 if not ui.quiet:
360 360 _debugphaseheads(ui, part, indent=4)
361 361
362 362 @command('debugbundle',
363 363 [('a', 'all', None, _('show all details')),
364 364 ('', 'part-type', [], _('show only the named part type')),
365 365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 366 _('FILE'),
367 367 norepo=True)
368 368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 369 """lists the contents of a bundle"""
370 370 with hg.openpath(ui, bundlepath) as f:
371 371 if spec:
372 372 spec = exchange.getbundlespec(ui, f)
373 373 ui.write('%s\n' % spec)
374 374 return
375 375
376 376 gen = exchange.readbundle(ui, f, bundlepath)
377 377 if isinstance(gen, bundle2.unbundle20):
378 378 return _debugbundle2(ui, gen, all=all, **opts)
379 379 _debugchangegroup(ui, gen, all=all, **opts)
380 380
381 381 @command('debugcapabilities',
382 382 [], _('PATH'),
383 383 norepo=True)
384 384 def debugcapabilities(ui, path, **opts):
385 385 """lists the capabilities of a remote peer"""
386 386 opts = pycompat.byteskwargs(opts)
387 387 peer = hg.peer(ui, opts, path)
388 388 caps = peer.capabilities()
389 389 ui.write(('Main capabilities:\n'))
390 390 for c in sorted(caps):
391 391 ui.write((' %s\n') % c)
392 392 b2caps = bundle2.bundle2caps(peer)
393 393 if b2caps:
394 394 ui.write(('Bundle2 capabilities:\n'))
395 395 for key, values in sorted(b2caps.iteritems()):
396 396 ui.write((' %s\n') % key)
397 397 for v in values:
398 398 ui.write((' %s\n') % v)
399 399
400 400 @command('debugcheckstate', [], '')
401 401 def debugcheckstate(ui, repo):
402 402 """validate the correctness of the current dirstate"""
403 403 parent1, parent2 = repo.dirstate.parents()
404 404 m1 = repo[parent1].manifest()
405 405 m2 = repo[parent2].manifest()
406 406 errors = 0
407 407 for f in repo.dirstate:
408 408 state = repo.dirstate[f]
409 409 if state in "nr" and f not in m1:
410 410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 411 errors += 1
412 412 if state in "a" and f in m1:
413 413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 414 errors += 1
415 415 if state in "m" and f not in m1 and f not in m2:
416 416 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 417 (f, state))
418 418 errors += 1
419 419 for f in m1:
420 420 state = repo.dirstate[f]
421 421 if state not in "nrm":
422 422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 423 errors += 1
424 424 if errors:
425 425 error = _(".hg/dirstate inconsistent with current parent's manifest")
426 426 raise error.Abort(error)
427 427
428 428 @command('debugcolor',
429 429 [('', 'style', None, _('show all configured styles'))],
430 430 'hg debugcolor')
431 431 def debugcolor(ui, repo, **opts):
432 432 """show available color, effects or style"""
433 433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 434 if opts.get(r'style'):
435 435 return _debugdisplaystyle(ui)
436 436 else:
437 437 return _debugdisplaycolor(ui)
438 438
439 439 def _debugdisplaycolor(ui):
440 440 ui = ui.copy()
441 441 ui._styles.clear()
442 442 for effect in color._activeeffects(ui).keys():
443 443 ui._styles[effect] = effect
444 444 if ui._terminfoparams:
445 445 for k, v in ui.configitems('color'):
446 446 if k.startswith('color.'):
447 447 ui._styles[k] = k[6:]
448 448 elif k.startswith('terminfo.'):
449 449 ui._styles[k] = k[9:]
450 450 ui.write(_('available colors:\n'))
451 451 # sort labels with a '_' after the others to group '_background' entries.
452 452 items = sorted(ui._styles.items(),
453 453 key=lambda i: ('_' in i[0], i[0], i[1]))
454 454 for colorname, label in items:
455 455 ui.write(('%s\n') % colorname, label=label)
456 456
457 457 def _debugdisplaystyle(ui):
458 458 ui.write(_('available style:\n'))
459 459 if not ui._styles:
460 460 return
461 461 width = max(len(s) for s in ui._styles)
462 462 for label, effects in sorted(ui._styles.items()):
463 463 ui.write('%s' % label, label=label)
464 464 if effects:
465 465 # 50
466 466 ui.write(': ')
467 467 ui.write(' ' * (max(0, width - len(label))))
468 468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 469 ui.write('\n')
470 470
471 471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 472 def debugcreatestreamclonebundle(ui, repo, fname):
473 473 """create a stream clone bundle file
474 474
475 475 Stream bundles are special bundles that are essentially archives of
476 476 revlog files. They are commonly used for cloning very quickly.
477 477 """
478 478 # TODO we may want to turn this into an abort when this functionality
479 479 # is moved into `hg bundle`.
480 480 if phases.hassecret(repo):
481 481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 482 'revisions)\n'))
483 483
484 484 requirements, gen = streamclone.generatebundlev1(repo)
485 485 changegroup.writechunks(ui, gen, fname)
486 486
487 487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 488
489 489 @command('debugdag',
490 490 [('t', 'tags', None, _('use tags as labels')),
491 491 ('b', 'branches', None, _('annotate with branch names')),
492 492 ('', 'dots', None, _('use dots for runs')),
493 493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 494 _('[OPTION]... [FILE [REV]...]'),
495 495 optionalrepo=True)
496 496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 497 """format the changelog or an index DAG as a concise textual description
498 498
499 499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 500 revision numbers, they get labeled in the output as rN.
501 501
502 502 Otherwise, the changelog DAG of the current repo is emitted.
503 503 """
504 504 spaces = opts.get(r'spaces')
505 505 dots = opts.get(r'dots')
506 506 if file_:
507 507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 508 file_)
509 509 revs = set((int(r) for r in revs))
510 510 def events():
511 511 for r in rlog:
512 512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 513 if p != -1))
514 514 if r in revs:
515 515 yield 'l', (r, "r%i" % r)
516 516 elif repo:
517 517 cl = repo.changelog
518 518 tags = opts.get(r'tags')
519 519 branches = opts.get(r'branches')
520 520 if tags:
521 521 labels = {}
522 522 for l, n in repo.tags().items():
523 523 labels.setdefault(cl.rev(n), []).append(l)
524 524 def events():
525 525 b = "default"
526 526 for r in cl:
527 527 if branches:
528 528 newb = cl.read(cl.node(r))[5]['branch']
529 529 if newb != b:
530 530 yield 'a', newb
531 531 b = newb
532 532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 533 if p != -1))
534 534 if tags:
535 535 ls = labels.get(r)
536 536 if ls:
537 537 for l in ls:
538 538 yield 'l', (r, l)
539 539 else:
540 540 raise error.Abort(_('need repo for changelog dag'))
541 541
542 542 for line in dagparser.dagtextlines(events(),
543 543 addspaces=spaces,
544 544 wraplabels=True,
545 545 wrapannotations=True,
546 546 wrapnonlinear=dots,
547 547 usedots=dots,
548 548 maxlinewidth=70):
549 549 ui.write(line)
550 550 ui.write("\n")
551 551
552 552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 553 def debugdata(ui, repo, file_, rev=None, **opts):
554 554 """dump the contents of a data file revision"""
555 555 opts = pycompat.byteskwargs(opts)
556 556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 557 if rev is not None:
558 558 raise error.CommandError('debugdata', _('invalid arguments'))
559 559 file_, rev = None, file_
560 560 elif rev is None:
561 561 raise error.CommandError('debugdata', _('invalid arguments'))
562 562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 563 try:
564 564 ui.write(r.revision(r.lookup(rev), raw=True))
565 565 except KeyError:
566 566 raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
568 568 @command('debugdate',
569 569 [('e', 'extended', None, _('try extended date formats'))],
570 570 _('[-e] DATE [RANGE]'),
571 571 norepo=True, optionalrepo=True)
572 572 def debugdate(ui, date, range=None, **opts):
573 573 """parse and display a date"""
574 574 if opts[r"extended"]:
575 575 d = dateutil.parsedate(date, util.extendeddateformats)
576 576 else:
577 577 d = dateutil.parsedate(date)
578 578 ui.write(("internal: %d %d\n") % d)
579 579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 580 if range:
581 581 m = dateutil.matchdate(range)
582 582 ui.write(("match: %s\n") % m(d[0]))
583 583
584 584 @command('debugdeltachain',
585 585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 586 _('-c|-m|FILE'),
587 587 optionalrepo=True)
588 588 def debugdeltachain(ui, repo, file_=None, **opts):
589 589 """dump information about delta chains in a revlog
590 590
591 591 Output can be templatized. Available template keywords are:
592 592
593 593 :``rev``: revision number
594 594 :``chainid``: delta chain identifier (numbered by unique base)
595 595 :``chainlen``: delta chain length to this revision
596 596 :``prevrev``: previous revision in delta chain
597 597 :``deltatype``: role of delta / how it was computed
598 598 :``compsize``: compressed size of revision
599 599 :``uncompsize``: uncompressed size of revision
600 600 :``chainsize``: total size of compressed revisions in chain
601 601 :``chainratio``: total chain size divided by uncompressed revision size
602 602 (new delta chains typically start at ratio 2.00)
603 603 :``lindist``: linear distance from base revision in delta chain to end
604 604 of this revision
605 605 :``extradist``: total size of revisions not part of this delta chain from
606 606 base of delta chain to end of this revision; a measurement
607 607 of how much extra data we need to read/seek across to read
608 608 the delta chain for this revision
609 609 :``extraratio``: extradist divided by chainsize; another representation of
610 610 how much unrelated data is needed to load this delta chain
611 611
612 612 If the repository is configured to use the sparse read, additional keywords
613 613 are available:
614 614
615 615 :``readsize``: total size of data read from the disk for a revision
616 616 (sum of the sizes of all the blocks)
617 617 :``largestblock``: size of the largest block of data read from the disk
618 618 :``readdensity``: density of useful bytes in the data read from the disk
619 619 :``srchunks``: in how many data hunks the whole revision would be read
620 620
621 621 The sparse read can be enabled with experimental.sparse-read = True
622 622 """
623 623 opts = pycompat.byteskwargs(opts)
624 624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 625 index = r.index
626 626 start = r.start
627 627 length = r.length
628 628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 629 withsparseread = getattr(r, '_withsparseread', False)
630 630
631 631 def revinfo(rev):
632 632 e = index[rev]
633 633 compsize = e[1]
634 634 uncompsize = e[2]
635 635 chainsize = 0
636 636
637 637 if generaldelta:
638 638 if e[3] == e[5]:
639 639 deltatype = 'p1'
640 640 elif e[3] == e[6]:
641 641 deltatype = 'p2'
642 642 elif e[3] == rev - 1:
643 643 deltatype = 'prev'
644 644 elif e[3] == rev:
645 645 deltatype = 'base'
646 646 else:
647 647 deltatype = 'other'
648 648 else:
649 649 if e[3] == rev:
650 650 deltatype = 'base'
651 651 else:
652 652 deltatype = 'prev'
653 653
654 654 chain = r._deltachain(rev)[0]
655 655 for iterrev in chain:
656 656 e = index[iterrev]
657 657 chainsize += e[1]
658 658
659 659 return compsize, uncompsize, deltatype, chain, chainsize
660 660
661 661 fm = ui.formatter('debugdeltachain', opts)
662 662
663 663 fm.plain(' rev chain# chainlen prev delta '
664 664 'size rawsize chainsize ratio lindist extradist '
665 665 'extraratio')
666 666 if withsparseread:
667 667 fm.plain(' readsize largestblk rddensity srchunks')
668 668 fm.plain('\n')
669 669
670 670 chainbases = {}
671 671 for rev in r:
672 672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 673 chainbase = chain[0]
674 674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 675 basestart = start(chainbase)
676 676 revstart = start(rev)
677 677 lineardist = revstart + comp - basestart
678 678 extradist = lineardist - chainsize
679 679 try:
680 680 prevrev = chain[-2]
681 681 except IndexError:
682 682 prevrev = -1
683 683
684 684 if uncomp != 0:
685 685 chainratio = float(chainsize) / float(uncomp)
686 686 else:
687 687 chainratio = chainsize
688 688
689 689 if chainsize != 0:
690 690 extraratio = float(extradist) / float(chainsize)
691 691 else:
692 692 extraratio = extradist
693 693
694 694 fm.startitem()
695 695 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 696 'uncompsize chainsize chainratio lindist extradist '
697 697 'extraratio',
698 698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 699 rev, chainid, len(chain), prevrev, deltatype, comp,
700 700 uncomp, chainsize, chainratio, lineardist, extradist,
701 701 extraratio,
702 702 rev=rev, chainid=chainid, chainlen=len(chain),
703 703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 704 uncompsize=uncomp, chainsize=chainsize,
705 705 chainratio=chainratio, lindist=lineardist,
706 706 extradist=extradist, extraratio=extraratio)
707 707 if withsparseread:
708 708 readsize = 0
709 709 largestblock = 0
710 710 srchunks = 0
711 711
712 712 for revschunk in deltautil.slicechunk(r, chain):
713 713 srchunks += 1
714 714 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 715 blksize = blkend - start(revschunk[0])
716 716
717 717 readsize += blksize
718 718 if largestblock < blksize:
719 719 largestblock = blksize
720 720
721 721 if readsize:
722 722 readdensity = float(chainsize) / float(readsize)
723 723 else:
724 724 readdensity = 1
725 725
726 726 fm.write('readsize largestblock readdensity srchunks',
727 727 ' %10d %10d %9.5f %8d',
728 728 readsize, largestblock, readdensity, srchunks,
729 729 readsize=readsize, largestblock=largestblock,
730 730 readdensity=readdensity, srchunks=srchunks)
731 731
732 732 fm.plain('\n')
733 733
734 734 fm.end()
735 735
736 736 @command('debugdirstate|debugstate',
737 737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 738 ('', 'dates', True, _('display the saved mtime')),
739 739 ('', 'datesort', None, _('sort by saved mtime'))],
740 740 _('[OPTION]...'))
741 741 def debugstate(ui, repo, **opts):
742 742 """show the contents of the current dirstate"""
743 743
744 744 nodates = not opts[r'dates']
745 745 if opts.get(r'nodates') is not None:
746 746 nodates = True
747 747 datesort = opts.get(r'datesort')
748 748
749 749 if datesort:
750 750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 751 else:
752 752 keyfunc = None # sort by filename
753 753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 754 if ent[3] == -1:
755 755 timestr = 'unset '
756 756 elif nodates:
757 757 timestr = 'set '
758 758 else:
759 759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 760 time.localtime(ent[3]))
761 761 timestr = encoding.strtolocal(timestr)
762 762 if ent[1] & 0o20000:
763 763 mode = 'lnk'
764 764 else:
765 765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 767 for f in repo.dirstate.copies():
768 768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
770 770 @command('debugdiscovery',
771 771 [('', 'old', None, _('use old-style discovery')),
772 772 ('', 'nonheads', None,
773 773 _('use old-style discovery with non-heads included')),
774 774 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 775 ] + cmdutil.remoteopts,
776 776 _('[--rev REV] [OTHER]'))
777 777 def debugdiscovery(ui, repo, remoteurl="default", **opts):
778 778 """runs the changeset discovery protocol in isolation"""
779 779 opts = pycompat.byteskwargs(opts)
780 780 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
781 781 remote = hg.peer(repo, opts, remoteurl)
782 782 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
783 783
784 784 # make sure tests are repeatable
785 785 random.seed(12323)
786 786
787 787 def doit(pushedrevs, remoteheads, remote=remote):
788 788 if opts.get('old'):
789 789 if not util.safehasattr(remote, 'branches'):
790 790 # enable in-client legacy support
791 791 remote = localrepo.locallegacypeer(remote.local())
792 792 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
793 793 force=True)
794 794 common = set(common)
795 795 if not opts.get('nonheads'):
796 796 ui.write(("unpruned common: %s\n") %
797 797 " ".join(sorted(short(n) for n in common)))
798 798
799 799 clnode = repo.changelog.node
800 800 common = repo.revs('heads(::%ln)', common)
801 801 common = {clnode(r) for r in common}
802 802 else:
803 803 nodes = None
804 804 if pushedrevs:
805 805 revs = scmutil.revrange(repo, pushedrevs)
806 806 nodes = [repo[r].node() for r in revs]
807 807 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
808 808 ancestorsof=nodes)
809 809 common = set(common)
810 810 rheads = set(hds)
811 811 lheads = set(repo.heads())
812 812 ui.write(("common heads: %s\n") %
813 813 " ".join(sorted(short(n) for n in common)))
814 814 if lheads <= common:
815 815 ui.write(("local is subset\n"))
816 816 elif rheads <= common:
817 817 ui.write(("remote is subset\n"))
818 818
819 819 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
820 820 localrevs = opts['rev']
821 821 doit(localrevs, remoterevs)
822 822
823 823 _chunksize = 4 << 10
824 824
825 825 @command('debugdownload',
826 826 [
827 827 ('o', 'output', '', _('path')),
828 828 ],
829 829 optionalrepo=True)
830 830 def debugdownload(ui, repo, url, output=None, **opts):
831 831 """download a resource using Mercurial logic and config
832 832 """
833 833 fh = urlmod.open(ui, url, output)
834 834
835 835 dest = ui
836 836 if output:
837 837 dest = open(output, "wb", _chunksize)
838 838 try:
839 839 data = fh.read(_chunksize)
840 840 while data:
841 841 dest.write(data)
842 842 data = fh.read(_chunksize)
843 843 finally:
844 844 if output:
845 845 dest.close()
846 846
847 847 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
848 848 def debugextensions(ui, repo, **opts):
849 849 '''show information about active extensions'''
850 850 opts = pycompat.byteskwargs(opts)
851 851 exts = extensions.extensions(ui)
852 852 hgver = util.version()
853 853 fm = ui.formatter('debugextensions', opts)
854 854 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
855 855 isinternal = extensions.ismoduleinternal(extmod)
856 856 extsource = pycompat.fsencode(extmod.__file__)
857 857 if isinternal:
858 858 exttestedwith = [] # never expose magic string to users
859 859 else:
860 860 exttestedwith = getattr(extmod, 'testedwith', '').split()
861 861 extbuglink = getattr(extmod, 'buglink', None)
862 862
863 863 fm.startitem()
864 864
865 865 if ui.quiet or ui.verbose:
866 866 fm.write('name', '%s\n', extname)
867 867 else:
868 868 fm.write('name', '%s', extname)
869 869 if isinternal or hgver in exttestedwith:
870 870 fm.plain('\n')
871 871 elif not exttestedwith:
872 872 fm.plain(_(' (untested!)\n'))
873 873 else:
874 874 lasttestedversion = exttestedwith[-1]
875 875 fm.plain(' (%s!)\n' % lasttestedversion)
876 876
877 877 fm.condwrite(ui.verbose and extsource, 'source',
878 878 _(' location: %s\n'), extsource or "")
879 879
880 880 if ui.verbose:
881 881 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
882 882 fm.data(bundled=isinternal)
883 883
884 884 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
885 885 _(' tested with: %s\n'),
886 886 fm.formatlist(exttestedwith, name='ver'))
887 887
888 888 fm.condwrite(ui.verbose and extbuglink, 'buglink',
889 889 _(' bug reporting: %s\n'), extbuglink or "")
890 890
891 891 fm.end()
892 892
893 893 @command('debugfileset',
894 894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
895 895 ('', 'all-files', False,
896 896 _('test files from all revisions and working directory')),
897 897 ('s', 'show-matcher', None,
898 898 _('print internal representation of matcher')),
899 899 ('p', 'show-stage', [],
900 900 _('print parsed tree at the given stage'), _('NAME'))],
901 901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
902 902 def debugfileset(ui, repo, expr, **opts):
903 903 '''parse and apply a fileset specification'''
904 904 from . import fileset
905 905 fileset.symbols # force import of fileset so we have predicates to optimize
906 906 opts = pycompat.byteskwargs(opts)
907 907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908 908
909 909 stages = [
910 910 ('parsed', pycompat.identity),
911 911 ('analyzed', filesetlang.analyze),
912 912 ('optimized', filesetlang.optimize),
913 913 ]
914 914 stagenames = set(n for n, f in stages)
915 915
916 916 showalways = set()
917 917 if ui.verbose and not opts['show_stage']:
918 918 # show parsed tree by --verbose (deprecated)
919 919 showalways.add('parsed')
920 920 if opts['show_stage'] == ['all']:
921 921 showalways.update(stagenames)
922 922 else:
923 923 for n in opts['show_stage']:
924 924 if n not in stagenames:
925 925 raise error.Abort(_('invalid stage name: %s') % n)
926 926 showalways.update(opts['show_stage'])
927 927
928 928 tree = filesetlang.parse(expr)
929 929 for n, f in stages:
930 930 tree = f(tree)
931 931 if n in showalways:
932 932 if opts['show_stage'] or n != 'parsed':
933 933 ui.write(("* %s:\n") % n)
934 934 ui.write(filesetlang.prettyformat(tree), "\n")
935 935
936 936 files = set()
937 937 if opts['all_files']:
938 938 for r in repo:
939 939 c = repo[r]
940 940 files.update(c.files())
941 941 files.update(c.substate)
942 942 if opts['all_files'] or ctx.rev() is None:
943 943 wctx = repo[None]
944 944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 945 subrepos=list(wctx.substate),
946 946 unknown=True, ignored=True))
947 947 files.update(wctx.substate)
948 948 else:
949 949 files.update(ctx.files())
950 950 files.update(ctx.substate)
951 951
952 952 m = ctx.matchfileset(expr)
953 953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 955 for f in sorted(files):
956 956 if not m(f):
957 957 continue
958 958 ui.write("%s\n" % f)
959 959
960 960 @command('debugformat',
961 961 [] + cmdutil.formatteropts)
962 962 def debugformat(ui, repo, **opts):
963 963 """display format information about the current repository
964 964
965 965 Use --verbose to get extra information about current config value and
966 966 Mercurial default."""
967 967 opts = pycompat.byteskwargs(opts)
968 968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970 970
971 971 def makeformatname(name):
972 972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973 973
974 974 fm = ui.formatter('debugformat', opts)
975 975 if fm.isplain():
976 976 def formatvalue(value):
977 977 if util.safehasattr(value, 'startswith'):
978 978 return value
979 979 if value:
980 980 return 'yes'
981 981 else:
982 982 return 'no'
983 983 else:
984 984 formatvalue = pycompat.identity
985 985
986 986 fm.plain('format-variant')
987 987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 988 fm.plain(' repo')
989 989 if ui.verbose:
990 990 fm.plain(' config default')
991 991 fm.plain('\n')
992 992 for fv in upgrade.allformatvariant:
993 993 fm.startitem()
994 994 repovalue = fv.fromrepo(repo)
995 995 configvalue = fv.fromconfig(repo)
996 996
997 997 if repovalue != configvalue:
998 998 namelabel = 'formatvariant.name.mismatchconfig'
999 999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 1000 elif repovalue != fv.default:
1001 1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 1003 else:
1004 1004 namelabel = 'formatvariant.name.uptodate'
1005 1005 repolabel = 'formatvariant.repo.uptodate'
1006 1006
1007 1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 1008 label=namelabel)
1009 1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 1010 label=repolabel)
1011 1011 if fv.default != configvalue:
1012 1012 configlabel = 'formatvariant.config.special'
1013 1013 else:
1014 1014 configlabel = 'formatvariant.config.default'
1015 1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 1016 label=configlabel)
1017 1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 1018 label='formatvariant.default')
1019 1019 fm.plain('\n')
1020 1020 fm.end()
1021 1021
1022 1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 1023 def debugfsinfo(ui, path="."):
1024 1024 """show information detected about current filesystem"""
1025 1025 ui.write(('path: %s\n') % path)
1026 1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 1031 casesensitive = '(unknown)'
1032 1032 try:
1033 1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 1035 except OSError:
1036 1036 pass
1037 1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
1039 1039 @command('debuggetbundle',
1040 1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 1043 _('REPO FILE [-H|-C ID]...'),
1044 1044 norepo=True)
1045 1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 1046 """retrieves a bundle from a repo
1047 1047
1048 1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 1049 given file.
1050 1050 """
1051 1051 opts = pycompat.byteskwargs(opts)
1052 1052 repo = hg.peer(ui, opts, repopath)
1053 1053 if not repo.capable('getbundle'):
1054 1054 raise error.Abort("getbundle() not supported by target repository")
1055 1055 args = {}
1056 1056 if common:
1057 1057 args[r'common'] = [bin(s) for s in common]
1058 1058 if head:
1059 1059 args[r'heads'] = [bin(s) for s in head]
1060 1060 # TODO: get desired bundlecaps from command line.
1061 1061 args[r'bundlecaps'] = None
1062 1062 bundle = repo.getbundle('debug', **args)
1063 1063
1064 1064 bundletype = opts.get('type', 'bzip2').lower()
1065 1065 btypes = {'none': 'HG10UN',
1066 1066 'bzip2': 'HG10BZ',
1067 1067 'gzip': 'HG10GZ',
1068 1068 'bundle2': 'HG20'}
1069 1069 bundletype = btypes.get(bundletype)
1070 1070 if bundletype not in bundle2.bundletypes:
1071 1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
1074 1074 @command('debugignore', [], '[FILE]')
1075 1075 def debugignore(ui, repo, *files, **opts):
1076 1076 """display the combined ignore pattern and information about ignored files
1077 1077
1078 1078 With no argument display the combined ignore pattern.
1079 1079
1080 1080 Given space separated file names, shows if the given file is ignored and
1081 1081 if so, show the ignore rule (file and line number) that matched it.
1082 1082 """
1083 1083 ignore = repo.dirstate._ignore
1084 1084 if not files:
1085 1085 # Show all the patterns
1086 1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 1087 else:
1088 1088 m = scmutil.match(repo[None], pats=files)
1089 1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1090 1090 for f in m.files():
1091 1091 nf = util.normpath(f)
1092 1092 ignored = None
1093 1093 ignoredata = None
1094 1094 if nf != '.':
1095 1095 if ignore(nf):
1096 1096 ignored = nf
1097 1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1098 1098 else:
1099 1099 for p in util.finddirs(nf):
1100 1100 if ignore(p):
1101 1101 ignored = p
1102 1102 ignoredata = repo.dirstate._ignorefileandline(p)
1103 1103 break
1104 1104 if ignored:
1105 1105 if ignored == nf:
1106 1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1107 1107 else:
1108 1108 ui.write(_("%s is ignored because of "
1109 1109 "containing folder %s\n")
1110 1110 % (uipathfn(f), ignored))
1111 1111 ignorefile, lineno, line = ignoredata
1112 1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1113 1113 % (ignorefile, lineno, line))
1114 1114 else:
1115 1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1116 1116
1117 1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1118 1118 _('-c|-m|FILE'))
1119 1119 def debugindex(ui, repo, file_=None, **opts):
1120 1120 """dump index data for a storage primitive"""
1121 1121 opts = pycompat.byteskwargs(opts)
1122 1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1123 1123
1124 1124 if ui.debugflag:
1125 1125 shortfn = hex
1126 1126 else:
1127 1127 shortfn = short
1128 1128
1129 1129 idlen = 12
1130 1130 for i in store:
1131 1131 idlen = len(shortfn(store.node(i)))
1132 1132 break
1133 1133
1134 1134 fm = ui.formatter('debugindex', opts)
1135 1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1136 1136 b'nodeid'.ljust(idlen),
1137 1137 b'p1'.ljust(idlen)))
1138 1138
1139 1139 for rev in store:
1140 1140 node = store.node(rev)
1141 1141 parents = store.parents(node)
1142 1142
1143 1143 fm.startitem()
1144 1144 fm.write(b'rev', b'%6d ', rev)
1145 1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1146 1146 fm.write(b'node', '%s ', shortfn(node))
1147 1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1148 1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1149 1149 fm.plain(b'\n')
1150 1150
1151 1151 fm.end()
1152 1152
1153 1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1154 1154 _('-c|-m|FILE'), optionalrepo=True)
1155 1155 def debugindexdot(ui, repo, file_=None, **opts):
1156 1156 """dump an index DAG as a graphviz dot file"""
1157 1157 opts = pycompat.byteskwargs(opts)
1158 1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1159 1159 ui.write(("digraph G {\n"))
1160 1160 for i in r:
1161 1161 node = r.node(i)
1162 1162 pp = r.parents(node)
1163 1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1164 1164 if pp[1] != nullid:
1165 1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1166 1166 ui.write("}\n")
1167 1167
1168 1168 @command('debugindexstats', [])
1169 1169 def debugindexstats(ui, repo):
1170 1170 """show stats related to the changelog index"""
1171 1171 repo.changelog.shortest(nullid, 1)
1172 1172 index = repo.changelog.index
1173 1173 if not util.safehasattr(index, 'stats'):
1174 1174 raise error.Abort(_('debugindexstats only works with native code'))
1175 1175 for k, v in sorted(index.stats().items()):
1176 1176 ui.write('%s: %d\n' % (k, v))
1177 1177
1178 1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1179 1179 def debuginstall(ui, **opts):
1180 1180 '''test Mercurial installation
1181 1181
1182 1182 Returns 0 on success.
1183 1183 '''
1184 1184 opts = pycompat.byteskwargs(opts)
1185 1185
1186 1186 problems = 0
1187 1187
1188 1188 fm = ui.formatter('debuginstall', opts)
1189 1189 fm.startitem()
1190 1190
1191 1191 # encoding
1192 1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1193 1193 err = None
1194 1194 try:
1195 1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1196 1196 except LookupError as inst:
1197 1197 err = stringutil.forcebytestr(inst)
1198 1198 problems += 1
1199 1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1200 1200 " (check that your locale is properly set)\n"), err)
1201 1201
1202 1202 # Python
1203 1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1204 1204 pycompat.sysexecutable)
1205 1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1206 1206 ("%d.%d.%d" % sys.version_info[:3]))
1207 1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1208 1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1209 1209
1210 1210 security = set(sslutil.supportedprotocols)
1211 1211 if sslutil.hassni:
1212 1212 security.add('sni')
1213 1213
1214 1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1215 1215 fm.formatlist(sorted(security), name='protocol',
1216 1216 fmt='%s', sep=','))
1217 1217
1218 1218 # These are warnings, not errors. So don't increment problem count. This
1219 1219 # may change in the future.
1220 1220 if 'tls1.2' not in security:
1221 1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1222 1222 'network connections lack modern security\n'))
1223 1223 if 'sni' not in security:
1224 1224 fm.plain(_(' SNI not supported by Python install; may have '
1225 1225 'connectivity issues with some servers\n'))
1226 1226
1227 1227 # TODO print CA cert info
1228 1228
1229 1229 # hg version
1230 1230 hgver = util.version()
1231 1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1232 1232 hgver.split('+')[0])
1233 1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1234 1234 '+'.join(hgver.split('+')[1:]))
1235 1235
1236 1236 # compiled modules
1237 1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1238 1238 policy.policy)
1239 1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1240 1240 os.path.dirname(pycompat.fsencode(__file__)))
1241 1241
1242 1242 if policy.policy in ('c', 'allow'):
1243 1243 err = None
1244 1244 try:
1245 1245 from .cext import (
1246 1246 base85,
1247 1247 bdiff,
1248 1248 mpatch,
1249 1249 osutil,
1250 1250 )
1251 1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1252 1252 except Exception as inst:
1253 1253 err = stringutil.forcebytestr(inst)
1254 1254 problems += 1
1255 1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1256 1256
1257 1257 compengines = util.compengines._engines.values()
1258 1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1259 1259 fm.formatlist(sorted(e.name() for e in compengines),
1260 1260 name='compengine', fmt='%s', sep=', '))
1261 1261 fm.write('compenginesavail', _('checking available compression engines '
1262 1262 '(%s)\n'),
1263 1263 fm.formatlist(sorted(e.name() for e in compengines
1264 1264 if e.available()),
1265 1265 name='compengine', fmt='%s', sep=', '))
1266 1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1267 1267 fm.write('compenginesserver', _('checking available compression engines '
1268 1268 'for wire protocol (%s)\n'),
1269 1269 fm.formatlist([e.name() for e in wirecompengines
1270 1270 if e.wireprotosupport()],
1271 1271 name='compengine', fmt='%s', sep=', '))
1272 1272 re2 = 'missing'
1273 1273 if util._re2:
1274 1274 re2 = 'available'
1275 1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1276 1276 fm.data(re2=bool(util._re2))
1277 1277
1278 1278 # templates
1279 1279 p = templater.templatepaths()
1280 1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1281 1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1282 1282 if p:
1283 1283 m = templater.templatepath("map-cmdline.default")
1284 1284 if m:
1285 1285 # template found, check if it is working
1286 1286 err = None
1287 1287 try:
1288 1288 templater.templater.frommapfile(m)
1289 1289 except Exception as inst:
1290 1290 err = stringutil.forcebytestr(inst)
1291 1291 p = None
1292 1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1293 1293 else:
1294 1294 p = None
1295 1295 fm.condwrite(p, 'defaulttemplate',
1296 1296 _("checking default template (%s)\n"), m)
1297 1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1298 1298 _(" template '%s' not found\n"), "default")
1299 1299 if not p:
1300 1300 problems += 1
1301 1301 fm.condwrite(not p, '',
1302 1302 _(" (templates seem to have been installed incorrectly)\n"))
1303 1303
1304 1304 # editor
1305 1305 editor = ui.geteditor()
1306 1306 editor = util.expandpath(editor)
1307 1307 editorbin = procutil.shellsplit(editor)[0]
1308 1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1309 1309 cmdpath = procutil.findexe(editorbin)
1310 1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1311 1311 _(" No commit editor set and can't find %s in PATH\n"
1312 1312 " (specify a commit editor in your configuration"
1313 1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1314 1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1315 1315 _(" Can't find editor '%s' in PATH\n"
1316 1316 " (specify a commit editor in your configuration"
1317 1317 " file)\n"), not cmdpath and editorbin)
1318 1318 if not cmdpath and editor != 'vi':
1319 1319 problems += 1
1320 1320
1321 1321 # check username
1322 1322 username = None
1323 1323 err = None
1324 1324 try:
1325 1325 username = ui.username()
1326 1326 except error.Abort as e:
1327 1327 err = stringutil.forcebytestr(e)
1328 1328 problems += 1
1329 1329
1330 1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1331 1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1332 1332 " (specify a username in your configuration file)\n"), err)
1333 1333
1334 1334 fm.condwrite(not problems, '',
1335 1335 _("no problems detected\n"))
1336 1336 if not problems:
1337 1337 fm.data(problems=problems)
1338 1338 fm.condwrite(problems, 'problems',
1339 1339 _("%d problems detected,"
1340 1340 " please check your install!\n"), problems)
1341 1341 fm.end()
1342 1342
1343 1343 return problems
1344 1344
1345 1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1346 1346 def debugknown(ui, repopath, *ids, **opts):
1347 1347 """test whether node ids are known to a repo
1348 1348
1349 1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1350 1350 and 1s indicating unknown/known.
1351 1351 """
1352 1352 opts = pycompat.byteskwargs(opts)
1353 1353 repo = hg.peer(ui, opts, repopath)
1354 1354 if not repo.capable('known'):
1355 1355 raise error.Abort("known() not supported by target repository")
1356 1356 flags = repo.known([bin(s) for s in ids])
1357 1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1358 1358
1359 1359 @command('debuglabelcomplete', [], _('LABEL...'))
1360 1360 def debuglabelcomplete(ui, repo, *args):
1361 1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1362 1362 debugnamecomplete(ui, repo, *args)
1363 1363
1364 1364 @command('debuglocks',
1365 1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1366 1366 ('W', 'force-wlock', None,
1367 1367 _('free the working state lock (DANGEROUS)')),
1368 1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1369 1369 ('S', 'set-wlock', None,
1370 1370 _('set the working state lock until stopped'))],
1371 1371 _('[OPTION]...'))
1372 1372 def debuglocks(ui, repo, **opts):
1373 1373 """show or modify state of locks
1374 1374
1375 1375 By default, this command will show which locks are held. This
1376 1376 includes the user and process holding the lock, the amount of time
1377 1377 the lock has been held, and the machine name where the process is
1378 1378 running if it's not local.
1379 1379
1380 1380 Locks protect the integrity of Mercurial's data, so should be
1381 1381 treated with care. System crashes or other interruptions may cause
1382 1382 locks to not be properly released, though Mercurial will usually
1383 1383 detect and remove such stale locks automatically.
1384 1384
1385 1385 However, detecting stale locks may not always be possible (for
1386 1386 instance, on a shared filesystem). Removing locks may also be
1387 1387 blocked by filesystem permissions.
1388 1388
1389 1389 Setting a lock will prevent other commands from changing the data.
1390 1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1391 1391 The set locks are removed when the command exits.
1392 1392
1393 1393 Returns 0 if no locks are held.
1394 1394
1395 1395 """
1396 1396
1397 1397 if opts.get(r'force_lock'):
1398 1398 repo.svfs.unlink('lock')
1399 1399 if opts.get(r'force_wlock'):
1400 1400 repo.vfs.unlink('wlock')
1401 1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1402 1402 return 0
1403 1403
1404 1404 locks = []
1405 1405 try:
1406 1406 if opts.get(r'set_wlock'):
1407 1407 try:
1408 1408 locks.append(repo.wlock(False))
1409 1409 except error.LockHeld:
1410 1410 raise error.Abort(_('wlock is already held'))
1411 1411 if opts.get(r'set_lock'):
1412 1412 try:
1413 1413 locks.append(repo.lock(False))
1414 1414 except error.LockHeld:
1415 1415 raise error.Abort(_('lock is already held'))
1416 1416 if len(locks):
1417 1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1418 1418 return 0
1419 1419 finally:
1420 1420 release(*locks)
1421 1421
1422 1422 now = time.time()
1423 1423 held = 0
1424 1424
1425 1425 def report(vfs, name, method):
1426 1426 # this causes stale locks to get reaped for more accurate reporting
1427 1427 try:
1428 1428 l = method(False)
1429 1429 except error.LockHeld:
1430 1430 l = None
1431 1431
1432 1432 if l:
1433 1433 l.release()
1434 1434 else:
1435 1435 try:
1436 1436 st = vfs.lstat(name)
1437 1437 age = now - st[stat.ST_MTIME]
1438 1438 user = util.username(st.st_uid)
1439 1439 locker = vfs.readlock(name)
1440 1440 if ":" in locker:
1441 1441 host, pid = locker.split(':')
1442 1442 if host == socket.gethostname():
1443 1443 locker = 'user %s, process %s' % (user or b'None', pid)
1444 1444 else:
1445 1445 locker = ('user %s, process %s, host %s'
1446 1446 % (user or b'None', pid, host))
1447 1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1448 1448 return 1
1449 1449 except OSError as e:
1450 1450 if e.errno != errno.ENOENT:
1451 1451 raise
1452 1452
1453 1453 ui.write(("%-6s free\n") % (name + ":"))
1454 1454 return 0
1455 1455
1456 1456 held += report(repo.svfs, "lock", repo.lock)
1457 1457 held += report(repo.vfs, "wlock", repo.wlock)
1458 1458
1459 1459 return held
1460 1460
1461 1461 @command('debugmanifestfulltextcache', [
1462 1462 ('', 'clear', False, _('clear the cache')),
1463 ('a', 'add', '', _('add the given manifest node to the cache'),
1463 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1464 1464 _('NODE'))
1465 1465 ], '')
1466 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1466 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1467 1467 """show, clear or amend the contents of the manifest fulltext cache"""
1468 1468
1469 1469 def getcache():
1470 1470 r = repo.manifestlog.getstorage(b'')
1471 1471 try:
1472 1472 return r._fulltextcache
1473 1473 except AttributeError:
1474 1474 msg = _("Current revlog implementation doesn't appear to have a "
1475 1475 "manifest fulltext cache\n")
1476 1476 raise error.Abort(msg)
1477 1477
1478 1478 if opts.get(r'clear'):
1479 1479 with repo.lock():
1480 1480 cache = getcache()
1481 1481 cache.clear(clear_persisted_data=True)
1482 1482 return
1483 1483
1484 1484 if add:
1485 1485 with repo.lock():
1486 try:
1487 m = repo.manifestlog
1488 manifest = m[m.getstorage(b'').lookup(add)]
1489 except error.LookupError as e:
1490 raise error.Abort(e, hint="Check your manifest node id")
1491 manifest.read() # stores revision in cache too
1486 m = repo.manifestlog
1487 store = m.getstorage(b'')
1488 for n in add:
1489 try:
1490 manifest = m[store.lookup(n)]
1491 except error.LookupError as e:
1492 raise error.Abort(e, hint="Check your manifest node id")
1493 manifest.read() # stores revision in cache too
1492 1494 return
1493 1495
1494 1496 cache = getcache()
1495 1497 if not len(cache):
1496 1498 ui.write(_('cache empty\n'))
1497 1499 else:
1498 1500 ui.write(
1499 1501 _('cache contains %d manifest entries, in order of most to '
1500 1502 'least recent:\n') % (len(cache),))
1501 1503 totalsize = 0
1502 1504 for nodeid in cache:
1503 1505 # Use cache.get to not update the LRU order
1504 1506 data = cache.get(nodeid)
1505 1507 size = len(data)
1506 1508 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1507 1509 ui.write(_('id: %s, size %s\n') % (
1508 1510 hex(nodeid), util.bytecount(size)))
1509 1511 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1510 1512 ui.write(
1511 1513 _('total cache data size %s, on-disk %s\n') % (
1512 1514 util.bytecount(totalsize), util.bytecount(ondisk))
1513 1515 )
1514 1516
1515 1517 @command('debugmergestate', [], '')
1516 1518 def debugmergestate(ui, repo, *args):
1517 1519 """print merge state
1518 1520
1519 1521 Use --verbose to print out information about whether v1 or v2 merge state
1520 1522 was chosen."""
1521 1523 def _hashornull(h):
1522 1524 if h == nullhex:
1523 1525 return 'null'
1524 1526 else:
1525 1527 return h
1526 1528
1527 1529 def printrecords(version):
1528 1530 ui.write(('* version %d records\n') % version)
1529 1531 if version == 1:
1530 1532 records = v1records
1531 1533 else:
1532 1534 records = v2records
1533 1535
1534 1536 for rtype, record in records:
1535 1537 # pretty print some record types
1536 1538 if rtype == 'L':
1537 1539 ui.write(('local: %s\n') % record)
1538 1540 elif rtype == 'O':
1539 1541 ui.write(('other: %s\n') % record)
1540 1542 elif rtype == 'm':
1541 1543 driver, mdstate = record.split('\0', 1)
1542 1544 ui.write(('merge driver: %s (state "%s")\n')
1543 1545 % (driver, mdstate))
1544 1546 elif rtype in 'FDC':
1545 1547 r = record.split('\0')
1546 1548 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1547 1549 if version == 1:
1548 1550 onode = 'not stored in v1 format'
1549 1551 flags = r[7]
1550 1552 else:
1551 1553 onode, flags = r[7:9]
1552 1554 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1553 1555 % (f, rtype, state, _hashornull(hash)))
1554 1556 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1555 1557 ui.write((' ancestor path: %s (node %s)\n')
1556 1558 % (afile, _hashornull(anode)))
1557 1559 ui.write((' other path: %s (node %s)\n')
1558 1560 % (ofile, _hashornull(onode)))
1559 1561 elif rtype == 'f':
1560 1562 filename, rawextras = record.split('\0', 1)
1561 1563 extras = rawextras.split('\0')
1562 1564 i = 0
1563 1565 extrastrings = []
1564 1566 while i < len(extras):
1565 1567 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1566 1568 i += 2
1567 1569
1568 1570 ui.write(('file extras: %s (%s)\n')
1569 1571 % (filename, ', '.join(extrastrings)))
1570 1572 elif rtype == 'l':
1571 1573 labels = record.split('\0', 2)
1572 1574 labels = [l for l in labels if len(l) > 0]
1573 1575 ui.write(('labels:\n'))
1574 1576 ui.write((' local: %s\n' % labels[0]))
1575 1577 ui.write((' other: %s\n' % labels[1]))
1576 1578 if len(labels) > 2:
1577 1579 ui.write((' base: %s\n' % labels[2]))
1578 1580 else:
1579 1581 ui.write(('unrecognized entry: %s\t%s\n')
1580 1582 % (rtype, record.replace('\0', '\t')))
1581 1583
1582 1584 # Avoid mergestate.read() since it may raise an exception for unsupported
1583 1585 # merge state records. We shouldn't be doing this, but this is OK since this
1584 1586 # command is pretty low-level.
1585 1587 ms = mergemod.mergestate(repo)
1586 1588
1587 1589 # sort so that reasonable information is on top
1588 1590 v1records = ms._readrecordsv1()
1589 1591 v2records = ms._readrecordsv2()
1590 1592 order = 'LOml'
1591 1593 def key(r):
1592 1594 idx = order.find(r[0])
1593 1595 if idx == -1:
1594 1596 return (1, r[1])
1595 1597 else:
1596 1598 return (0, idx)
1597 1599 v1records.sort(key=key)
1598 1600 v2records.sort(key=key)
1599 1601
1600 1602 if not v1records and not v2records:
1601 1603 ui.write(('no merge state found\n'))
1602 1604 elif not v2records:
1603 1605 ui.note(('no version 2 merge state\n'))
1604 1606 printrecords(1)
1605 1607 elif ms._v1v2match(v1records, v2records):
1606 1608 ui.note(('v1 and v2 states match: using v2\n'))
1607 1609 printrecords(2)
1608 1610 else:
1609 1611 ui.note(('v1 and v2 states mismatch: using v1\n'))
1610 1612 printrecords(1)
1611 1613 if ui.verbose:
1612 1614 printrecords(2)
1613 1615
1614 1616 @command('debugnamecomplete', [], _('NAME...'))
1615 1617 def debugnamecomplete(ui, repo, *args):
1616 1618 '''complete "names" - tags, open branch names, bookmark names'''
1617 1619
1618 1620 names = set()
1619 1621 # since we previously only listed open branches, we will handle that
1620 1622 # specially (after this for loop)
1621 1623 for name, ns in repo.names.iteritems():
1622 1624 if name != 'branches':
1623 1625 names.update(ns.listnames(repo))
1624 1626 names.update(tag for (tag, heads, tip, closed)
1625 1627 in repo.branchmap().iterbranches() if not closed)
1626 1628 completions = set()
1627 1629 if not args:
1628 1630 args = ['']
1629 1631 for a in args:
1630 1632 completions.update(n for n in names if n.startswith(a))
1631 1633 ui.write('\n'.join(sorted(completions)))
1632 1634 ui.write('\n')
1633 1635
1634 1636 @command('debugobsolete',
1635 1637 [('', 'flags', 0, _('markers flag')),
1636 1638 ('', 'record-parents', False,
1637 1639 _('record parent information for the precursor')),
1638 1640 ('r', 'rev', [], _('display markers relevant to REV')),
1639 1641 ('', 'exclusive', False, _('restrict display to markers only '
1640 1642 'relevant to REV')),
1641 1643 ('', 'index', False, _('display index of the marker')),
1642 1644 ('', 'delete', [], _('delete markers specified by indices')),
1643 1645 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1644 1646 _('[OBSOLETED [REPLACEMENT ...]]'))
1645 1647 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1646 1648 """create arbitrary obsolete marker
1647 1649
1648 1650 With no arguments, displays the list of obsolescence markers."""
1649 1651
1650 1652 opts = pycompat.byteskwargs(opts)
1651 1653
1652 1654 def parsenodeid(s):
1653 1655 try:
1654 1656 # We do not use revsingle/revrange functions here to accept
1655 1657 # arbitrary node identifiers, possibly not present in the
1656 1658 # local repository.
1657 1659 n = bin(s)
1658 1660 if len(n) != len(nullid):
1659 1661 raise TypeError()
1660 1662 return n
1661 1663 except TypeError:
1662 1664 raise error.Abort('changeset references must be full hexadecimal '
1663 1665 'node identifiers')
1664 1666
1665 1667 if opts.get('delete'):
1666 1668 indices = []
1667 1669 for v in opts.get('delete'):
1668 1670 try:
1669 1671 indices.append(int(v))
1670 1672 except ValueError:
1671 1673 raise error.Abort(_('invalid index value: %r') % v,
1672 1674 hint=_('use integers for indices'))
1673 1675
1674 1676 if repo.currenttransaction():
1675 1677 raise error.Abort(_('cannot delete obsmarkers in the middle '
1676 1678 'of transaction.'))
1677 1679
1678 1680 with repo.lock():
1679 1681 n = repair.deleteobsmarkers(repo.obsstore, indices)
1680 1682 ui.write(_('deleted %i obsolescence markers\n') % n)
1681 1683
1682 1684 return
1683 1685
1684 1686 if precursor is not None:
1685 1687 if opts['rev']:
1686 1688 raise error.Abort('cannot select revision when creating marker')
1687 1689 metadata = {}
1688 1690 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1689 1691 succs = tuple(parsenodeid(succ) for succ in successors)
1690 1692 l = repo.lock()
1691 1693 try:
1692 1694 tr = repo.transaction('debugobsolete')
1693 1695 try:
1694 1696 date = opts.get('date')
1695 1697 if date:
1696 1698 date = dateutil.parsedate(date)
1697 1699 else:
1698 1700 date = None
1699 1701 prec = parsenodeid(precursor)
1700 1702 parents = None
1701 1703 if opts['record_parents']:
1702 1704 if prec not in repo.unfiltered():
1703 1705 raise error.Abort('cannot use --record-parents on '
1704 1706 'unknown changesets')
1705 1707 parents = repo.unfiltered()[prec].parents()
1706 1708 parents = tuple(p.node() for p in parents)
1707 1709 repo.obsstore.create(tr, prec, succs, opts['flags'],
1708 1710 parents=parents, date=date,
1709 1711 metadata=metadata, ui=ui)
1710 1712 tr.close()
1711 1713 except ValueError as exc:
1712 1714 raise error.Abort(_('bad obsmarker input: %s') %
1713 1715 pycompat.bytestr(exc))
1714 1716 finally:
1715 1717 tr.release()
1716 1718 finally:
1717 1719 l.release()
1718 1720 else:
1719 1721 if opts['rev']:
1720 1722 revs = scmutil.revrange(repo, opts['rev'])
1721 1723 nodes = [repo[r].node() for r in revs]
1722 1724 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1723 1725 exclusive=opts['exclusive']))
1724 1726 markers.sort(key=lambda x: x._data)
1725 1727 else:
1726 1728 markers = obsutil.getmarkers(repo)
1727 1729
1728 1730 markerstoiter = markers
1729 1731 isrelevant = lambda m: True
1730 1732 if opts.get('rev') and opts.get('index'):
1731 1733 markerstoiter = obsutil.getmarkers(repo)
1732 1734 markerset = set(markers)
1733 1735 isrelevant = lambda m: m in markerset
1734 1736
1735 1737 fm = ui.formatter('debugobsolete', opts)
1736 1738 for i, m in enumerate(markerstoiter):
1737 1739 if not isrelevant(m):
1738 1740 # marker can be irrelevant when we're iterating over a set
1739 1741 # of markers (markerstoiter) which is bigger than the set
1740 1742 # of markers we want to display (markers)
1741 1743 # this can happen if both --index and --rev options are
1742 1744 # provided and thus we need to iterate over all of the markers
1743 1745 # to get the correct indices, but only display the ones that
1744 1746 # are relevant to --rev value
1745 1747 continue
1746 1748 fm.startitem()
1747 1749 ind = i if opts.get('index') else None
1748 1750 cmdutil.showmarker(fm, m, index=ind)
1749 1751 fm.end()
1750 1752
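# Illustrative invocations of the command above (not part of the original
# text; OLDNODE/NEWNODE are placeholders for full hexadecimal node ids):
#
#   hg debugobsolete                          # list all obsolescence markers
#   hg debugobsolete --rev . --index          # markers relevant to the parent of the working directory
#   hg debugobsolete --delete 0 --delete 3    # remove markers by index
#   hg debugobsolete OLDNODE NEWNODE          # record OLDNODE as replaced by NEWNODE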
1751 1753 @command('debugp1copies',
1752 1754 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1753 1755 _('[-r REV]'))
1754 1756 def debugp1copies(ui, repo, **opts):
1755 1757 """dump copy information compared to p1"""
1756 1758
1757 1759 opts = pycompat.byteskwargs(opts)
1758 1760 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1759 1761 for dst, src in ctx.p1copies().items():
1760 1762 ui.write('%s -> %s\n' % (src, dst))
1761 1763
1762 1764 @command('debugp2copies',
1763 1765 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1764 1766 _('[-r REV]'))
1765 1767 def debugp2copies(ui, repo, **opts):
1766 1768 """dump copy information compared to p2"""
1767 1769
1768 1770 opts = pycompat.byteskwargs(opts)
1769 1771 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1770 1772 for dst, src in ctx.p2copies().items():
1771 1773 ui.write('%s -> %s\n' % (src, dst))
1772 1774
1773 1775 @command('debugpathcomplete',
1774 1776 [('f', 'full', None, _('complete an entire path')),
1775 1777 ('n', 'normal', None, _('show only normal files')),
1776 1778 ('a', 'added', None, _('show only added files')),
1777 1779 ('r', 'removed', None, _('show only removed files'))],
1778 1780 _('FILESPEC...'))
1779 1781 def debugpathcomplete(ui, repo, *specs, **opts):
1780 1782 '''complete part or all of a tracked path
1781 1783
1782 1784 This command supports shells that offer path name completion. It
1783 1785 currently completes only files already known to the dirstate.
1784 1786
1785 1787 Completion extends only to the next path segment unless
1786 1788 --full is specified, in which case entire paths are used.'''
1787 1789
1788 1790 def complete(path, acceptable):
1789 1791 dirstate = repo.dirstate
1790 1792 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1791 1793 rootdir = repo.root + pycompat.ossep
1792 1794 if spec != repo.root and not spec.startswith(rootdir):
1793 1795 return [], []
1794 1796 if os.path.isdir(spec):
1795 1797 spec += '/'
1796 1798 spec = spec[len(rootdir):]
1797 1799 fixpaths = pycompat.ossep != '/'
1798 1800 if fixpaths:
1799 1801 spec = spec.replace(pycompat.ossep, '/')
1800 1802 speclen = len(spec)
1801 1803 fullpaths = opts[r'full']
1802 1804 files, dirs = set(), set()
1803 1805 adddir, addfile = dirs.add, files.add
1804 1806 for f, st in dirstate.iteritems():
1805 1807 if f.startswith(spec) and st[0] in acceptable:
1806 1808 if fixpaths:
1807 1809 f = f.replace('/', pycompat.ossep)
1808 1810 if fullpaths:
1809 1811 addfile(f)
1810 1812 continue
1811 1813 s = f.find(pycompat.ossep, speclen)
1812 1814 if s >= 0:
1813 1815 adddir(f[:s])
1814 1816 else:
1815 1817 addfile(f)
1816 1818 return files, dirs
1817 1819
1818 1820 acceptable = ''
1819 1821 if opts[r'normal']:
1820 1822 acceptable += 'nm'
1821 1823 if opts[r'added']:
1822 1824 acceptable += 'a'
1823 1825 if opts[r'removed']:
1824 1826 acceptable += 'r'
1825 1827 cwd = repo.getcwd()
1826 1828 if not specs:
1827 1829 specs = ['.']
1828 1830
1829 1831 files, dirs = set(), set()
1830 1832 for spec in specs:
1831 1833 f, d = complete(spec, acceptable or 'nmar')
1832 1834 files.update(f)
1833 1835 dirs.update(d)
1834 1836 files.update(dirs)
1835 1837 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1836 1838 ui.write('\n')
1837 1839
1838 1840 @command('debugpathcopies',
1839 1841 cmdutil.walkopts,
1840 1842 'hg debugpathcopies REV1 REV2 [FILE]',
1841 1843 inferrepo=True)
1842 1844 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1843 1845 """show copies between two revisions"""
1844 1846 ctx1 = scmutil.revsingle(repo, rev1)
1845 1847 ctx2 = scmutil.revsingle(repo, rev2)
1846 1848 m = scmutil.match(ctx1, pats, opts)
1847 1849 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1848 1850 ui.write('%s -> %s\n' % (src, dst))
1849 1851
1850 1852 @command('debugpeer', [], _('PATH'), norepo=True)
1851 1853 def debugpeer(ui, path):
1852 1854 """establish a connection to a peer repository"""
1853 1855 # Always enable peer request logging. Requires --debug to display
1854 1856 # though.
1855 1857 overrides = {
1856 1858 ('devel', 'debug.peer-request'): True,
1857 1859 }
1858 1860
1859 1861 with ui.configoverride(overrides):
1860 1862 peer = hg.peer(ui, {}, path)
1861 1863
1862 1864 local = peer.local() is not None
1863 1865 canpush = peer.canpush()
1864 1866
1865 1867 ui.write(_('url: %s\n') % peer.url())
1866 1868 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1867 1869 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1868 1870
1869 1871 @command('debugpickmergetool',
1870 1872 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1871 1873 ('', 'changedelete', None, _('emulate merging change and delete')),
1872 1874 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1873 1875 _('[PATTERN]...'),
1874 1876 inferrepo=True)
1875 1877 def debugpickmergetool(ui, repo, *pats, **opts):
1876 1878 """examine which merge tool is chosen for specified file
1877 1879
1878 1880 As described in :hg:`help merge-tools`, Mercurial examines
1879 1881 configurations below in this order to decide which merge tool is
1880 1882 chosen for specified file.
1881 1883
1882 1884 1. ``--tool`` option
1883 1885 2. ``HGMERGE`` environment variable
1884 1886 3. configurations in ``merge-patterns`` section
1885 1887 4. configuration of ``ui.merge``
1886 1888 5. configurations in ``merge-tools`` section
1887 1889 6. ``hgmerge`` tool (for historical reasons only)
1888 1890 7. default tool for fallback (``:merge`` or ``:prompt``)
1889 1891
1890 1892 This command writes out the examination result in the style below::
1891 1893
1892 1894 FILE = MERGETOOL
1893 1895
1894 1896 By default, all files known in the first parent context of the
1895 1897 working directory are examined. Use file patterns and/or -I/-X
1896 1898 options to limit target files. -r/--rev is also useful to examine
1897 1899 files in another context without actually updating to it.
1898 1900
1899 1901 With --debug, this command also shows warning messages while
1900 1902 matching against ``merge-patterns`` and so on. It is recommended to
1901 1903 use this option with explicit file patterns and/or -I/-X options,
1902 1904 because this option increases the amount of output per file according
1903 1905 to the configurations in hgrc.
1904 1906
1905 1907 With -v/--verbose, this command shows configurations below at
1906 1908 first (only if specified).
1907 1909
1908 1910 - ``--tool`` option
1909 1911 - ``HGMERGE`` environment variable
1910 1912 - configuration of ``ui.merge``
1911 1913
1912 1914 If a merge tool is chosen before matching against
1913 1915 ``merge-patterns``, this command can't show any helpful
1914 1916 information, even with --debug. In such a case, the information
1915 1917 above is useful for knowing why a merge tool was chosen.
1916 1918 """
1917 1919 opts = pycompat.byteskwargs(opts)
1918 1920 overrides = {}
1919 1921 if opts['tool']:
1920 1922 overrides[('ui', 'forcemerge')] = opts['tool']
1921 1923 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1922 1924
1923 1925 with ui.configoverride(overrides, 'debugmergepatterns'):
1924 1926 hgmerge = encoding.environ.get("HGMERGE")
1925 1927 if hgmerge is not None:
1926 1928 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1927 1929 uimerge = ui.config("ui", "merge")
1928 1930 if uimerge:
1929 1931 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1930 1932
1931 1933 ctx = scmutil.revsingle(repo, opts.get('rev'))
1932 1934 m = scmutil.match(ctx, pats, opts)
1933 1935 changedelete = opts['changedelete']
1934 1936 for path in ctx.walk(m):
1935 1937 fctx = ctx[path]
1936 1938 try:
1937 1939 if not ui.debugflag:
1938 1940 ui.pushbuffer(error=True)
1939 1941 tool, toolpath = filemerge._picktool(repo, ui, path,
1940 1942 fctx.isbinary(),
1941 1943 'l' in fctx.flags(),
1942 1944 changedelete)
1943 1945 finally:
1944 1946 if not ui.debugflag:
1945 1947 ui.popbuffer()
1946 1948 ui.write(('%s = %s\n') % (path, tool))
1947 1949
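# Illustrative invocations (not part of the original text; the file pattern
# is a placeholder):
#
#   hg debugpickmergetool                     # examine files in the wdir parent
#   hg debugpickmergetool -r 1.0 'glob:**.c'  # restrict to *.c files at rev 1.0
#   hg debugpickmergetool --tool :merge -v    # see how an explicit --tool wins
#
# Each line of output follows the ``FILE = MERGETOOL`` style described above.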
1948 1950 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1949 1951 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1950 1952 '''access the pushkey key/value protocol
1951 1953
1952 1954 With two args, list the keys in the given namespace.
1953 1955
1954 1956 With five args, set a key to new if it currently is set to old.
1955 1957 Reports success or failure.
1956 1958 '''
1957 1959
1958 1960 target = hg.peer(ui, {}, repopath)
1959 1961 if keyinfo:
1960 1962 key, old, new = keyinfo
1961 1963 with target.commandexecutor() as e:
1962 1964 r = e.callcommand('pushkey', {
1963 1965 'namespace': namespace,
1964 1966 'key': key,
1965 1967 'old': old,
1966 1968 'new': new,
1967 1969 }).result()
1968 1970
1969 1971 ui.status(pycompat.bytestr(r) + '\n')
1970 1972 return not r
1971 1973 else:
1972 1974 for k, v in sorted(target.listkeys(namespace).iteritems()):
1973 1975 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1974 1976 stringutil.escapestr(v)))
1975 1977
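# Illustrative invocations matching the two forms described in the docstring
# (repository path, bookmark name and node ids are placeholders):
#
#   hg debugpushkey /path/to/repo bookmarks                     # list bookmark keys
#   hg debugpushkey /path/to/repo phases                        # list phase roots
#   hg debugpushkey /path/to/repo bookmarks BM OLDNODE NEWNODE  # conditional update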
1976 1978 @command('debugpvec', [], _('A B'))
1977 1979 def debugpvec(ui, repo, a, b=None):
1978 1980 ca = scmutil.revsingle(repo, a)
1979 1981 cb = scmutil.revsingle(repo, b)
1980 1982 pa = pvec.ctxpvec(ca)
1981 1983 pb = pvec.ctxpvec(cb)
1982 1984 if pa == pb:
1983 1985 rel = "="
1984 1986 elif pa > pb:
1985 1987 rel = ">"
1986 1988 elif pa < pb:
1987 1989 rel = "<"
1988 1990 elif pa | pb:
1989 1991 rel = "|"
1990 1992 ui.write(_("a: %s\n") % pa)
1991 1993 ui.write(_("b: %s\n") % pb)
1992 1994 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1993 1995 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1994 1996 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1995 1997 pa.distance(pb), rel))
1996 1998
1997 1999 @command('debugrebuilddirstate|debugrebuildstate',
1998 2000 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1999 2001 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2000 2002 'the working copy parent')),
2001 2003 ],
2002 2004 _('[-r REV]'))
2003 2005 def debugrebuilddirstate(ui, repo, rev, **opts):
2004 2006 """rebuild the dirstate as it would look like for the given revision
2005 2007
2006 2008 If no revision is specified, the first current parent will be used.
2007 2009
2008 2010 The dirstate will be set to the files of the given revision.
2009 2011 The actual working directory content or existing dirstate
2010 2012 information such as adds or removes is not considered.
2011 2013
2012 2014 ``minimal`` will only rebuild the dirstate status for files that claim to be
2013 2015 tracked but are not in the parent manifest, or that exist in the parent
2014 2016 manifest but are not in the dirstate. It will not change adds, removes, or
2015 2017 modified files that are in the working copy parent.
2016 2018
2017 2019 One use of this command is to make the next :hg:`status` invocation
2018 2020 check the actual file content.
2019 2021 """
2020 2022 ctx = scmutil.revsingle(repo, rev)
2021 2023 with repo.wlock():
2022 2024 dirstate = repo.dirstate
2023 2025 changedfiles = None
2024 2026 # See command doc for what minimal does.
2025 2027 if opts.get(r'minimal'):
2026 2028 manifestfiles = set(ctx.manifest().keys())
2027 2029 dirstatefiles = set(dirstate)
2028 2030 manifestonly = manifestfiles - dirstatefiles
2029 2031 dsonly = dirstatefiles - manifestfiles
2030 2032 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2031 2033 changedfiles = manifestonly | dsnotadded
2032 2034
2033 2035 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2034 2036
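# Illustrative invocations (not part of the original text):
#
#   hg debugrebuilddirstate                  # rebuild from the current parent
#   hg debugrebuilddirstate -r 1.0           # rebuild as of revision 1.0
#   hg debugrebuilddirstate --minimal        # only fix inconsistent entries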
2035 2037 @command('debugrebuildfncache', [], '')
2036 2038 def debugrebuildfncache(ui, repo):
2037 2039 """rebuild the fncache file"""
2038 2040 repair.rebuildfncache(ui, repo)
2039 2041
2040 2042 @command('debugrename',
2041 2043 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2042 2044 _('[-r REV] [FILE]...'))
2043 2045 def debugrename(ui, repo, *pats, **opts):
2044 2046 """dump rename information"""
2045 2047
2046 2048 opts = pycompat.byteskwargs(opts)
2047 2049 ctx = scmutil.revsingle(repo, opts.get('rev'))
2048 2050 m = scmutil.match(ctx, pats, opts)
2049 2051 for abs in ctx.walk(m):
2050 2052 fctx = ctx[abs]
2051 2053 o = fctx.filelog().renamed(fctx.filenode())
2052 2054 rel = repo.pathto(abs)
2053 2055 if o:
2054 2056 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2055 2057 else:
2056 2058 ui.write(_("%s not renamed\n") % rel)
2057 2059
2058 2060 @command('debugrevlog', cmdutil.debugrevlogopts +
2059 2061 [('d', 'dump', False, _('dump index data'))],
2060 2062 _('-c|-m|FILE'),
2061 2063 optionalrepo=True)
2062 2064 def debugrevlog(ui, repo, file_=None, **opts):
2063 2065 """show data and statistics about a revlog"""
2064 2066 opts = pycompat.byteskwargs(opts)
2065 2067 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2066 2068
2067 2069 if opts.get("dump"):
2068 2070 numrevs = len(r)
2069 2071 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2070 2072 " rawsize totalsize compression heads chainlen\n"))
2071 2073 ts = 0
2072 2074 heads = set()
2073 2075
2074 2076 for rev in pycompat.xrange(numrevs):
2075 2077 dbase = r.deltaparent(rev)
2076 2078 if dbase == -1:
2077 2079 dbase = rev
2078 2080 cbase = r.chainbase(rev)
2079 2081 clen = r.chainlen(rev)
2080 2082 p1, p2 = r.parentrevs(rev)
2081 2083 rs = r.rawsize(rev)
2082 2084 ts = ts + rs
2083 2085 heads -= set(r.parentrevs(rev))
2084 2086 heads.add(rev)
2085 2087 try:
2086 2088 compression = ts / r.end(rev)
2087 2089 except ZeroDivisionError:
2088 2090 compression = 0
2089 2091 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2090 2092 "%11d %5d %8d\n" %
2091 2093 (rev, p1, p2, r.start(rev), r.end(rev),
2092 2094 r.start(dbase), r.start(cbase),
2093 2095 r.start(p1), r.start(p2),
2094 2096 rs, ts, compression, len(heads), clen))
2095 2097 return 0
2096 2098
2097 2099 v = r.version
2098 2100 format = v & 0xFFFF
2099 2101 flags = []
2100 2102 gdelta = False
2101 2103 if v & revlog.FLAG_INLINE_DATA:
2102 2104 flags.append('inline')
2103 2105 if v & revlog.FLAG_GENERALDELTA:
2104 2106 gdelta = True
2105 2107 flags.append('generaldelta')
2106 2108 if not flags:
2107 2109 flags = ['(none)']
2108 2110
2109 2111 ### tracks merge vs single parent
2110 2112 nummerges = 0
2111 2113
2112 2114 ### tracks the ways the "delta" are built
2113 2115 # nodelta
2114 2116 numempty = 0
2115 2117 numemptytext = 0
2116 2118 numemptydelta = 0
2117 2119 # full file content
2118 2120 numfull = 0
2119 2121 # intermediate snapshot against a prior snapshot
2120 2122 numsemi = 0
2121 2123 # snapshot count per depth
2122 2124 numsnapdepth = collections.defaultdict(lambda: 0)
2123 2125 # delta against previous revision
2124 2126 numprev = 0
2125 2127 # delta against first or second parent (not prev)
2126 2128 nump1 = 0
2127 2129 nump2 = 0
2128 2130 # delta against neither prev nor parents
2129 2131 numother = 0
2130 2132 # delta against prev that are also first or second parent
2131 2133 # (details of `numprev`)
2132 2134 nump1prev = 0
2133 2135 nump2prev = 0
2134 2136
2135 2137 # data about delta chain of each revs
2136 2138 chainlengths = []
2137 2139 chainbases = []
2138 2140 chainspans = []
2139 2141
2140 2142 # data about each revision
2141 2143 datasize = [None, 0, 0]
2142 2144 fullsize = [None, 0, 0]
2143 2145 semisize = [None, 0, 0]
2144 2146 # snapshot count per depth
2145 2147 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2146 2148 deltasize = [None, 0, 0]
2147 2149 chunktypecounts = {}
2148 2150 chunktypesizes = {}
2149 2151
2150 2152 def addsize(size, l):
2151 2153 if l[0] is None or size < l[0]:
2152 2154 l[0] = size
2153 2155 if size > l[1]:
2154 2156 l[1] = size
2155 2157 l[2] += size
2156 2158
2157 2159 numrevs = len(r)
2158 2160 for rev in pycompat.xrange(numrevs):
2159 2161 p1, p2 = r.parentrevs(rev)
2160 2162 delta = r.deltaparent(rev)
2161 2163 if format > 0:
2162 2164 addsize(r.rawsize(rev), datasize)
2163 2165 if p2 != nullrev:
2164 2166 nummerges += 1
2165 2167 size = r.length(rev)
2166 2168 if delta == nullrev:
2167 2169 chainlengths.append(0)
2168 2170 chainbases.append(r.start(rev))
2169 2171 chainspans.append(size)
2170 2172 if size == 0:
2171 2173 numempty += 1
2172 2174 numemptytext += 1
2173 2175 else:
2174 2176 numfull += 1
2175 2177 numsnapdepth[0] += 1
2176 2178 addsize(size, fullsize)
2177 2179 addsize(size, snapsizedepth[0])
2178 2180 else:
2179 2181 chainlengths.append(chainlengths[delta] + 1)
2180 2182 baseaddr = chainbases[delta]
2181 2183 revaddr = r.start(rev)
2182 2184 chainbases.append(baseaddr)
2183 2185 chainspans.append((revaddr - baseaddr) + size)
2184 2186 if size == 0:
2185 2187 numempty += 1
2186 2188 numemptydelta += 1
2187 2189 elif r.issnapshot(rev):
2188 2190 addsize(size, semisize)
2189 2191 numsemi += 1
2190 2192 depth = r.snapshotdepth(rev)
2191 2193 numsnapdepth[depth] += 1
2192 2194 addsize(size, snapsizedepth[depth])
2193 2195 else:
2194 2196 addsize(size, deltasize)
2195 2197 if delta == rev - 1:
2196 2198 numprev += 1
2197 2199 if delta == p1:
2198 2200 nump1prev += 1
2199 2201 elif delta == p2:
2200 2202 nump2prev += 1
2201 2203 elif delta == p1:
2202 2204 nump1 += 1
2203 2205 elif delta == p2:
2204 2206 nump2 += 1
2205 2207 elif delta != nullrev:
2206 2208 numother += 1
2207 2209
2208 2210 # Obtain data on the raw chunks in the revlog.
2209 2211 if util.safehasattr(r, '_getsegmentforrevs'):
2210 2212 segment = r._getsegmentforrevs(rev, rev)[1]
2211 2213 else:
2212 2214 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2213 2215 if segment:
2214 2216 chunktype = bytes(segment[0:1])
2215 2217 else:
2216 2218 chunktype = 'empty'
2217 2219
2218 2220 if chunktype not in chunktypecounts:
2219 2221 chunktypecounts[chunktype] = 0
2220 2222 chunktypesizes[chunktype] = 0
2221 2223
2222 2224 chunktypecounts[chunktype] += 1
2223 2225 chunktypesizes[chunktype] += size
2224 2226
2225 2227 # Adjust size min value for empty cases
2226 2228 for size in (datasize, fullsize, semisize, deltasize):
2227 2229 if size[0] is None:
2228 2230 size[0] = 0
2229 2231
2230 2232 numdeltas = numrevs - numfull - numempty - numsemi
2231 2233 numoprev = numprev - nump1prev - nump2prev
2232 2234 totalrawsize = datasize[2]
2233 2235 datasize[2] /= numrevs
2234 2236 fulltotal = fullsize[2]
2235 2237 fullsize[2] /= numfull
2236 2238 semitotal = semisize[2]
2237 2239 snaptotal = {}
2238 2240 if numsemi > 0:
2239 2241 semisize[2] /= numsemi
2240 2242 for depth in snapsizedepth:
2241 2243 snaptotal[depth] = snapsizedepth[depth][2]
2242 2244 snapsizedepth[depth][2] /= numsnapdepth[depth]
2243 2245
2244 2246 deltatotal = deltasize[2]
2245 2247 if numdeltas > 0:
2246 2248 deltasize[2] /= numdeltas
2247 2249 totalsize = fulltotal + semitotal + deltatotal
2248 2250 avgchainlen = sum(chainlengths) / numrevs
2249 2251 maxchainlen = max(chainlengths)
2250 2252 maxchainspan = max(chainspans)
2251 2253 compratio = 1
2252 2254 if totalsize:
2253 2255 compratio = totalrawsize / totalsize
2254 2256
2255 2257 basedfmtstr = '%%%dd\n'
2256 2258 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2257 2259
2258 2260 def dfmtstr(max):
2259 2261 return basedfmtstr % len(str(max))
2260 2262 def pcfmtstr(max, padding=0):
2261 2263 return basepcfmtstr % (len(str(max)), ' ' * padding)
2262 2264
2263 2265 def pcfmt(value, total):
2264 2266 if total:
2265 2267 return (value, 100 * float(value) / total)
2266 2268 else:
2267 2269 return value, 100.0
2268 2270
2269 2271 ui.write(('format : %d\n') % format)
2270 2272 ui.write(('flags : %s\n') % ', '.join(flags))
2271 2273
2272 2274 ui.write('\n')
2273 2275 fmt = pcfmtstr(totalsize)
2274 2276 fmt2 = dfmtstr(totalsize)
2275 2277 ui.write(('revisions : ') + fmt2 % numrevs)
2276 2278 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2277 2279 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2278 2280 ui.write(('revisions : ') + fmt2 % numrevs)
2279 2281 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2280 2282 ui.write((' text : ')
2281 2283 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2282 2284 ui.write((' delta : ')
2283 2285 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2284 2286 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2285 2287 for depth in sorted(numsnapdepth):
2286 2288 ui.write((' lvl-%-3d : ' % depth)
2287 2289 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2288 2290 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2289 2291 ui.write(('revision size : ') + fmt2 % totalsize)
2290 2292 ui.write((' snapshot : ')
2291 2293 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2292 2294 for depth in sorted(numsnapdepth):
2293 2295 ui.write((' lvl-%-3d : ' % depth)
2294 2296 + fmt % pcfmt(snaptotal[depth], totalsize))
2295 2297 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2296 2298
2297 2299 def fmtchunktype(chunktype):
2298 2300 if chunktype == 'empty':
2299 2301 return ' %s : ' % chunktype
2300 2302 elif chunktype in pycompat.bytestr(string.ascii_letters):
2301 2303 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2302 2304 else:
2303 2305 return ' 0x%s : ' % hex(chunktype)
2304 2306
2305 2307 ui.write('\n')
2306 2308 ui.write(('chunks : ') + fmt2 % numrevs)
2307 2309 for chunktype in sorted(chunktypecounts):
2308 2310 ui.write(fmtchunktype(chunktype))
2309 2311 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2310 2312 ui.write(('chunks size : ') + fmt2 % totalsize)
2311 2313 for chunktype in sorted(chunktypecounts):
2312 2314 ui.write(fmtchunktype(chunktype))
2313 2315 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2314 2316
2315 2317 ui.write('\n')
2316 2318 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2317 2319 ui.write(('avg chain length : ') + fmt % avgchainlen)
2318 2320 ui.write(('max chain length : ') + fmt % maxchainlen)
2319 2321 ui.write(('max chain reach : ') + fmt % maxchainspan)
2320 2322 ui.write(('compression ratio : ') + fmt % compratio)
2321 2323
2322 2324 if format > 0:
2323 2325 ui.write('\n')
2324 2326 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2325 2327 % tuple(datasize))
2326 2328 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2327 2329 % tuple(fullsize))
2328 2330 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2329 2331 % tuple(semisize))
2330 2332 for depth in sorted(snapsizedepth):
2331 2333 if depth == 0:
2332 2334 continue
2333 2335 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2334 2336 % ((depth,) + tuple(snapsizedepth[depth])))
2335 2337 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2336 2338 % tuple(deltasize))
2337 2339
2338 2340 if numdeltas > 0:
2339 2341 ui.write('\n')
2340 2342 fmt = pcfmtstr(numdeltas)
2341 2343 fmt2 = pcfmtstr(numdeltas, 4)
2342 2344 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2343 2345 if numprev > 0:
2344 2346 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2345 2347 numprev))
2346 2348 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2347 2349 numprev))
2348 2350 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2349 2351 numprev))
2350 2352 if gdelta:
2351 2353 ui.write(('deltas against p1 : ')
2352 2354 + fmt % pcfmt(nump1, numdeltas))
2353 2355 ui.write(('deltas against p2 : ')
2354 2356 + fmt % pcfmt(nump2, numdeltas))
2355 2357 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2356 2358 numdeltas))
2357 2359
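# Illustrative invocations (not part of the original text; FILE is a
# placeholder for a tracked file or revlog path, per the synopsis):
#
#   hg debugrevlog -c              # statistics for the changelog
#   hg debugrevlog -m --dump       # per-revision dump of the manifest revlog
#   hg debugrevlog FILE            # statistics for a single filelog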
2358 2360 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2359 2361 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2360 2362 _('[-f FORMAT] -c|-m|FILE'),
2361 2363 optionalrepo=True)
2362 2364 def debugrevlogindex(ui, repo, file_=None, **opts):
2363 2365 """dump the contents of a revlog index"""
2364 2366 opts = pycompat.byteskwargs(opts)
2365 2367 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2366 2368 format = opts.get('format', 0)
2367 2369 if format not in (0, 1):
2368 2370 raise error.Abort(_("unknown format %d") % format)
2369 2371
2370 2372 if ui.debugflag:
2371 2373 shortfn = hex
2372 2374 else:
2373 2375 shortfn = short
2374 2376
2375 2377 # There might not be anything in r, so have a sane default
2376 2378 idlen = 12
2377 2379 for i in r:
2378 2380 idlen = len(shortfn(r.node(i)))
2379 2381 break
2380 2382
2381 2383 if format == 0:
2382 2384 if ui.verbose:
2383 2385 ui.write((" rev offset length linkrev"
2384 2386 " %s %s p2\n") % ("nodeid".ljust(idlen),
2385 2387 "p1".ljust(idlen)))
2386 2388 else:
2387 2389 ui.write((" rev linkrev %s %s p2\n") % (
2388 2390 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2389 2391 elif format == 1:
2390 2392 if ui.verbose:
2391 2393 ui.write((" rev flag offset length size link p1"
2392 2394 " p2 %s\n") % "nodeid".rjust(idlen))
2393 2395 else:
2394 2396 ui.write((" rev flag size link p1 p2 %s\n") %
2395 2397 "nodeid".rjust(idlen))
2396 2398
2397 2399 for i in r:
2398 2400 node = r.node(i)
2399 2401 if format == 0:
2400 2402 try:
2401 2403 pp = r.parents(node)
2402 2404 except Exception:
2403 2405 pp = [nullid, nullid]
2404 2406 if ui.verbose:
2405 2407 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2406 2408 i, r.start(i), r.length(i), r.linkrev(i),
2407 2409 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2408 2410 else:
2409 2411 ui.write("% 6d % 7d %s %s %s\n" % (
2410 2412 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2411 2413 shortfn(pp[1])))
2412 2414 elif format == 1:
2413 2415 pr = r.parentrevs(i)
2414 2416 if ui.verbose:
2415 2417 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2416 2418 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2417 2419 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2418 2420 else:
2419 2421 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2420 2422 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2421 2423 shortfn(node)))
2422 2424
2423 2425 @command('debugrevspec',
2424 2426 [('', 'optimize', None,
2425 2427 _('print parsed tree after optimizing (DEPRECATED)')),
2426 2428 ('', 'show-revs', True, _('print list of result revisions (default)')),
2427 2429 ('s', 'show-set', None, _('print internal representation of result set')),
2428 2430 ('p', 'show-stage', [],
2429 2431 _('print parsed tree at the given stage'), _('NAME')),
2430 2432 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2431 2433 ('', 'verify-optimized', False, _('verify optimized result')),
2432 2434 ],
2433 2435 ('REVSPEC'))
2434 2436 def debugrevspec(ui, repo, expr, **opts):
2435 2437 """parse and apply a revision specification
2436 2438
2437 2439 Use -p/--show-stage option to print the parsed tree at the given stages.
2438 2440 Use -p all to print tree at every stage.
2439 2441
2440 2442 Use --no-show-revs option with -s or -p to print only the set
2441 2443 representation or the parsed tree respectively.
2442 2444
2443 2445 Use --verify-optimized to compare the optimized result with the unoptimized
2444 2446 one. Returns 1 if the optimized result differs.
2445 2447 """
2446 2448 opts = pycompat.byteskwargs(opts)
2447 2449 aliases = ui.configitems('revsetalias')
2448 2450 stages = [
2449 2451 ('parsed', lambda tree: tree),
2450 2452 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2451 2453 ui.warn)),
2452 2454 ('concatenated', revsetlang.foldconcat),
2453 2455 ('analyzed', revsetlang.analyze),
2454 2456 ('optimized', revsetlang.optimize),
2455 2457 ]
2456 2458 if opts['no_optimized']:
2457 2459 stages = stages[:-1]
2458 2460 if opts['verify_optimized'] and opts['no_optimized']:
2459 2461 raise error.Abort(_('cannot use --verify-optimized with '
2460 2462 '--no-optimized'))
2461 2463 stagenames = set(n for n, f in stages)
2462 2464
2463 2465 showalways = set()
2464 2466 showchanged = set()
2465 2467 if ui.verbose and not opts['show_stage']:
2466 2468 # show parsed tree by --verbose (deprecated)
2467 2469 showalways.add('parsed')
2468 2470 showchanged.update(['expanded', 'concatenated'])
2469 2471 if opts['optimize']:
2470 2472 showalways.add('optimized')
2471 2473 if opts['show_stage'] and opts['optimize']:
2472 2474 raise error.Abort(_('cannot use --optimize with --show-stage'))
2473 2475 if opts['show_stage'] == ['all']:
2474 2476 showalways.update(stagenames)
2475 2477 else:
2476 2478 for n in opts['show_stage']:
2477 2479 if n not in stagenames:
2478 2480 raise error.Abort(_('invalid stage name: %s') % n)
2479 2481 showalways.update(opts['show_stage'])
2480 2482
2481 2483 treebystage = {}
2482 2484 printedtree = None
2483 2485 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2484 2486 for n, f in stages:
2485 2487 treebystage[n] = tree = f(tree)
2486 2488 if n in showalways or (n in showchanged and tree != printedtree):
2487 2489 if opts['show_stage'] or n != 'parsed':
2488 2490 ui.write(("* %s:\n") % n)
2489 2491 ui.write(revsetlang.prettyformat(tree), "\n")
2490 2492 printedtree = tree
2491 2493
2492 2494 if opts['verify_optimized']:
2493 2495 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2494 2496 brevs = revset.makematcher(treebystage['optimized'])(repo)
2495 2497 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2496 2498 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2497 2499 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2498 2500 arevs = list(arevs)
2499 2501 brevs = list(brevs)
2500 2502 if arevs == brevs:
2501 2503 return 0
2502 2504 ui.write(('--- analyzed\n'), label='diff.file_a')
2503 2505 ui.write(('+++ optimized\n'), label='diff.file_b')
2504 2506 sm = difflib.SequenceMatcher(None, arevs, brevs)
2505 2507 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2506 2508 if tag in (r'delete', r'replace'):
2507 2509 for c in arevs[alo:ahi]:
2508 2510 ui.write('-%d\n' % c, label='diff.deleted')
2509 2511 if tag in (r'insert', r'replace'):
2510 2512 for c in brevs[blo:bhi]:
2511 2513 ui.write('+%d\n' % c, label='diff.inserted')
2512 2514 if tag == r'equal':
2513 2515 for c in arevs[alo:ahi]:
2514 2516 ui.write(' %d\n' % c)
2515 2517 return 1
2516 2518
2517 2519 func = revset.makematcher(tree)
2518 2520 revs = func(repo)
2519 2521 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2520 2522 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2521 2523 if not opts['show_revs']:
2522 2524 return
2523 2525 for c in revs:
2524 2526 ui.write("%d\n" % c)
2525 2527
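# Illustrative invocations (not part of the original text; the revsets are
# only examples):
#
#   hg debugrevspec '::tip and public()'            # list matching revisions
#   hg debugrevspec -p all 'heads(default)'         # print tree at every stage
#   hg debugrevspec -s --no-show-revs 'tip^::tip'   # set representation only
#   hg debugrevspec --verify-optimized 'author(x)'  # compare optimized result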
2526 2528 @command('debugserve', [
2527 2529 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2528 2530 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2529 2531 ('', 'logiofile', '', _('file to log server I/O to')),
2530 2532 ], '')
2531 2533 def debugserve(ui, repo, **opts):
2532 2534 """run a server with advanced settings
2533 2535
2534 2536 This command is similar to :hg:`serve`. It exists partially as a
2535 2537 workaround to the fact that ``hg serve --stdio`` must have specific
2536 2538 arguments for security reasons.
2537 2539 """
2538 2540 opts = pycompat.byteskwargs(opts)
2539 2541
2540 2542 if not opts['sshstdio']:
2541 2543 raise error.Abort(_('only --sshstdio is currently supported'))
2542 2544
2543 2545 logfh = None
2544 2546
2545 2547 if opts['logiofd'] and opts['logiofile']:
2546 2548 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2547 2549
2548 2550 if opts['logiofd']:
2549 2551 # Line buffered because output is line based.
2550 2552 try:
2551 2553 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2552 2554 except OSError as e:
2553 2555 if e.errno != errno.ESPIPE:
2554 2556 raise
2555 2557 # can't seek a pipe, so `ab` mode fails on py3
2556 2558 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2557 2559 elif opts['logiofile']:
2558 2560 logfh = open(opts['logiofile'], 'ab', 1)
2559 2561
2560 2562 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2561 2563 s.serve_forever()
2562 2564
2563 2565 @command('debugsetparents', [], _('REV1 [REV2]'))
2564 2566 def debugsetparents(ui, repo, rev1, rev2=None):
2565 2567 """manually set the parents of the current working directory
2566 2568
2567 2569 This is useful for writing repository conversion tools, but should
2568 2570 be used with care. For example, neither the working directory nor the
2569 2571 dirstate is updated, so file status may be incorrect after running this
2570 2572 command.
2571 2573
2572 2574 Returns 0 on success.
2573 2575 """
2574 2576
2575 2577 node1 = scmutil.revsingle(repo, rev1).node()
2576 2578 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2577 2579
2578 2580 with repo.wlock():
2579 2581 repo.setparents(node1, node2)
2580 2582
2581 2583 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2582 2584 def debugssl(ui, repo, source=None, **opts):
2583 2585 '''test a secure connection to a server
2584 2586
2585 2587 This builds the certificate chain for the server on Windows, installing the
2586 2588 missing intermediates and trusted root via Windows Update if necessary. It
2587 2589 does nothing on other platforms.
2588 2590
2589 2591 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2590 2592 that server is used. See :hg:`help urls` for more information.
2591 2593
2592 2594 If the update succeeds, retry the original operation. Otherwise, the cause
2593 2595 of the SSL error is likely another issue.
2594 2596 '''
2595 2597 if not pycompat.iswindows:
2596 2598 raise error.Abort(_('certificate chain building is only possible on '
2597 2599 'Windows'))
2598 2600
2599 2601 if not source:
2600 2602 if not repo:
2601 2603 raise error.Abort(_("there is no Mercurial repository here, and no "
2602 2604 "server specified"))
2603 2605 source = "default"
2604 2606
2605 2607 source, branches = hg.parseurl(ui.expandpath(source))
2606 2608 url = util.url(source)
2607 2609
2608 2610 defaultport = {'https': 443, 'ssh': 22}
2609 2611 if url.scheme in defaultport:
2610 2612 try:
2611 2613 addr = (url.host, int(url.port or defaultport[url.scheme]))
2612 2614 except ValueError:
2613 2615 raise error.Abort(_("malformed port number in URL"))
2614 2616 else:
2615 2617 raise error.Abort(_("only https and ssh connections are supported"))
2616 2618
2617 2619 from . import win32
2618 2620
2619 2621 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2620 2622 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2621 2623
2622 2624 try:
2623 2625 s.connect(addr)
2624 2626 cert = s.getpeercert(True)
2625 2627
2626 2628 ui.status(_('checking the certificate chain for %s\n') % url.host)
2627 2629
2628 2630 complete = win32.checkcertificatechain(cert, build=False)
2629 2631
2630 2632 if not complete:
2631 2633 ui.status(_('certificate chain is incomplete, updating... '))
2632 2634
2633 2635 if not win32.checkcertificatechain(cert):
2634 2636 ui.status(_('failed.\n'))
2635 2637 else:
2636 2638 ui.status(_('done.\n'))
2637 2639 else:
2638 2640 ui.status(_('full certificate chain is available\n'))
2639 2641 finally:
2640 2642 s.close()
2641 2643
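# Illustrative invocations (Windows only, per the docstring; the URL is a
# placeholder):
#
#   hg debugssl                           # check the chain for the 'default' path
#   hg debugssl https://example.com/repo  # check the chain for an explicit server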
2642 2644 @command('debugsub',
2643 2645 [('r', 'rev', '',
2644 2646 _('revision to check'), _('REV'))],
2645 2647 _('[-r REV] [REV]'))
2646 2648 def debugsub(ui, repo, rev=None):
2647 2649 ctx = scmutil.revsingle(repo, rev, None)
2648 2650 for k, v in sorted(ctx.substate.items()):
2649 2651 ui.write(('path %s\n') % k)
2650 2652 ui.write((' source %s\n') % v[0])
2651 2653 ui.write((' revision %s\n') % v[1])
2652 2654
2653 2655 @command('debugsuccessorssets',
2654 2656 [('', 'closest', False, _('return closest successors sets only'))],
2655 2657 _('[REV]'))
2656 2658 def debugsuccessorssets(ui, repo, *revs, **opts):
2657 2659 """show set of successors for revision
2658 2660
2659 2661 A successors set of changeset A is a consistent group of revisions that
2660 2662 succeed A. It contains non-obsolete changesets only unless the
2661 2663 ``--closest`` option is set.
2662 2664
2663 2665 In most cases a changeset A has a single successors set containing a single
2664 2666 successor (changeset A replaced by A').
2665 2667
2666 2668 A changeset that is made obsolete with no successors is called "pruned".
2667 2669 Such changesets have no successors sets at all.
2668 2670
2669 2671 A changeset that has been "split" will have a successors set containing
2670 2672 more than one successor.
2671 2673
2672 2674 A changeset that has been rewritten in multiple different ways is called
2673 2675 "divergent". Such changesets have multiple successor sets (each of which
2674 2676 may also be split, i.e. have multiple successors).
2675 2677
2676 2678 Results are displayed as follows::
2677 2679
2678 2680 <rev1>
2679 2681 <successors-1A>
2680 2682 <rev2>
2681 2683 <successors-2A>
2682 2684 <successors-2B1> <successors-2B2> <successors-2B3>
2683 2685
2684 2686 Here rev2 has two possible (i.e. divergent) successors sets. The first
2685 2687 holds one element, whereas the second holds three (i.e. the changeset has
2686 2688 been split).
2687 2689 """
2688 2690 # passed to successorssets caching computation from one call to another
2689 2691 cache = {}
2690 2692 ctx2str = bytes
2691 2693 node2str = short
2692 2694 for rev in scmutil.revrange(repo, revs):
2693 2695 ctx = repo[rev]
2694 2696 ui.write('%s\n'% ctx2str(ctx))
2695 2697 for succsset in obsutil.successorssets(repo, ctx.node(),
2696 2698 closest=opts[r'closest'],
2697 2699 cache=cache):
2698 2700 if succsset:
2699 2701 ui.write(' ')
2700 2702 ui.write(node2str(succsset[0]))
2701 2703 for node in succsset[1:]:
2702 2704 ui.write(' ')
2703 2705 ui.write(node2str(node))
2704 2706 ui.write('\n')
2705 2707
2706 2708 @command('debugtemplate',
2707 2709 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2708 2710 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2709 2711 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2710 2712 optionalrepo=True)
2711 2713 def debugtemplate(ui, repo, tmpl, **opts):
2712 2714 """parse and apply a template
2713 2715
2714 2716 If -r/--rev is given, the template is processed as a log template and
2715 2717 applied to the given changesets. Otherwise, it is processed as a generic
2716 2718 template.
2717 2719
2718 2720 Use --verbose to print the parsed tree.
2719 2721 """
2720 2722 revs = None
2721 2723 if opts[r'rev']:
2722 2724 if repo is None:
2723 2725 raise error.RepoError(_('there is no Mercurial repository here '
2724 2726 '(.hg not found)'))
2725 2727 revs = scmutil.revrange(repo, opts[r'rev'])
2726 2728
2727 2729 props = {}
2728 2730 for d in opts[r'define']:
2729 2731 try:
2730 2732 k, v = (e.strip() for e in d.split('=', 1))
2731 2733 if not k or k == 'ui':
2732 2734 raise ValueError
2733 2735 props[k] = v
2734 2736 except ValueError:
2735 2737 raise error.Abort(_('malformed keyword definition: %s') % d)
2736 2738
2737 2739 if ui.verbose:
2738 2740 aliases = ui.configitems('templatealias')
2739 2741 tree = templater.parse(tmpl)
2740 2742 ui.note(templater.prettyformat(tree), '\n')
2741 2743 newtree = templater.expandaliases(tree, aliases)
2742 2744 if newtree != tree:
2743 2745 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2744 2746
2745 2747 if revs is None:
2746 2748 tres = formatter.templateresources(ui, repo)
2747 2749 t = formatter.maketemplater(ui, tmpl, resources=tres)
2748 2750 if ui.verbose:
2749 2751 kwds, funcs = t.symbolsuseddefault()
2750 2752 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2751 2753 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2752 2754 ui.write(t.renderdefault(props))
2753 2755 else:
2754 2756 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2755 2757 if ui.verbose:
2756 2758 kwds, funcs = displayer.t.symbolsuseddefault()
2757 2759 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2758 2760 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2759 2761 for r in revs:
2760 2762 displayer.show(repo[r], **pycompat.strkwargs(props))
2761 2763 displayer.close()
2762 2764
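# Illustrative invocations (not part of the original text; the templates and
# the defined keyword are only examples):
#
#   hg debugtemplate -D name=world '{name}\n'                      # generic template
#   hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'  # log template
#   hg debugtemplate -v -r . '{rev}\n'     # --verbose also prints the parsed tree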
2763 2765 @command('debuguigetpass', [
2764 2766 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2765 2767 ], _('[-p TEXT]'), norepo=True)
2766 2768 def debuguigetpass(ui, prompt=''):
2767 2769 """show prompt to type password"""
2768 2770 r = ui.getpass(prompt)
2769 2771 ui.write(('response: %s\n') % r)
2770 2772
2771 2773 @command('debuguiprompt', [
2772 2774 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2773 2775 ], _('[-p TEXT]'), norepo=True)
2774 2776 def debuguiprompt(ui, prompt=''):
2775 2777 """show plain prompt"""
2776 2778 r = ui.prompt(prompt)
2777 2779 ui.write(('response: %s\n') % r)
2778 2780
2779 2781 @command('debugupdatecaches', [])
2780 2782 def debugupdatecaches(ui, repo, *pats, **opts):
2781 2783 """warm all known caches in the repository"""
2782 2784 with repo.wlock(), repo.lock():
2783 2785 repo.updatecaches(full=True)
2784 2786
2785 2787 @command('debugupgraderepo', [
2786 2788 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2787 2789 ('', 'run', False, _('performs an upgrade')),
2788 2790 ('', 'backup', True, _('keep the old repository content around')),
2789 2791 ])
2790 2792 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2791 2793 """upgrade a repository to use different features
2792 2794
2793 2795 If no arguments are specified, the repository is evaluated for upgrade
2794 2796 and a list of problems and potential optimizations is printed.
2795 2797
2796 2798 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2797 2799 can be influenced via additional arguments. More details will be provided
2798 2800 by the command output when run without ``--run``.
2799 2801
2800 2802 During the upgrade, the repository will be locked and no writes will be
2801 2803 allowed.
2802 2804
2803 2805 At the end of the upgrade, the repository may not be readable while new
2804 2806 repository data is swapped in. This window will be as long as it takes to
2805 2807 rename some directories inside the ``.hg`` directory. On most machines, this
2806 2808 should complete almost instantaneously and the chances of a consumer being
2807 2809 unable to access the repository should be low.
2808 2810 """
2809 2811 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2810 2812 backup=backup)
2811 2813
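# Illustrative invocations (not part of the original text; the optimization
# name is only an example, the no-argument form lists the names actually
# supported by the local repository):
#
#   hg debugupgraderepo                       # report possible upgrades only
#   hg debugupgraderepo --run                 # actually perform the upgrade
#   hg debugupgraderepo --run --optimize re-delta-parent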
2812 2814 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2813 2815 inferrepo=True)
2814 2816 def debugwalk(ui, repo, *pats, **opts):
2815 2817 """show how files match on given patterns"""
2816 2818 opts = pycompat.byteskwargs(opts)
2817 2819 m = scmutil.match(repo[None], pats, opts)
2818 2820 if ui.verbose:
2819 2821 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2820 2822 items = list(repo[None].walk(m))
2821 2823 if not items:
2822 2824 return
2823 2825 f = lambda fn: fn
2824 2826 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2825 2827 f = lambda fn: util.normpath(fn)
2826 2828 fmt = 'f %%-%ds %%-%ds %%s' % (
2827 2829 max([len(abs) for abs in items]),
2828 2830 max([len(repo.pathto(abs)) for abs in items]))
2829 2831 for abs in items:
2830 2832 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2831 2833 ui.write("%s\n" % line.rstrip())
2832 2834
2833 2835 @command('debugwhyunstable', [], _('REV'))
2834 2836 def debugwhyunstable(ui, repo, rev):
2835 2837 """explain instabilities of a changeset"""
2836 2838 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2837 2839 dnodes = ''
2838 2840 if entry.get('divergentnodes'):
2839 2841 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2840 2842 for ctx in entry['divergentnodes']) + ' '
2841 2843 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2842 2844 entry['reason'], entry['node']))
2843 2845
2844 2846 @command('debugwireargs',
2845 2847 [('', 'three', '', 'three'),
2846 2848 ('', 'four', '', 'four'),
2847 2849 ('', 'five', '', 'five'),
2848 2850 ] + cmdutil.remoteopts,
2849 2851 _('REPO [OPTIONS]... [ONE [TWO]]'),
2850 2852 norepo=True)
2851 2853 def debugwireargs(ui, repopath, *vals, **opts):
2852 2854 opts = pycompat.byteskwargs(opts)
2853 2855 repo = hg.peer(ui, opts, repopath)
2854 2856 for opt in cmdutil.remoteopts:
2855 2857 del opts[opt[1]]
2856 2858 args = {}
2857 2859 for k, v in opts.iteritems():
2858 2860 if v:
2859 2861 args[k] = v
2860 2862 args = pycompat.strkwargs(args)
2861 2863 # run twice to check that we don't mess up the stream for the next command
2862 2864 res1 = repo.debugwireargs(*vals, **args)
2863 2865 res2 = repo.debugwireargs(*vals, **args)
2864 2866 ui.write("%s\n" % res1)
2865 2867 if res1 != res2:
2866 2868 ui.warn("%s\n" % res2)
2867 2869
2868 2870 def _parsewirelangblocks(fh):
2869 2871 activeaction = None
2870 2872 blocklines = []
2871 2873 lastindent = 0
2872 2874
2873 2875 for line in fh:
2874 2876 line = line.rstrip()
2875 2877 if not line:
2876 2878 continue
2877 2879
2878 2880 if line.startswith(b'#'):
2879 2881 continue
2880 2882
2881 2883 if not line.startswith(b' '):
2882 2884 # New block. Flush previous one.
2883 2885 if activeaction:
2884 2886 yield activeaction, blocklines
2885 2887
2886 2888 activeaction = line
2887 2889 blocklines = []
2888 2890 lastindent = 0
2889 2891 continue
2890 2892
2891 2893 # Else we start with an indent.
2892 2894
2893 2895 if not activeaction:
2894 2896 raise error.Abort(_('indented line outside of block'))
2895 2897
2896 2898 indent = len(line) - len(line.lstrip())
2897 2899
2898 2900 # If this line is indented more than the last line, concatenate it.
2899 2901 if indent > lastindent and blocklines:
2900 2902 blocklines[-1] += line.lstrip()
2901 2903 else:
2902 2904 blocklines.append(line)
2903 2905 lastindent = indent
2904 2906
2905 2907 # Flush last block.
2906 2908 if activeaction:
2907 2909 yield activeaction, blocklines
2908 2910
2909 2911 @command('debugwireproto',
2910 2912 [
2911 2913 ('', 'localssh', False, _('start an SSH server for this repo')),
2912 2914 ('', 'peer', '', _('construct a specific version of the peer')),
2913 2915 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2914 2916 ('', 'nologhandshake', False,
2915 2917 _('do not log I/O related to the peer handshake')),
2916 2918 ] + cmdutil.remoteopts,
2917 2919 _('[PATH]'),
2918 2920 optionalrepo=True)
2919 2921 def debugwireproto(ui, repo, path=None, **opts):
2920 2922 """send wire protocol commands to a server
2921 2923
2922 2924 This command can be used to issue wire protocol commands to remote
2923 2925 peers and to debug the raw data being exchanged.
2924 2926
2925 2927 ``--localssh`` will start an SSH server against the current repository
2926 2928 and connect to that. By default, the connection will perform a handshake
2927 2929 and establish an appropriate peer instance.
2928 2930
2929 2931 ``--peer`` can be used to bypass the handshake protocol and construct a
2930 2932 peer instance using the specified class type. Valid values are ``raw``,
2931 2933 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2932 2934 raw data payloads and don't support higher-level command actions.
2933 2935
2934 2936 ``--noreadstderr`` can be used to disable automatic reading from stderr
2935 2937 of the peer (for SSH connections only). Disabling automatic reading of
2936 2938 stderr is useful for making output more deterministic.
2937 2939
2938 2940 Commands are issued via a mini language which is specified via stdin.
2939 2941 The language consists of individual actions to perform. An action is
2940 2942 defined by a block. A block is defined as a line with no leading
2941 2943 space followed by 0 or more lines with leading space. Blocks are
2942 2944 effectively a high-level command with additional metadata.
2943 2945
2944 2946 Lines beginning with ``#`` are ignored.
2945 2947
2946 2948 The following sections denote available actions.
2947 2949
2948 2950 raw
2949 2951 ---
2950 2952
2951 2953 Send raw data to the server.
2952 2954
2953 2955 The block payload contains the raw data to send as one atomic send
2954 2956 operation. The data may not actually be delivered in a single system
2955 2957 call: it depends on the abilities of the transport being used.
2956 2958
2957 2959 Each line in the block is de-indented and concatenated. Then, that
2958 2960 value is evaluated as a Python b'' literal. This allows the use of
2959 2961 backslash escaping, etc.
2960 2962
2961 2963 raw+
2962 2964 ----
2963 2965
2964 2966 Behaves like ``raw`` except flushes output afterwards.
2965 2967
2966 2968 command <X>
2967 2969 -----------
2968 2970
2969 2971 Send a request to run a named command, whose name follows the ``command``
2970 2972 string.
2971 2973
2972 2974 Arguments to the command are defined as lines in this block. The format of
2973 2975 each line is ``<key> <value>``. e.g.::
2974 2976
2975 2977 command listkeys
2976 2978 namespace bookmarks
2977 2979
2978 2980 If the value begins with ``eval:``, it will be interpreted as a Python
2979 2981 literal expression. Otherwise values are interpreted as Python b'' literals.
2980 2982 This allows sending complex types and encoding special byte sequences via
2981 2983 backslash escaping.
2982 2984
2983 2985 The following arguments have special meaning:
2984 2986
2985 2987 ``PUSHFILE``
2986 2988 When defined, the *push* mechanism of the peer will be used instead
2987 2989 of the static request-response mechanism and the content of the
2988 2990 file specified in the value of this argument will be sent as the
2989 2991 command payload.
2990 2992
2991 2993 This can be used to submit a local bundle file to the remote.
2992 2994
2993 2995 batchbegin
2994 2996 ----------
2995 2997
2996 2998 Instruct the peer to begin a batched send.
2997 2999
2998 3000 All ``command`` blocks are queued for execution until the next
2999 3001 ``batchsubmit`` block.
3000 3002
3001 3003 batchsubmit
3002 3004 -----------
3003 3005
3004 3006 Submit previously queued ``command`` blocks as a batch request.
3005 3007
3006 3008 This action MUST be paired with a ``batchbegin`` action.
3007 3009
3008 3010 httprequest <method> <path>
3009 3011 ---------------------------
3010 3012
3011 3013 (HTTP peer only)
3012 3014
3013 3015 Send an HTTP request to the peer.
3014 3016
3015 3017 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3016 3018
3017 3019 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3018 3020 headers to add to the request. e.g. ``Accept: foo``.
3019 3021
3020 3022 The following arguments are special:
3021 3023
3022 3024 ``BODYFILE``
3023 3025 The content of the file defined as the value to this argument will be
3024 3026 transferred verbatim as the HTTP request body.
3025 3027
3026 3028 ``frame <type> <flags> <payload>``
3027 3029 Send a unified protocol frame as part of the request body.
3028 3030
3029 3031 All frames will be collected and sent as the body of the HTTP
3030 3032 request.
3031 3033
3032 3034 close
3033 3035 -----
3034 3036
3035 3037 Close the connection to the server.
3036 3038
3037 3039 flush
3038 3040 -----
3039 3041
3040 3042 Flush data written to the server.
3041 3043
3042 3044 readavailable
3043 3045 -------------
3044 3046
3045 3047 Close the write end of the connection and read all available data from
3046 3048 the server.
3047 3049
3048 3050 If the connection to the server encompasses multiple pipes, we poll both
3049 3051 pipes and read available data.
3050 3052
3051 3053 readline
3052 3054 --------
3053 3055
3054 3056 Read a line of output from the server. If there are multiple output
3055 3057 pipes, reads only the main pipe.
3056 3058
3057 3059 ereadline
3058 3060 ---------
3059 3061
3060 3062 Like ``readline``, but read from the stderr pipe, if available.
3061 3063
3062 3064 read <X>
3063 3065 --------
3064 3066
3065 3067 ``read()`` N bytes from the server's main output pipe.
3066 3068
3067 3069 eread <X>
3068 3070 ---------
3069 3071
3070 3072 ``read()`` N bytes from the server's stderr pipe, if available.
3071 3073
3072 3074 Specifying Unified Frame-Based Protocol Frames
3073 3075 ----------------------------------------------
3074 3076
3075 3077 It is possible to emit *Unified Frame-Based Protocol* frames by using
3076 3078 special syntax.
3077 3079
3078 3080 A frame is composed as a type, flags, and payload. These can be parsed
3079 3081 from a string of the form:
3080 3082
3081 3083 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3082 3084
3083 3085 ``request-id`` and ``stream-id`` are integers defining the request and
3084 3086 stream identifiers.
3085 3087
3086 3088 ``type`` can be an integer value for the frame type or the string name
3087 3089 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3088 3090 ``command-name``.
3089 3091
3090 3092 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3091 3093 components. Each component (and there can be just one) can be an integer
3092 3094 or a flag name for stream flags or frame flags, respectively. Values are
3093 3095 resolved to integers and then bitwise OR'd together.
3094 3096
3095 3097 ``payload`` represents the raw frame payload. If it begins with
3096 3098 ``cbor:``, the following string is evaluated as Python code and the
3097 3099 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3098 3100 as a Python byte string literal.
3099 3101 """
3100 3102 opts = pycompat.byteskwargs(opts)
3101 3103
3102 3104 if opts['localssh'] and not repo:
3103 3105 raise error.Abort(_('--localssh requires a repository'))
3104 3106
3105 3107 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3106 3108 raise error.Abort(_('invalid value for --peer'),
3107 3109 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3108 3110
3109 3111 if path and opts['localssh']:
3110 3112 raise error.Abort(_('cannot specify --localssh with an explicit '
3111 3113 'path'))
3112 3114
3113 3115 if ui.interactive():
3114 3116 ui.write(_('(waiting for commands on stdin)\n'))
3115 3117
3116 3118 blocks = list(_parsewirelangblocks(ui.fin))
3117 3119
3118 3120 proc = None
3119 3121 stdin = None
3120 3122 stdout = None
3121 3123 stderr = None
3122 3124 opener = None
3123 3125
3124 3126 if opts['localssh']:
3125 3127 # We start the SSH server in its own process so there is process
3126 3128 # separation. This prevents a whole class of potential bugs around
3127 3129 # shared state from interfering with server operation.
3128 3130 args = procutil.hgcmd() + [
3129 3131 '-R', repo.root,
3130 3132 'debugserve', '--sshstdio',
3131 3133 ]
3132 3134 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3133 3135 stdin=subprocess.PIPE,
3134 3136 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3135 3137 bufsize=0)
3136 3138
3137 3139 stdin = proc.stdin
3138 3140 stdout = proc.stdout
3139 3141 stderr = proc.stderr
3140 3142
3141 3143 # We turn the pipes into observers so we can log I/O.
3142 3144 if ui.verbose or opts['peer'] == 'raw':
3143 3145 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3144 3146 logdata=True)
3145 3147 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3146 3148 logdata=True)
3147 3149 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3148 3150 logdata=True)
3149 3151
3150 3152 # --localssh also implies the peer connection settings.
3151 3153
3152 3154 url = 'ssh://localserver'
3153 3155 autoreadstderr = not opts['noreadstderr']
3154 3156
3155 3157 if opts['peer'] == 'ssh1':
3156 3158 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3157 3159 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3158 3160 None, autoreadstderr=autoreadstderr)
3159 3161 elif opts['peer'] == 'ssh2':
3160 3162 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3161 3163 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3162 3164 None, autoreadstderr=autoreadstderr)
3163 3165 elif opts['peer'] == 'raw':
3164 3166 ui.write(_('using raw connection to peer\n'))
3165 3167 peer = None
3166 3168 else:
3167 3169 ui.write(_('creating ssh peer from handshake results\n'))
3168 3170 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3169 3171 autoreadstderr=autoreadstderr)
3170 3172
3171 3173 elif path:
3172 3174 # We bypass hg.peer() so we can proxy the sockets.
3173 3175 # TODO consider not doing this because we skip
3174 3176 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3175 3177 u = util.url(path)
3176 3178 if u.scheme != 'http':
3177 3179 raise error.Abort(_('only http:// paths are currently supported'))
3178 3180
3179 3181 url, authinfo = u.authinfo()
3180 3182 openerargs = {
3181 3183 r'useragent': b'Mercurial debugwireproto',
3182 3184 }
3183 3185
3184 3186 # Turn pipes/sockets into observers so we can log I/O.
3185 3187 if ui.verbose:
3186 3188 openerargs.update({
3187 3189 r'loggingfh': ui,
3188 3190 r'loggingname': b's',
3189 3191 r'loggingopts': {
3190 3192 r'logdata': True,
3191 3193 r'logdataapis': False,
3192 3194 },
3193 3195 })
3194 3196
3195 3197 if ui.debugflag:
3196 3198 openerargs[r'loggingopts'][r'logdataapis'] = True
3197 3199
3198 3200 # Don't send default headers when in raw mode. This allows us to
3199 3201 # bypass most of the behavior of our URL handling code so we can
3200 3202 # have near complete control over what's sent on the wire.
3201 3203 if opts['peer'] == 'raw':
3202 3204 openerargs[r'sendaccept'] = False
3203 3205
3204 3206 opener = urlmod.opener(ui, authinfo, **openerargs)
3205 3207
3206 3208 if opts['peer'] == 'http2':
3207 3209 ui.write(_('creating http peer for wire protocol version 2\n'))
3208 3210 # We go through makepeer() because we need an API descriptor for
3209 3211 # the peer instance to be useful.
3210 3212 with ui.configoverride({
3211 3213 ('experimental', 'httppeer.advertise-v2'): True}):
3212 3214 if opts['nologhandshake']:
3213 3215 ui.pushbuffer()
3214 3216
3215 3217 peer = httppeer.makepeer(ui, path, opener=opener)
3216 3218
3217 3219 if opts['nologhandshake']:
3218 3220 ui.popbuffer()
3219 3221
3220 3222 if not isinstance(peer, httppeer.httpv2peer):
3221 3223 raise error.Abort(_('could not instantiate HTTP peer for '
3222 3224 'wire protocol version 2'),
3223 3225 hint=_('the server may not have the feature '
3224 3226 'enabled or is not allowing this '
3225 3227 'client version'))
3226 3228
3227 3229 elif opts['peer'] == 'raw':
3228 3230 ui.write(_('using raw connection to peer\n'))
3229 3231 peer = None
3230 3232 elif opts['peer']:
3231 3233 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3232 3234 opts['peer'])
3233 3235 else:
3234 3236 peer = httppeer.makepeer(ui, path, opener=opener)
3235 3237
3236 3238 # We /could/ populate stdin/stdout with sock.makefile()...
3237 3239 else:
3238 3240 raise error.Abort(_('unsupported connection configuration'))
3239 3241
3240 3242 batchedcommands = None
3241 3243
3242 3244 # Now perform actions based on the parsed wire language instructions.
3243 3245 for action, lines in blocks:
3244 3246 if action in ('raw', 'raw+'):
3245 3247 if not stdin:
3246 3248 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3247 3249
3248 3250 # Concatenate the data together.
3249 3251 data = ''.join(l.lstrip() for l in lines)
3250 3252 data = stringutil.unescapestr(data)
3251 3253 stdin.write(data)
3252 3254
3253 3255 if action == 'raw+':
3254 3256 stdin.flush()
3255 3257 elif action == 'flush':
3256 3258 if not stdin:
3257 3259 raise error.Abort(_('cannot call flush on this peer'))
3258 3260 stdin.flush()
3259 3261 elif action.startswith('command'):
3260 3262 if not peer:
3261 3263 raise error.Abort(_('cannot send commands unless peer instance '
3262 3264 'is available'))
3263 3265
3264 3266 command = action.split(' ', 1)[1]
3265 3267
3266 3268 args = {}
3267 3269 for line in lines:
3268 3270 # We need to allow empty values.
3269 3271 fields = line.lstrip().split(' ', 1)
3270 3272 if len(fields) == 1:
3271 3273 key = fields[0]
3272 3274 value = ''
3273 3275 else:
3274 3276 key, value = fields
3275 3277
3276 3278 if value.startswith('eval:'):
3277 3279 value = stringutil.evalpythonliteral(value[5:])
3278 3280 else:
3279 3281 value = stringutil.unescapestr(value)
3280 3282
3281 3283 args[key] = value
3282 3284
3283 3285 if batchedcommands is not None:
3284 3286 batchedcommands.append((command, args))
3285 3287 continue
3286 3288
3287 3289 ui.status(_('sending %s command\n') % command)
3288 3290
3289 3291 if 'PUSHFILE' in args:
3290 3292 with open(args['PUSHFILE'], r'rb') as fh:
3291 3293 del args['PUSHFILE']
3292 3294 res, output = peer._callpush(command, fh,
3293 3295 **pycompat.strkwargs(args))
3294 3296 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3295 3297 ui.status(_('remote output: %s\n') %
3296 3298 stringutil.escapestr(output))
3297 3299 else:
3298 3300 with peer.commandexecutor() as e:
3299 3301 res = e.callcommand(command, args).result()
3300 3302
3301 3303 if isinstance(res, wireprotov2peer.commandresponse):
3302 3304 val = res.objects()
3303 3305 ui.status(_('response: %s\n') %
3304 3306 stringutil.pprint(val, bprefix=True, indent=2))
3305 3307 else:
3306 3308 ui.status(_('response: %s\n') %
3307 3309 stringutil.pprint(res, bprefix=True, indent=2))
3308 3310
3309 3311 elif action == 'batchbegin':
3310 3312 if batchedcommands is not None:
3311 3313 raise error.Abort(_('nested batchbegin not allowed'))
3312 3314
3313 3315 batchedcommands = []
3314 3316 elif action == 'batchsubmit':
3315 3317 # There is a batching API we could go through. But it would be
3316 3318 # difficult to normalize requests into function calls. It is easier
3317 3319 # to bypass this layer and normalize to commands + args.
3318 3320 ui.status(_('sending batch with %d sub-commands\n') %
3319 3321 len(batchedcommands))
3320 3322 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3321 3323 ui.status(_('response #%d: %s\n') %
3322 3324 (i, stringutil.escapestr(chunk)))
3323 3325
3324 3326 batchedcommands = None
3325 3327
3326 3328 elif action.startswith('httprequest '):
3327 3329 if not opener:
3328 3330 raise error.Abort(_('cannot use httprequest without an HTTP '
3329 3331 'peer'))
3330 3332
3331 3333 request = action.split(' ', 2)
3332 3334 if len(request) != 3:
3333 3335 raise error.Abort(_('invalid httprequest: expected format is '
3334 3336 '"httprequest <method> <path>"'))
3335 3337
3336 3338 method, httppath = request[1:]
3337 3339 headers = {}
3338 3340 body = None
3339 3341 frames = []
3340 3342 for line in lines:
3341 3343 line = line.lstrip()
3342 3344 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3343 3345 if m:
3344 3346 # Headers need to use native strings.
3345 3347 key = pycompat.strurl(m.group(1))
3346 3348 value = pycompat.strurl(m.group(2))
3347 3349 headers[key] = value
3348 3350 continue
3349 3351
3350 3352 if line.startswith(b'BODYFILE '):
3351 3353 with open(line.split(b' ', 1)[1], 'rb') as fh:
3352 3354 body = fh.read()
3353 3355 elif line.startswith(b'frame '):
3354 3356 frame = wireprotoframing.makeframefromhumanstring(
3355 3357 line[len(b'frame '):])
3356 3358
3357 3359 frames.append(frame)
3358 3360 else:
3359 3361 raise error.Abort(_('unknown argument to httprequest: %s') %
3360 3362 line)
3361 3363
3362 3364 url = path + httppath
3363 3365
3364 3366 if frames:
3365 3367 body = b''.join(bytes(f) for f in frames)
3366 3368
3367 3369 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3368 3370
3369 3371 # urllib.Request insists on using has_data() as a proxy for
3370 3372 # determining the request method. Override that to use our
3371 3373 # explicitly requested method.
3372 3374 req.get_method = lambda: pycompat.sysstr(method)
3373 3375
3374 3376 try:
3375 3377 res = opener.open(req)
3376 3378 body = res.read()
3377 3379 except util.urlerr.urlerror as e:
3378 3380 # read() method must be called, but only exists in Python 2
3379 3381 getattr(e, 'read', lambda: None)()
3380 3382 continue
3381 3383
3382 3384 ct = res.headers.get(r'Content-Type')
3383 3385 if ct == r'application/mercurial-cbor':
3384 3386 ui.write(_('cbor> %s\n') %
3385 3387 stringutil.pprint(cborutil.decodeall(body),
3386 3388 bprefix=True,
3387 3389 indent=2))
3388 3390
3389 3391 elif action == 'close':
3390 3392 peer.close()
3391 3393 elif action == 'readavailable':
3392 3394 if not stdout or not stderr:
3393 3395 raise error.Abort(_('readavailable not available on this peer'))
3394 3396
3395 3397 stdin.close()
3396 3398 stdout.read()
3397 3399 stderr.read()
3398 3400
3399 3401 elif action == 'readline':
3400 3402 if not stdout:
3401 3403 raise error.Abort(_('readline not available on this peer'))
3402 3404 stdout.readline()
3403 3405 elif action == 'ereadline':
3404 3406 if not stderr:
3405 3407 raise error.Abort(_('ereadline not available on this peer'))
3406 3408 stderr.readline()
3407 3409 elif action.startswith('read '):
3408 3410 count = int(action.split(' ', 1)[1])
3409 3411 if not stdout:
3410 3412 raise error.Abort(_('read not available on this peer'))
3411 3413 stdout.read(count)
3412 3414 elif action.startswith('eread '):
3413 3415 count = int(action.split(' ', 1)[1])
3414 3416 if not stderr:
3415 3417 raise error.Abort(_('eread not available on this peer'))
3416 3418 stderr.read(count)
3417 3419 else:
3418 3420 raise error.Abort(_('unknown action: %s') % action)
3419 3421
3420 3422 if batchedcommands is not None:
3421 3423 raise error.Abort(_('unclosed "batchbegin" request'))
3422 3424
3423 3425 if peer:
3424 3426 peer.close()
3425 3427
3426 3428 if proc:
3427 3429 proc.kill()
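For context, here is a minimal sketch of driving the stdin mini language documented above from Python; it is not part of this change, '/path/to/repo' is a placeholder, and it assumes a repository that can be served with ``--localssh``:

import subprocess

# Build the stdin script from explicit byte strings so the indentation
# of block arguments is exactly what the parser expects.
script = (
    b'# a single wire protocol command\n'
    b'command heads\n'
    b'\n'
    b'# queue two commands and submit them as one batched request\n'
    b'batchbegin\n'
    b'command listkeys\n'
    b'    namespace bookmarks\n'
    b'command listkeys\n'
    b'    namespace namespaces\n'
    b'batchsubmit\n'
)

proc = subprocess.run(
    ['hg', '-R', '/path/to/repo', 'debugwireproto', '--localssh'],
    input=script, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(proc.stdout.decode('utf-8', 'replace'))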
@@ -1,150 +1,160 b''
1 1 Source bundle was generated with the following script:
2 2
3 3 # hg init
4 4 # echo a > a
5 5 # ln -s a l
6 6 # hg ci -Ama -d'0 0'
7 7 # mkdir b
8 8 # echo a > b/a
9 9 # chmod +x b/a
10 10 # hg ci -Amb -d'1 0'
11 11
12 12 $ hg init
13 13 $ hg unbundle "$TESTDIR/bundles/test-manifest.hg"
14 14 adding changesets
15 15 adding manifests
16 16 adding file changes
17 17 added 2 changesets with 3 changes to 3 files
18 18 new changesets b73562a03cfe:5bdc995175ba (2 drafts)
19 19 (run 'hg update' to get a working copy)
20 20
21 21 The next call is expected to return nothing:
22 22
23 23 $ hg manifest
24 24
25 25 $ hg co
26 26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 27
28 28 $ hg manifest
29 29 a
30 30 b/a
31 31 l
32 32
33 33 $ hg files -vr .
34 34 2 a
35 35 2 x b/a
36 36 1 l l
37 37 $ hg files -r . -X b
38 38 a
39 39 l
40 40 $ hg files -T '{path} {size} {flags}\n'
41 41 a 2
42 42 b/a 2 x
43 43 l 1 l
44 44 $ hg files -T '{path} {node|shortest}\n' -r.
45 45 a 5bdc
46 46 b/a 5bdc
47 47 l 5bdc
48 48
49 49 $ hg manifest -v
50 50 644 a
51 51 755 * b/a
52 52 644 @ l
53 53 $ hg manifest -T '{path} {rev}\n'
54 54 a 1
55 55 b/a 1
56 56 l 1
57 57
58 58 $ hg manifest --debug
59 59 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
60 60 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
61 61 047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l
62 62
63 63 $ hg manifest -r 0
64 64 a
65 65 l
66 66
67 67 $ hg manifest -r 1
68 68 a
69 69 b/a
70 70 l
71 71
72 72 $ hg manifest -r tip
73 73 a
74 74 b/a
75 75 l
76 76
77 77 $ hg manifest tip
78 78 a
79 79 b/a
80 80 l
81 81
82 82 $ hg manifest --all
83 83 a
84 84 b/a
85 85 l
86 86
87 87 The next two calls are expected to abort:
88 88
89 89 $ hg manifest -r 2
90 90 abort: unknown revision '2'!
91 91 [255]
92 92
93 93 $ hg manifest -r tip tip
94 94 abort: please specify just one revision
95 95 [255]
96 96
97 97 Testing the manifest full text cache utility
98 98 --------------------------------------------
99 99
100 100 Reminder of the manifest log content
101 101
102 102 $ hg log --debug | grep 'manifest:'
103 103 manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7
104 104 manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf
105 105
106 106 Showing the content of the caches after the above operations
107 107
108 108 $ hg debugmanifestfulltextcache
109 109 cache empty
110 110
111 111 Adding a new persistent entry in the cache
112 112
113 113 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
114 114
115 115 $ hg debugmanifestfulltextcache
116 116 cache contains 1 manifest entries, in order of most to least recent:
117 117 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
118 118 total cache data size 157 bytes, on-disk 157 bytes
119 119
120 120 Check we don't duplicate the entry (added from the debug command)
121 121
122 122 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
123 123 $ hg debugmanifestfulltextcache
124 124 cache contains 1 manifest entries, in order of most to least recent:
125 125 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
126 126 total cache data size 157 bytes, on-disk 157 bytes
127 127
128 128 Adding a second entry
129 129
130 130 $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf
131 131 $ hg debugmanifestfulltextcache
132 132 cache contains 2 manifest entries, in order of most to least recent:
133 133 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
134 134 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
135 135 total cache data size 268 bytes, on-disk 268 bytes
136 136
137 137 Accessing the initial entry again refreshes its order
138 138
139 139 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
140 140 $ hg debugmanifestfulltextcache
141 141 cache contains 2 manifest entries, in order of most to least recent:
142 142 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
143 143 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
144 144 total cache data size 268 bytes, on-disk 268 bytes
145 145
146 146 Check cache clearing
147 147
148 148 $ hg debugmanifestfulltextcache --clear
149 149 $ hg debugmanifestfulltextcache
150 150 cache empty
151
152 Check adding multiple entries in one go:
153
154 $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
155 $ hg debugmanifestfulltextcache
156 cache contains 2 manifest entries, in order of most to least recent:
157 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
158 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
159 total cache data size 268 bytes, on-disk 268 bytes
160 $ hg debugmanifestfulltextcache --clear
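
The byte totals printed above are consistent with each cache entry carrying a small fixed overhead on top of the manifest full text. A sketch of the arithmetic follows; the 20-byte node plus 4-byte length layout is an assumption inferred from these numbers, not something the test asserts:

# Assumed per-entry overhead: 20-byte manifest node + 4-byte length prefix.
PER_ENTRY_OVERHEAD = 20 + 4

sizes = {
    '1e01206b1d2f72bd55f2a33fa8ccad74144825b7': 133,
    'fce2a30dedad1eef4da95ca1dc0004157aa527cf': 87,
}

# One 133-byte entry -> 133 + 24 = 157 bytes, matching the single-entry output.
print(133 + PER_ENTRY_OVERHEAD)

# Both entries -> (133 + 24) + (87 + 24) = 268 bytes, matching the two-entry output.
print(sum(size + PER_ENTRY_OVERHEAD for size in sizes.values()))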