debugpathcopies: sort output so it's deterministic...
Martin von Zweigbergk
r41915:f9698868 default
@@ -1,3398 +1,3398 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 from .revlogutils import (
91 91 deltas as deltautil
92 92 )
93 93
94 94 release = lockmod.release
95 95
96 96 command = registrar.command()
97 97
98 98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 99 def debugancestor(ui, repo, *args):
100 100 """find the ancestor revision of two revisions in a given index"""
101 101 if len(args) == 3:
102 102 index, rev1, rev2 = args
103 103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 104 lookup = r.lookup
105 105 elif len(args) == 2:
106 106 if not repo:
107 107 raise error.Abort(_('there is no Mercurial repository here '
108 108 '(.hg not found)'))
109 109 rev1, rev2 = args
110 110 r = repo.changelog
111 111 lookup = repo.lookup
112 112 else:
113 113 raise error.Abort(_('either two or three arguments required'))
114 114 a = r.ancestor(lookup(rev1), lookup(rev2))
115 115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 116
117 117 @command('debugapplystreamclonebundle', [], 'FILE')
118 118 def debugapplystreamclonebundle(ui, repo, fname):
119 119 """apply a stream clone bundle file"""
120 120 f = hg.openpath(ui, fname)
121 121 gen = exchange.readbundle(ui, f, fname)
122 122 gen.apply(repo)
123 123
124 124 @command('debugbuilddag',
125 125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 127 ('n', 'new-file', None, _('add new file at each rev'))],
128 128 _('[OPTION]... [TEXT]'))
129 129 def debugbuilddag(ui, repo, text=None,
130 130 mergeable_file=False,
131 131 overwritten_file=False,
132 132 new_file=False):
133 133 """builds a repo with a given DAG from scratch in the current empty repo
134 134
135 135 The description of the DAG is read from stdin if not given on the
136 136 command line.
137 137
138 138 Elements:
139 139
140 140 - "+n" is a linear run of n nodes based on the current default parent
141 141 - "." is a single node based on the current default parent
142 142 - "$" resets the default parent to null (implied at the start);
143 143 otherwise the default parent is always the last node created
144 144 - "<p" sets the default parent to the backref p
145 145 - "*p" is a fork at parent p, which is a backref
146 146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 147 - "/p2" is a merge of the preceding node and p2
148 148 - ":tag" defines a local tag for the preceding node
149 149 - "@branch" sets the named branch for subsequent nodes
150 150 - "#...\\n" is a comment up to the end of the line
151 151
152 152 Whitespace between the above elements is ignored.
153 153
154 154 A backref is either
155 155
156 156 - a number n, which references the node curr-n, where curr is the current
157 157 node, or
158 158 - the name of a local tag you placed earlier using ":tag", or
159 159 - empty to denote the default parent.
160 160
161 161 All string-valued elements are either strictly alphanumeric, or must
162 162 be enclosed in double quotes ("..."), with "\\" as escape character.
163 163 """
164 164
165 165 if text is None:
166 166 ui.status(_("reading DAG from stdin\n"))
167 167 text = ui.fin.read()
168 168
169 169 cl = repo.changelog
170 170 if len(cl) > 0:
171 171 raise error.Abort(_('repository is not empty'))
172 172
173 173 # determine number of revs in DAG
174 174 total = 0
175 175 for type, data in dagparser.parsedag(text):
176 176 if type == 'n':
177 177 total += 1
178 178
179 179 if mergeable_file:
180 180 linesperrev = 2
181 181 # make a file with k lines per rev
182 182 initialmergedlines = ['%d' % i
183 183 for i in pycompat.xrange(0, total * linesperrev)]
184 184 initialmergedlines.append("")
185 185
186 186 tags = []
187 187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 188 total=total)
189 189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 190 at = -1
191 191 atbranch = 'default'
192 192 nodeids = []
193 193 id = 0
194 194 progress.update(id)
195 195 for type, data in dagparser.parsedag(text):
196 196 if type == 'n':
197 197 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 198 id, ps = data
199 199
200 200 files = []
201 201 filecontent = {}
202 202
203 203 p2 = None
204 204 if mergeable_file:
205 205 fn = "mf"
206 206 p1 = repo[ps[0]]
207 207 if len(ps) > 1:
208 208 p2 = repo[ps[1]]
209 209 pa = p1.ancestor(p2)
210 210 base, local, other = [x[fn].data() for x in (pa, p1,
211 211 p2)]
212 212 m3 = simplemerge.Merge3Text(base, local, other)
213 213 ml = [l.strip() for l in m3.merge_lines()]
214 214 ml.append("")
215 215 elif at > 0:
216 216 ml = p1[fn].data().split("\n")
217 217 else:
218 218 ml = initialmergedlines
219 219 ml[id * linesperrev] += " r%i" % id
220 220 mergedtext = "\n".join(ml)
221 221 files.append(fn)
222 222 filecontent[fn] = mergedtext
223 223
224 224 if overwritten_file:
225 225 fn = "of"
226 226 files.append(fn)
227 227 filecontent[fn] = "r%i\n" % id
228 228
229 229 if new_file:
230 230 fn = "nf%i" % id
231 231 files.append(fn)
232 232 filecontent[fn] = "r%i\n" % id
233 233 if len(ps) > 1:
234 234 if not p2:
235 235 p2 = repo[ps[1]]
236 236 for fn in p2:
237 237 if fn.startswith("nf"):
238 238 files.append(fn)
239 239 filecontent[fn] = p2[fn].data()
240 240
241 241 def fctxfn(repo, cx, path):
242 242 if path in filecontent:
243 243 return context.memfilectx(repo, cx, path,
244 244 filecontent[path])
245 245 return None
246 246
247 247 if len(ps) == 0 or ps[0] < 0:
248 248 pars = [None, None]
249 249 elif len(ps) == 1:
250 250 pars = [nodeids[ps[0]], None]
251 251 else:
252 252 pars = [nodeids[p] for p in ps]
253 253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 254 date=(id, 0),
255 255 user="debugbuilddag",
256 256 extra={'branch': atbranch})
257 257 nodeid = repo.commitctx(cx)
258 258 nodeids.append(nodeid)
259 259 at = id
260 260 elif type == 'l':
261 261 id, name = data
262 262 ui.note(('tag %s\n' % name))
263 263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 264 elif type == 'a':
265 265 ui.note(('branch %s\n' % data))
266 266 atbranch = data
267 267 progress.update(id)
268 268
269 269 if tags:
270 270 repo.vfs.write("localtags", "".join(tags))
271 271
272 272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 273 indent_string = ' ' * indent
274 274 if all:
275 275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 276 % indent_string)
277 277
278 278 def showchunks(named):
279 279 ui.write("\n%s%s\n" % (indent_string, named))
280 280 for deltadata in gen.deltaiter():
281 281 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 282 ui.write("%s%s %s %s %s %s %d\n" %
283 283 (indent_string, hex(node), hex(p1), hex(p2),
284 284 hex(cs), hex(deltabase), len(delta)))
285 285
286 286 chunkdata = gen.changelogheader()
287 287 showchunks("changelog")
288 288 chunkdata = gen.manifestheader()
289 289 showchunks("manifest")
290 290 for chunkdata in iter(gen.filelogheader, {}):
291 291 fname = chunkdata['filename']
292 292 showchunks(fname)
293 293 else:
294 294 if isinstance(gen, bundle2.unbundle20):
295 295 raise error.Abort(_('use debugbundle2 for this file'))
296 296 chunkdata = gen.changelogheader()
297 297 for deltadata in gen.deltaiter():
298 298 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 299 ui.write("%s%s\n" % (indent_string, hex(node)))
300 300
301 301 def _debugobsmarkers(ui, part, indent=0, **opts):
302 302 """display version and markers contained in 'data'"""
303 303 opts = pycompat.byteskwargs(opts)
304 304 data = part.read()
305 305 indent_string = ' ' * indent
306 306 try:
307 307 version, markers = obsolete._readmarkers(data)
308 308 except error.UnknownVersion as exc:
309 309 msg = "%sunsupported version: %s (%d bytes)\n"
310 310 msg %= indent_string, exc.version, len(data)
311 311 ui.write(msg)
312 312 else:
313 313 msg = "%sversion: %d (%d bytes)\n"
314 314 msg %= indent_string, version, len(data)
315 315 ui.write(msg)
316 316 fm = ui.formatter('debugobsolete', opts)
317 317 for rawmarker in sorted(markers):
318 318 m = obsutil.marker(None, rawmarker)
319 319 fm.startitem()
320 320 fm.plain(indent_string)
321 321 cmdutil.showmarker(fm, m)
322 322 fm.end()
323 323
324 324 def _debugphaseheads(ui, data, indent=0):
325 325 """display version and markers contained in 'data'"""
326 326 indent_string = ' ' * indent
327 327 headsbyphase = phases.binarydecode(data)
328 328 for phase in phases.allphases:
329 329 for head in headsbyphase[phase]:
330 330 ui.write(indent_string)
331 331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332 332
333 333 def _quasirepr(thing):
334 334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 335 return '{%s}' % (
336 336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 337 return pycompat.bytestr(repr(thing))
338 338
339 339 def _debugbundle2(ui, gen, all=None, **opts):
340 340 """lists the contents of a bundle2"""
341 341 if not isinstance(gen, bundle2.unbundle20):
342 342 raise error.Abort(_('not a bundle2 file'))
343 343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 344 parttypes = opts.get(r'part_type', [])
345 345 for part in gen.iterparts():
346 346 if parttypes and part.type not in parttypes:
347 347 continue
348 348 msg = '%s -- %s (mandatory: %r)\n'
349 349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 350 if part.type == 'changegroup':
351 351 version = part.params.get('version', '01')
352 352 cg = changegroup.getunbundler(version, part, 'UN')
353 353 if not ui.quiet:
354 354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 355 if part.type == 'obsmarkers':
356 356 if not ui.quiet:
357 357 _debugobsmarkers(ui, part, indent=4, **opts)
358 358 if part.type == 'phase-heads':
359 359 if not ui.quiet:
360 360 _debugphaseheads(ui, part, indent=4)
361 361
362 362 @command('debugbundle',
363 363 [('a', 'all', None, _('show all details')),
364 364 ('', 'part-type', [], _('show only the named part type')),
365 365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 366 _('FILE'),
367 367 norepo=True)
368 368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 369 """lists the contents of a bundle"""
370 370 with hg.openpath(ui, bundlepath) as f:
371 371 if spec:
372 372 spec = exchange.getbundlespec(ui, f)
373 373 ui.write('%s\n' % spec)
374 374 return
375 375
376 376 gen = exchange.readbundle(ui, f, bundlepath)
377 377 if isinstance(gen, bundle2.unbundle20):
378 378 return _debugbundle2(ui, gen, all=all, **opts)
379 379 _debugchangegroup(ui, gen, all=all, **opts)
380 380
381 381 @command('debugcapabilities',
382 382 [], _('PATH'),
383 383 norepo=True)
384 384 def debugcapabilities(ui, path, **opts):
385 385 """lists the capabilities of a remote peer"""
386 386 opts = pycompat.byteskwargs(opts)
387 387 peer = hg.peer(ui, opts, path)
388 388 caps = peer.capabilities()
389 389 ui.write(('Main capabilities:\n'))
390 390 for c in sorted(caps):
391 391 ui.write((' %s\n') % c)
392 392 b2caps = bundle2.bundle2caps(peer)
393 393 if b2caps:
394 394 ui.write(('Bundle2 capabilities:\n'))
395 395 for key, values in sorted(b2caps.iteritems()):
396 396 ui.write((' %s\n') % key)
397 397 for v in values:
398 398 ui.write((' %s\n') % v)
399 399
400 400 @command('debugcheckstate', [], '')
401 401 def debugcheckstate(ui, repo):
402 402 """validate the correctness of the current dirstate"""
403 403 parent1, parent2 = repo.dirstate.parents()
404 404 m1 = repo[parent1].manifest()
405 405 m2 = repo[parent2].manifest()
406 406 errors = 0
407 407 for f in repo.dirstate:
408 408 state = repo.dirstate[f]
409 409 if state in "nr" and f not in m1:
410 410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 411 errors += 1
412 412 if state in "a" and f in m1:
413 413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 414 errors += 1
415 415 if state in "m" and f not in m1 and f not in m2:
416 416 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 417 (f, state))
418 418 errors += 1
419 419 for f in m1:
420 420 state = repo.dirstate[f]
421 421 if state not in "nrm":
422 422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 423 errors += 1
424 424 if errors:
425 425 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
426 426 raise error.Abort(errstr)
427 427
428 428 @command('debugcolor',
429 429 [('', 'style', None, _('show all configured styles'))],
430 430 'hg debugcolor')
431 431 def debugcolor(ui, repo, **opts):
432 432 """show available color, effects or style"""
433 433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 434 if opts.get(r'style'):
435 435 return _debugdisplaystyle(ui)
436 436 else:
437 437 return _debugdisplaycolor(ui)
438 438
439 439 def _debugdisplaycolor(ui):
440 440 ui = ui.copy()
441 441 ui._styles.clear()
442 442 for effect in color._activeeffects(ui).keys():
443 443 ui._styles[effect] = effect
444 444 if ui._terminfoparams:
445 445 for k, v in ui.configitems('color'):
446 446 if k.startswith('color.'):
447 447 ui._styles[k] = k[6:]
448 448 elif k.startswith('terminfo.'):
449 449 ui._styles[k] = k[9:]
450 450 ui.write(_('available colors:\n'))
451 451 # sort labels with a '_' after the others to group the '_background' entries.
452 452 items = sorted(ui._styles.items(),
453 453 key=lambda i: ('_' in i[0], i[0], i[1]))
454 454 for colorname, label in items:
455 455 ui.write(('%s\n') % colorname, label=label)
456 456
457 457 def _debugdisplaystyle(ui):
458 458 ui.write(_('available style:\n'))
459 459 if not ui._styles:
460 460 return
461 461 width = max(len(s) for s in ui._styles)
462 462 for label, effects in sorted(ui._styles.items()):
463 463 ui.write('%s' % label, label=label)
464 464 if effects:
465 465 # 50
466 466 ui.write(': ')
467 467 ui.write(' ' * (max(0, width - len(label))))
468 468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 469 ui.write('\n')
470 470
471 471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 472 def debugcreatestreamclonebundle(ui, repo, fname):
473 473 """create a stream clone bundle file
474 474
475 475 Stream bundles are special bundles that are essentially archives of
476 476 revlog files. They are commonly used for cloning very quickly.
477 477 """
478 478 # TODO we may want to turn this into an abort when this functionality
479 479 # is moved into `hg bundle`.
480 480 if phases.hassecret(repo):
481 481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 482 'revisions)\n'))
483 483
484 484 requirements, gen = streamclone.generatebundlev1(repo)
485 485 changegroup.writechunks(ui, gen, fname)
486 486
487 487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 488
489 489 @command('debugdag',
490 490 [('t', 'tags', None, _('use tags as labels')),
491 491 ('b', 'branches', None, _('annotate with branch names')),
492 492 ('', 'dots', None, _('use dots for runs')),
493 493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 494 _('[OPTION]... [FILE [REV]...]'),
495 495 optionalrepo=True)
496 496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 497 """format the changelog or an index DAG as a concise textual description
498 498
499 499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 500 revision numbers, they get labeled in the output as rN.
501 501
502 502 Otherwise, the changelog DAG of the current repo is emitted.
503 503 """
504 504 spaces = opts.get(r'spaces')
505 505 dots = opts.get(r'dots')
506 506 if file_:
507 507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 508 file_)
509 509 revs = set((int(r) for r in revs))
510 510 def events():
511 511 for r in rlog:
512 512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 513 if p != -1))
514 514 if r in revs:
515 515 yield 'l', (r, "r%i" % r)
516 516 elif repo:
517 517 cl = repo.changelog
518 518 tags = opts.get(r'tags')
519 519 branches = opts.get(r'branches')
520 520 if tags:
521 521 labels = {}
522 522 for l, n in repo.tags().items():
523 523 labels.setdefault(cl.rev(n), []).append(l)
524 524 def events():
525 525 b = "default"
526 526 for r in cl:
527 527 if branches:
528 528 newb = cl.read(cl.node(r))[5]['branch']
529 529 if newb != b:
530 530 yield 'a', newb
531 531 b = newb
532 532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 533 if p != -1))
534 534 if tags:
535 535 ls = labels.get(r)
536 536 if ls:
537 537 for l in ls:
538 538 yield 'l', (r, l)
539 539 else:
540 540 raise error.Abort(_('need repo for changelog dag'))
541 541
542 542 for line in dagparser.dagtextlines(events(),
543 543 addspaces=spaces,
544 544 wraplabels=True,
545 545 wrapannotations=True,
546 546 wrapnonlinear=dots,
547 547 usedots=dots,
548 548 maxlinewidth=70):
549 549 ui.write(line)
550 550 ui.write("\n")
551 551
552 552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 553 def debugdata(ui, repo, file_, rev=None, **opts):
554 554 """dump the contents of a data file revision"""
555 555 opts = pycompat.byteskwargs(opts)
556 556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 557 if rev is not None:
558 558 raise error.CommandError('debugdata', _('invalid arguments'))
559 559 file_, rev = None, file_
560 560 elif rev is None:
561 561 raise error.CommandError('debugdata', _('invalid arguments'))
562 562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 563 try:
564 564 ui.write(r.revision(r.lookup(rev), raw=True))
565 565 except KeyError:
566 566 raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
568 568 @command('debugdate',
569 569 [('e', 'extended', None, _('try extended date formats'))],
570 570 _('[-e] DATE [RANGE]'),
571 571 norepo=True, optionalrepo=True)
572 572 def debugdate(ui, date, range=None, **opts):
573 573 """parse and display a date"""
574 574 if opts[r"extended"]:
575 575 d = dateutil.parsedate(date, util.extendeddateformats)
576 576 else:
577 577 d = dateutil.parsedate(date)
578 578 ui.write(("internal: %d %d\n") % d)
579 579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 580 if range:
581 581 m = dateutil.matchdate(range)
582 582 ui.write(("match: %s\n") % m(d[0]))
583 583
584 584 @command('debugdeltachain',
585 585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 586 _('-c|-m|FILE'),
587 587 optionalrepo=True)
588 588 def debugdeltachain(ui, repo, file_=None, **opts):
589 589 """dump information about delta chains in a revlog
590 590
591 591 Output can be templatized. Available template keywords are:
592 592
593 593 :``rev``: revision number
594 594 :``chainid``: delta chain identifier (numbered by unique base)
595 595 :``chainlen``: delta chain length to this revision
596 596 :``prevrev``: previous revision in delta chain
597 597 :``deltatype``: role of delta / how it was computed
598 598 :``compsize``: compressed size of revision
599 599 :``uncompsize``: uncompressed size of revision
600 600 :``chainsize``: total size of compressed revisions in chain
601 601 :``chainratio``: total chain size divided by uncompressed revision size
602 602 (new delta chains typically start at ratio 2.00)
603 603 :``lindist``: linear distance from base revision in delta chain to end
604 604 of this revision
605 605 :``extradist``: total size of revisions not part of this delta chain from
606 606 base of delta chain to end of this revision; a measurement
607 607 of how much extra data we need to read/seek across to read
608 608 the delta chain for this revision
609 609 :``extraratio``: extradist divided by chainsize; another representation of
610 610 how much unrelated data is needed to load this delta chain
611 611
612 612 If the repository is configured to use sparse reads, additional keywords
613 613 are available:
614 614
615 615 :``readsize``: total size of data read from the disk for a revision
616 616 (sum of the sizes of all the blocks)
617 617 :``largestblock``: size of the largest block of data read from the disk
618 618 :``readdensity``: density of useful bytes in the data read from the disk
619 619 :``srchunks``: in how many data hunks the whole revision would be read
620 620
621 621 Sparse reads can be enabled with experimental.sparse-read = True
622 622 """
623 623 opts = pycompat.byteskwargs(opts)
624 624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 625 index = r.index
626 626 start = r.start
627 627 length = r.length
628 628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 629 withsparseread = getattr(r, '_withsparseread', False)
630 630
631 631 def revinfo(rev):
632 632 e = index[rev]
633 633 compsize = e[1]
634 634 uncompsize = e[2]
635 635 chainsize = 0
636 636
637 637 if generaldelta:
638 638 if e[3] == e[5]:
639 639 deltatype = 'p1'
640 640 elif e[3] == e[6]:
641 641 deltatype = 'p2'
642 642 elif e[3] == rev - 1:
643 643 deltatype = 'prev'
644 644 elif e[3] == rev:
645 645 deltatype = 'base'
646 646 else:
647 647 deltatype = 'other'
648 648 else:
649 649 if e[3] == rev:
650 650 deltatype = 'base'
651 651 else:
652 652 deltatype = 'prev'
653 653
654 654 chain = r._deltachain(rev)[0]
655 655 for iterrev in chain:
656 656 e = index[iterrev]
657 657 chainsize += e[1]
658 658
659 659 return compsize, uncompsize, deltatype, chain, chainsize
660 660
661 661 fm = ui.formatter('debugdeltachain', opts)
662 662
663 663 fm.plain(' rev chain# chainlen prev delta '
664 664 'size rawsize chainsize ratio lindist extradist '
665 665 'extraratio')
666 666 if withsparseread:
667 667 fm.plain(' readsize largestblk rddensity srchunks')
668 668 fm.plain('\n')
669 669
670 670 chainbases = {}
671 671 for rev in r:
672 672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 673 chainbase = chain[0]
674 674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 675 basestart = start(chainbase)
676 676 revstart = start(rev)
677 677 lineardist = revstart + comp - basestart
678 678 extradist = lineardist - chainsize
679 679 try:
680 680 prevrev = chain[-2]
681 681 except IndexError:
682 682 prevrev = -1
683 683
684 684 if uncomp != 0:
685 685 chainratio = float(chainsize) / float(uncomp)
686 686 else:
687 687 chainratio = chainsize
688 688
689 689 if chainsize != 0:
690 690 extraratio = float(extradist) / float(chainsize)
691 691 else:
692 692 extraratio = extradist
693 693
694 694 fm.startitem()
695 695 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 696 'uncompsize chainsize chainratio lindist extradist '
697 697 'extraratio',
698 698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 699 rev, chainid, len(chain), prevrev, deltatype, comp,
700 700 uncomp, chainsize, chainratio, lineardist, extradist,
701 701 extraratio,
702 702 rev=rev, chainid=chainid, chainlen=len(chain),
703 703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 704 uncompsize=uncomp, chainsize=chainsize,
705 705 chainratio=chainratio, lindist=lineardist,
706 706 extradist=extradist, extraratio=extraratio)
707 707 if withsparseread:
708 708 readsize = 0
709 709 largestblock = 0
710 710 srchunks = 0
711 711
712 712 for revschunk in deltautil.slicechunk(r, chain):
713 713 srchunks += 1
714 714 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 715 blksize = blkend - start(revschunk[0])
716 716
717 717 readsize += blksize
718 718 if largestblock < blksize:
719 719 largestblock = blksize
720 720
721 721 if readsize:
722 722 readdensity = float(chainsize) / float(readsize)
723 723 else:
724 724 readdensity = 1
725 725
726 726 fm.write('readsize largestblock readdensity srchunks',
727 727 ' %10d %10d %9.5f %8d',
728 728 readsize, largestblock, readdensity, srchunks,
729 729 readsize=readsize, largestblock=largestblock,
730 730 readdensity=readdensity, srchunks=srchunks)
731 731
732 732 fm.plain('\n')
733 733
734 734 fm.end()
735 735
736 736 @command('debugdirstate|debugstate',
737 737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 738 ('', 'dates', True, _('display the saved mtime')),
739 739 ('', 'datesort', None, _('sort by saved mtime'))],
740 740 _('[OPTION]...'))
741 741 def debugstate(ui, repo, **opts):
742 742 """show the contents of the current dirstate"""
743 743
744 744 nodates = not opts[r'dates']
745 745 if opts.get(r'nodates') is not None:
746 746 nodates = True
747 747 datesort = opts.get(r'datesort')
748 748
749 749 if datesort:
750 750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 751 else:
752 752 keyfunc = None # sort by filename
753 753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 754 if ent[3] == -1:
755 755 timestr = 'unset '
756 756 elif nodates:
757 757 timestr = 'set '
758 758 else:
759 759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 760 time.localtime(ent[3]))
761 761 timestr = encoding.strtolocal(timestr)
762 762 if ent[1] & 0o20000:
763 763 mode = 'lnk'
764 764 else:
765 765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 767 for f in repo.dirstate.copies():
768 768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
770 770 @command('debugdiscovery',
771 771 [('', 'old', None, _('use old-style discovery')),
772 772 ('', 'nonheads', None,
773 773 _('use old-style discovery with non-heads included')),
774 774 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 775 ] + cmdutil.remoteopts,
776 776 _('[--rev REV] [OTHER]'))
777 777 def debugdiscovery(ui, repo, remoteurl="default", **opts):
778 778 """runs the changeset discovery protocol in isolation"""
779 779 opts = pycompat.byteskwargs(opts)
780 780 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
781 781 remote = hg.peer(repo, opts, remoteurl)
782 782 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
783 783
784 784 # make sure tests are repeatable
785 785 random.seed(12323)
786 786
787 787 def doit(pushedrevs, remoteheads, remote=remote):
788 788 if opts.get('old'):
789 789 if not util.safehasattr(remote, 'branches'):
790 790 # enable in-client legacy support
791 791 remote = localrepo.locallegacypeer(remote.local())
792 792 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
793 793 force=True)
794 794 common = set(common)
795 795 if not opts.get('nonheads'):
796 796 ui.write(("unpruned common: %s\n") %
797 797 " ".join(sorted(short(n) for n in common)))
798 798
799 799 clnode = repo.changelog.node
800 800 common = repo.revs('heads(::%ln)', common)
801 801 common = {clnode(r) for r in common}
802 802 else:
803 803 nodes = None
804 804 if pushedrevs:
805 805 revs = scmutil.revrange(repo, pushedrevs)
806 806 nodes = [repo[r].node() for r in revs]
807 807 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
808 808 ancestorsof=nodes)
809 809 common = set(common)
810 810 rheads = set(hds)
811 811 lheads = set(repo.heads())
812 812 ui.write(("common heads: %s\n") %
813 813 " ".join(sorted(short(n) for n in common)))
814 814 if lheads <= common:
815 815 ui.write(("local is subset\n"))
816 816 elif rheads <= common:
817 817 ui.write(("remote is subset\n"))
818 818
819 819 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
820 820 localrevs = opts['rev']
821 821 doit(localrevs, remoterevs)
822 822
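# 4 KiB (4 << 10) chunk size used by debugdownload below when copying the
# fetched resource to its destination.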
823 823 _chunksize = 4 << 10
824 824
825 825 @command('debugdownload',
826 826 [
827 827 ('o', 'output', '', _('path')),
828 828 ],
829 829 optionalrepo=True)
830 830 def debugdownload(ui, repo, url, output=None, **opts):
831 831 """download a resource using Mercurial logic and config
832 832 """
833 833 fh = urlmod.open(ui, url, output)
834 834
835 835 dest = ui
836 836 if output:
837 837 dest = open(output, "wb", _chunksize)
838 838 try:
839 839 data = fh.read(_chunksize)
840 840 while data:
841 841 dest.write(data)
842 842 data = fh.read(_chunksize)
843 843 finally:
844 844 if output:
845 845 dest.close()
846 846
847 847 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
848 848 def debugextensions(ui, repo, **opts):
849 849 '''show information about active extensions'''
850 850 opts = pycompat.byteskwargs(opts)
851 851 exts = extensions.extensions(ui)
852 852 hgver = util.version()
853 853 fm = ui.formatter('debugextensions', opts)
854 854 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
855 855 isinternal = extensions.ismoduleinternal(extmod)
856 856 extsource = pycompat.fsencode(extmod.__file__)
857 857 if isinternal:
858 858 exttestedwith = [] # never expose magic string to users
859 859 else:
860 860 exttestedwith = getattr(extmod, 'testedwith', '').split()
861 861 extbuglink = getattr(extmod, 'buglink', None)
862 862
863 863 fm.startitem()
864 864
865 865 if ui.quiet or ui.verbose:
866 866 fm.write('name', '%s\n', extname)
867 867 else:
868 868 fm.write('name', '%s', extname)
869 869 if isinternal or hgver in exttestedwith:
870 870 fm.plain('\n')
871 871 elif not exttestedwith:
872 872 fm.plain(_(' (untested!)\n'))
873 873 else:
874 874 lasttestedversion = exttestedwith[-1]
875 875 fm.plain(' (%s!)\n' % lasttestedversion)
876 876
877 877 fm.condwrite(ui.verbose and extsource, 'source',
878 878 _(' location: %s\n'), extsource or "")
879 879
880 880 if ui.verbose:
881 881 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
882 882 fm.data(bundled=isinternal)
883 883
884 884 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
885 885 _(' tested with: %s\n'),
886 886 fm.formatlist(exttestedwith, name='ver'))
887 887
888 888 fm.condwrite(ui.verbose and extbuglink, 'buglink',
889 889 _(' bug reporting: %s\n'), extbuglink or "")
890 890
891 891 fm.end()
892 892
893 893 @command('debugfileset',
894 894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
895 895 ('', 'all-files', False,
896 896 _('test files from all revisions and working directory')),
897 897 ('s', 'show-matcher', None,
898 898 _('print internal representation of matcher')),
899 899 ('p', 'show-stage', [],
900 900 _('print parsed tree at the given stage'), _('NAME'))],
901 901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
902 902 def debugfileset(ui, repo, expr, **opts):
903 903 '''parse and apply a fileset specification'''
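# For example, "hg debugfileset -p all 'added()'" prints the parsed, analyzed
# and optimized trees before listing the files matched by the fileset.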
904 904 from . import fileset
905 905 fileset.symbols # force import of fileset so we have predicates to optimize
906 906 opts = pycompat.byteskwargs(opts)
907 907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908 908
909 909 stages = [
910 910 ('parsed', pycompat.identity),
911 911 ('analyzed', filesetlang.analyze),
912 912 ('optimized', filesetlang.optimize),
913 913 ]
914 914 stagenames = set(n for n, f in stages)
915 915
916 916 showalways = set()
917 917 if ui.verbose and not opts['show_stage']:
918 918 # show parsed tree by --verbose (deprecated)
919 919 showalways.add('parsed')
920 920 if opts['show_stage'] == ['all']:
921 921 showalways.update(stagenames)
922 922 else:
923 923 for n in opts['show_stage']:
924 924 if n not in stagenames:
925 925 raise error.Abort(_('invalid stage name: %s') % n)
926 926 showalways.update(opts['show_stage'])
927 927
928 928 tree = filesetlang.parse(expr)
929 929 for n, f in stages:
930 930 tree = f(tree)
931 931 if n in showalways:
932 932 if opts['show_stage'] or n != 'parsed':
933 933 ui.write(("* %s:\n") % n)
934 934 ui.write(filesetlang.prettyformat(tree), "\n")
935 935
936 936 files = set()
937 937 if opts['all_files']:
938 938 for r in repo:
939 939 c = repo[r]
940 940 files.update(c.files())
941 941 files.update(c.substate)
942 942 if opts['all_files'] or ctx.rev() is None:
943 943 wctx = repo[None]
944 944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 945 subrepos=list(wctx.substate),
946 946 unknown=True, ignored=True))
947 947 files.update(wctx.substate)
948 948 else:
949 949 files.update(ctx.files())
950 950 files.update(ctx.substate)
951 951
952 952 m = ctx.matchfileset(expr)
953 953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 955 for f in sorted(files):
956 956 if not m(f):
957 957 continue
958 958 ui.write("%s\n" % f)
959 959
960 960 @command('debugformat',
961 961 [] + cmdutil.formatteropts)
962 962 def debugformat(ui, repo, **opts):
963 963 """display format information about the current repository
964 964
965 965 Use --verbose to get extra information about current config value and
966 966 Mercurial default."""
967 967 opts = pycompat.byteskwargs(opts)
968 968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970 970
971 971 def makeformatname(name):
972 972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973 973
974 974 fm = ui.formatter('debugformat', opts)
975 975 if fm.isplain():
976 976 def formatvalue(value):
977 977 if util.safehasattr(value, 'startswith'):
978 978 return value
979 979 if value:
980 980 return 'yes'
981 981 else:
982 982 return 'no'
983 983 else:
984 984 formatvalue = pycompat.identity
985 985
986 986 fm.plain('format-variant')
987 987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 988 fm.plain(' repo')
989 989 if ui.verbose:
990 990 fm.plain(' config default')
991 991 fm.plain('\n')
992 992 for fv in upgrade.allformatvariant:
993 993 fm.startitem()
994 994 repovalue = fv.fromrepo(repo)
995 995 configvalue = fv.fromconfig(repo)
996 996
997 997 if repovalue != configvalue:
998 998 namelabel = 'formatvariant.name.mismatchconfig'
999 999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 1000 elif repovalue != fv.default:
1001 1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 1003 else:
1004 1004 namelabel = 'formatvariant.name.uptodate'
1005 1005 repolabel = 'formatvariant.repo.uptodate'
1006 1006
1007 1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 1008 label=namelabel)
1009 1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 1010 label=repolabel)
1011 1011 if fv.default != configvalue:
1012 1012 configlabel = 'formatvariant.config.special'
1013 1013 else:
1014 1014 configlabel = 'formatvariant.config.default'
1015 1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 1016 label=configlabel)
1017 1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 1018 label='formatvariant.default')
1019 1019 fm.plain('\n')
1020 1020 fm.end()
1021 1021
1022 1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 1023 def debugfsinfo(ui, path="."):
1024 1024 """show information detected about current filesystem"""
1025 1025 ui.write(('path: %s\n') % path)
1026 1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 1031 casesensitive = '(unknown)'
1032 1032 try:
1033 1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 1035 except OSError:
1036 1036 pass
1037 1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
1039 1039 @command('debuggetbundle',
1040 1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 1043 _('REPO FILE [-H|-C ID]...'),
1044 1044 norepo=True)
1045 1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 1046 """retrieves a bundle from a repo
1047 1047
1048 1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 1049 given file.
1050 1050 """
1051 1051 opts = pycompat.byteskwargs(opts)
1052 1052 repo = hg.peer(ui, opts, repopath)
1053 1053 if not repo.capable('getbundle'):
1054 1054 raise error.Abort("getbundle() not supported by target repository")
1055 1055 args = {}
1056 1056 if common:
1057 1057 args[r'common'] = [bin(s) for s in common]
1058 1058 if head:
1059 1059 args[r'heads'] = [bin(s) for s in head]
1060 1060 # TODO: get desired bundlecaps from command line.
1061 1061 args[r'bundlecaps'] = None
1062 1062 bundle = repo.getbundle('debug', **args)
1063 1063
1064 1064 bundletype = opts.get('type', 'bzip2').lower()
1065 1065 btypes = {'none': 'HG10UN',
1066 1066 'bzip2': 'HG10BZ',
1067 1067 'gzip': 'HG10GZ',
1068 1068 'bundle2': 'HG20'}
1069 1069 bundletype = btypes.get(bundletype)
1070 1070 if bundletype not in bundle2.bundletypes:
1071 1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
1074 1074 @command('debugignore', [], '[FILE]')
1075 1075 def debugignore(ui, repo, *files, **opts):
1076 1076 """display the combined ignore pattern and information about ignored files
1077 1077
1078 1078 With no argument display the combined ignore pattern.
1079 1079
1080 1080 Given space separated file names, shows if the given file is ignored and
1081 1081 if so, show the ignore rule (file and line number) that matched it.
1082 1082 """
1083 1083 ignore = repo.dirstate._ignore
1084 1084 if not files:
1085 1085 # Show all the patterns
1086 1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 1087 else:
1088 1088 m = scmutil.match(repo[None], pats=files)
1089 1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1090 1090 for f in m.files():
1091 1091 nf = util.normpath(f)
1092 1092 ignored = None
1093 1093 ignoredata = None
1094 1094 if nf != '.':
1095 1095 if ignore(nf):
1096 1096 ignored = nf
1097 1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1098 1098 else:
1099 1099 for p in util.finddirs(nf):
1100 1100 if ignore(p):
1101 1101 ignored = p
1102 1102 ignoredata = repo.dirstate._ignorefileandline(p)
1103 1103 break
1104 1104 if ignored:
1105 1105 if ignored == nf:
1106 1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1107 1107 else:
1108 1108 ui.write(_("%s is ignored because of "
1109 1109 "containing folder %s\n")
1110 1110 % (uipathfn(f), ignored))
1111 1111 ignorefile, lineno, line = ignoredata
1112 1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1113 1113 % (ignorefile, lineno, line))
1114 1114 else:
1115 1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1116 1116
1117 1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1118 1118 _('-c|-m|FILE'))
1119 1119 def debugindex(ui, repo, file_=None, **opts):
1120 1120 """dump index data for a storage primitive"""
1121 1121 opts = pycompat.byteskwargs(opts)
1122 1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1123 1123
1124 1124 if ui.debugflag:
1125 1125 shortfn = hex
1126 1126 else:
1127 1127 shortfn = short
1128 1128
1129 1129 idlen = 12
1130 1130 for i in store:
1131 1131 idlen = len(shortfn(store.node(i)))
1132 1132 break
1133 1133
1134 1134 fm = ui.formatter('debugindex', opts)
1135 1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1136 1136 b'nodeid'.ljust(idlen),
1137 1137 b'p1'.ljust(idlen)))
1138 1138
1139 1139 for rev in store:
1140 1140 node = store.node(rev)
1141 1141 parents = store.parents(node)
1142 1142
1143 1143 fm.startitem()
1144 1144 fm.write(b'rev', b'%6d ', rev)
1145 1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1146 1146 fm.write(b'node', '%s ', shortfn(node))
1147 1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1148 1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1149 1149 fm.plain(b'\n')
1150 1150
1151 1151 fm.end()
1152 1152
1153 1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1154 1154 _('-c|-m|FILE'), optionalrepo=True)
1155 1155 def debugindexdot(ui, repo, file_=None, **opts):
1156 1156 """dump an index DAG as a graphviz dot file"""
1157 1157 opts = pycompat.byteskwargs(opts)
1158 1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1159 1159 ui.write(("digraph G {\n"))
1160 1160 for i in r:
1161 1161 node = r.node(i)
1162 1162 pp = r.parents(node)
1163 1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1164 1164 if pp[1] != nullid:
1165 1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1166 1166 ui.write("}\n")
1167 1167
1168 1168 @command('debugindexstats', [])
1169 1169 def debugindexstats(ui, repo):
1170 1170 """show stats related to the changelog index"""
1171 1171 repo.changelog.shortest(nullid, 1)
1172 1172 index = repo.changelog.index
1173 1173 if not util.safehasattr(index, 'stats'):
1174 1174 raise error.Abort(_('debugindexstats only works with native code'))
1175 1175 for k, v in sorted(index.stats().items()):
1176 1176 ui.write('%s: %d\n' % (k, v))
1177 1177
1178 1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1179 1179 def debuginstall(ui, **opts):
1180 1180 '''test Mercurial installation
1181 1181
1182 1182 Returns 0 on success.
1183 1183 '''
1184 1184 opts = pycompat.byteskwargs(opts)
1185 1185
1186 1186 problems = 0
1187 1187
1188 1188 fm = ui.formatter('debuginstall', opts)
1189 1189 fm.startitem()
1190 1190
1191 1191 # encoding
1192 1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1193 1193 err = None
1194 1194 try:
1195 1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1196 1196 except LookupError as inst:
1197 1197 err = stringutil.forcebytestr(inst)
1198 1198 problems += 1
1199 1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1200 1200 " (check that your locale is properly set)\n"), err)
1201 1201
1202 1202 # Python
1203 1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1204 1204 pycompat.sysexecutable)
1205 1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1206 1206 ("%d.%d.%d" % sys.version_info[:3]))
1207 1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1208 1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1209 1209
1210 1210 security = set(sslutil.supportedprotocols)
1211 1211 if sslutil.hassni:
1212 1212 security.add('sni')
1213 1213
1214 1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1215 1215 fm.formatlist(sorted(security), name='protocol',
1216 1216 fmt='%s', sep=','))
1217 1217
1218 1218 # These are warnings, not errors. So don't increment problem count. This
1219 1219 # may change in the future.
1220 1220 if 'tls1.2' not in security:
1221 1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1222 1222 'network connections lack modern security\n'))
1223 1223 if 'sni' not in security:
1224 1224 fm.plain(_(' SNI not supported by Python install; may have '
1225 1225 'connectivity issues with some servers\n'))
1226 1226
1227 1227 # TODO print CA cert info
1228 1228
1229 1229 # hg version
1230 1230 hgver = util.version()
1231 1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1232 1232 hgver.split('+')[0])
1233 1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1234 1234 '+'.join(hgver.split('+')[1:]))
1235 1235
1236 1236 # compiled modules
1237 1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1238 1238 policy.policy)
1239 1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1240 1240 os.path.dirname(pycompat.fsencode(__file__)))
1241 1241
1242 1242 if policy.policy in ('c', 'allow'):
1243 1243 err = None
1244 1244 try:
1245 1245 from .cext import (
1246 1246 base85,
1247 1247 bdiff,
1248 1248 mpatch,
1249 1249 osutil,
1250 1250 )
1251 1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1252 1252 except Exception as inst:
1253 1253 err = stringutil.forcebytestr(inst)
1254 1254 problems += 1
1255 1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1256 1256
1257 1257 compengines = util.compengines._engines.values()
1258 1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1259 1259 fm.formatlist(sorted(e.name() for e in compengines),
1260 1260 name='compengine', fmt='%s', sep=', '))
1261 1261 fm.write('compenginesavail', _('checking available compression engines '
1262 1262 '(%s)\n'),
1263 1263 fm.formatlist(sorted(e.name() for e in compengines
1264 1264 if e.available()),
1265 1265 name='compengine', fmt='%s', sep=', '))
1266 1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1267 1267 fm.write('compenginesserver', _('checking available compression engines '
1268 1268 'for wire protocol (%s)\n'),
1269 1269 fm.formatlist([e.name() for e in wirecompengines
1270 1270 if e.wireprotosupport()],
1271 1271 name='compengine', fmt='%s', sep=', '))
1272 1272 re2 = 'missing'
1273 1273 if util._re2:
1274 1274 re2 = 'available'
1275 1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1276 1276 fm.data(re2=bool(util._re2))
1277 1277
1278 1278 # templates
1279 1279 p = templater.templatepaths()
1280 1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1281 1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1282 1282 if p:
1283 1283 m = templater.templatepath("map-cmdline.default")
1284 1284 if m:
1285 1285 # template found, check if it is working
1286 1286 err = None
1287 1287 try:
1288 1288 templater.templater.frommapfile(m)
1289 1289 except Exception as inst:
1290 1290 err = stringutil.forcebytestr(inst)
1291 1291 p = None
1292 1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1293 1293 else:
1294 1294 p = None
1295 1295 fm.condwrite(p, 'defaulttemplate',
1296 1296 _("checking default template (%s)\n"), m)
1297 1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1298 1298 _(" template '%s' not found\n"), "default")
1299 1299 if not p:
1300 1300 problems += 1
1301 1301 fm.condwrite(not p, '',
1302 1302 _(" (templates seem to have been installed incorrectly)\n"))
1303 1303
1304 1304 # editor
1305 1305 editor = ui.geteditor()
1306 1306 editor = util.expandpath(editor)
1307 1307 editorbin = procutil.shellsplit(editor)[0]
1308 1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1309 1309 cmdpath = procutil.findexe(editorbin)
1310 1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1311 1311 _(" No commit editor set and can't find %s in PATH\n"
1312 1312 " (specify a commit editor in your configuration"
1313 1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1314 1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1315 1315 _(" Can't find editor '%s' in PATH\n"
1316 1316 " (specify a commit editor in your configuration"
1317 1317 " file)\n"), not cmdpath and editorbin)
1318 1318 if not cmdpath and editor != 'vi':
1319 1319 problems += 1
1320 1320
1321 1321 # check username
1322 1322 username = None
1323 1323 err = None
1324 1324 try:
1325 1325 username = ui.username()
1326 1326 except error.Abort as e:
1327 1327 err = stringutil.forcebytestr(e)
1328 1328 problems += 1
1329 1329
1330 1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1331 1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1332 1332 " (specify a username in your configuration file)\n"), err)
1333 1333
1334 1334 fm.condwrite(not problems, '',
1335 1335 _("no problems detected\n"))
1336 1336 if not problems:
1337 1337 fm.data(problems=problems)
1338 1338 fm.condwrite(problems, 'problems',
1339 1339 _("%d problems detected,"
1340 1340 " please check your install!\n"), problems)
1341 1341 fm.end()
1342 1342
1343 1343 return problems
1344 1344
1345 1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1346 1346 def debugknown(ui, repopath, *ids, **opts):
1347 1347 """test whether node ids are known to a repo
1348 1348
1349 1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1350 1350 and 1s indicating unknown/known.
1351 1351 """
1352 1352 opts = pycompat.byteskwargs(opts)
1353 1353 repo = hg.peer(ui, opts, repopath)
1354 1354 if not repo.capable('known'):
1355 1355 raise error.Abort("known() not supported by target repository")
1356 1356 flags = repo.known([bin(s) for s in ids])
1357 1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1358 1358
1359 1359 @command('debuglabelcomplete', [], _('LABEL...'))
1360 1360 def debuglabelcomplete(ui, repo, *args):
1361 1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1362 1362 debugnamecomplete(ui, repo, *args)
1363 1363
1364 1364 @command('debuglocks',
1365 1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1366 1366 ('W', 'force-wlock', None,
1367 1367 _('free the working state lock (DANGEROUS)')),
1368 1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1369 1369 ('S', 'set-wlock', None,
1370 1370 _('set the working state lock until stopped'))],
1371 1371 _('[OPTION]...'))
1372 1372 def debuglocks(ui, repo, **opts):
1373 1373 """show or modify state of locks
1374 1374
1375 1375 By default, this command will show which locks are held. This
1376 1376 includes the user and process holding the lock, the amount of time
1377 1377 the lock has been held, and the machine name where the process is
1378 1378 running if it's not local.
1379 1379
1380 1380 Locks protect the integrity of Mercurial's data, so should be
1381 1381 treated with care. System crashes or other interruptions may cause
1382 1382 locks to not be properly released, though Mercurial will usually
1383 1383 detect and remove such stale locks automatically.
1384 1384
1385 1385 However, detecting stale locks may not always be possible (for
1386 1386 instance, on a shared filesystem). Removing locks may also be
1387 1387 blocked by filesystem permissions.
1388 1388
1389 1389 Setting a lock will prevent other commands from changing the data.
1390 1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1391 1391 The set locks are removed when the command exits.
1392 1392
1393 1393 Returns 0 if no locks are held.
1394 1394
1395 1395 """
1396 1396
1397 1397 if opts.get(r'force_lock'):
1398 1398 repo.svfs.unlink('lock')
1399 1399 if opts.get(r'force_wlock'):
1400 1400 repo.vfs.unlink('wlock')
1401 1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1402 1402 return 0
1403 1403
1404 1404 locks = []
1405 1405 try:
1406 1406 if opts.get(r'set_wlock'):
1407 1407 try:
1408 1408 locks.append(repo.wlock(False))
1409 1409 except error.LockHeld:
1410 1410 raise error.Abort(_('wlock is already held'))
1411 1411 if opts.get(r'set_lock'):
1412 1412 try:
1413 1413 locks.append(repo.lock(False))
1414 1414 except error.LockHeld:
1415 1415 raise error.Abort(_('lock is already held'))
1416 1416 if len(locks):
1417 1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1418 1418 return 0
1419 1419 finally:
1420 1420 release(*locks)
1421 1421
1422 1422 now = time.time()
1423 1423 held = 0
1424 1424
1425 1425 def report(vfs, name, method):
1426 1426 # this causes stale locks to get reaped for more accurate reporting
1427 1427 try:
1428 1428 l = method(False)
1429 1429 except error.LockHeld:
1430 1430 l = None
1431 1431
1432 1432 if l:
1433 1433 l.release()
1434 1434 else:
1435 1435 try:
1436 1436 st = vfs.lstat(name)
1437 1437 age = now - st[stat.ST_MTIME]
1438 1438 user = util.username(st.st_uid)
1439 1439 locker = vfs.readlock(name)
1440 1440 if ":" in locker:
1441 1441 host, pid = locker.split(':')
1442 1442 if host == socket.gethostname():
1443 1443 locker = 'user %s, process %s' % (user or b'None', pid)
1444 1444 else:
1445 1445 locker = 'user %s, process %s, host %s' \
1446 1446 % (user or b'None', pid, host)
1447 1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1448 1448 return 1
1449 1449 except OSError as e:
1450 1450 if e.errno != errno.ENOENT:
1451 1451 raise
1452 1452
1453 1453 ui.write(("%-6s free\n") % (name + ":"))
1454 1454 return 0
1455 1455
1456 1456 held += report(repo.svfs, "lock", repo.lock)
1457 1457 held += report(repo.vfs, "wlock", repo.wlock)
1458 1458
1459 1459 return held
1460 1460
1461 1461 @command('debugmanifestfulltextcache', [
1462 1462 ('', 'clear', False, _('clear the cache')),
1463 1463 ('a', 'add', '', _('add the given manifest node to the cache'),
1464 1464 _('NODE'))
1465 1465 ], '')
1466 1466 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1467 1467 """show, clear or amend the contents of the manifest fulltext cache"""
1468 1468 with repo.lock():
1469 1469 r = repo.manifestlog.getstorage(b'')
1470 1470 try:
1471 1471 cache = r._fulltextcache
1472 1472 except AttributeError:
1473 1473 ui.warn(_(
1474 1474 "Current revlog implementation doesn't appear to have a "
1475 1475 'manifest fulltext cache\n'))
1476 1476 return
1477 1477
1478 1478 if opts.get(r'clear'):
1479 1479 cache.clear()
1480 1480
1481 1481 if add:
1482 1482 try:
1483 1483 manifest = repo.manifestlog[r.lookup(add)]
1484 1484 except error.LookupError as e:
1485 1485 raise error.Abort(e, hint="Check your manifest node id")
1486 1486 manifest.read() # stores revision in cache too
1487 1487
1488 1488 if not len(cache):
1489 1489 ui.write(_('Cache empty\n'))
1490 1490 else:
1491 1491 ui.write(
1492 1492 _('Cache contains %d manifest entries, in order of most to '
1493 1493 'least recent:\n') % (len(cache),))
1494 1494 totalsize = 0
1495 1495 for nodeid in cache:
1496 1496 # Use cache.get to not update the LRU order
1497 1497 data = cache.get(nodeid)
1498 1498 size = len(data)
1499 1499 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1500 1500 ui.write(_('id: %s, size %s\n') % (
1501 1501 hex(nodeid), util.bytecount(size)))
1502 1502 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1503 1503 ui.write(
1504 1504 _('Total cache data size %s, on-disk %s\n') % (
1505 1505 util.bytecount(totalsize), util.bytecount(ondisk))
1506 1506 )
1507 1507
1508 1508 @command('debugmergestate', [], '')
1509 1509 def debugmergestate(ui, repo, *args):
1510 1510 """print merge state
1511 1511
1512 1512 Use --verbose to print out information about whether v1 or v2 merge state
1513 1513 was chosen."""
1514 1514 def _hashornull(h):
1515 1515 if h == nullhex:
1516 1516 return 'null'
1517 1517 else:
1518 1518 return h
1519 1519
1520 1520 def printrecords(version):
1521 1521 ui.write(('* version %d records\n') % version)
1522 1522 if version == 1:
1523 1523 records = v1records
1524 1524 else:
1525 1525 records = v2records
1526 1526
1527 1527 for rtype, record in records:
1528 1528 # pretty print some record types
1529 1529 if rtype == 'L':
1530 1530 ui.write(('local: %s\n') % record)
1531 1531 elif rtype == 'O':
1532 1532 ui.write(('other: %s\n') % record)
1533 1533 elif rtype == 'm':
1534 1534 driver, mdstate = record.split('\0', 1)
1535 1535 ui.write(('merge driver: %s (state "%s")\n')
1536 1536 % (driver, mdstate))
1537 1537 elif rtype in 'FDC':
1538 1538 r = record.split('\0')
1539 1539 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1540 1540 if version == 1:
1541 1541 onode = 'not stored in v1 format'
1542 1542 flags = r[7]
1543 1543 else:
1544 1544 onode, flags = r[7:9]
1545 1545 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1546 1546 % (f, rtype, state, _hashornull(hash)))
1547 1547 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1548 1548 ui.write((' ancestor path: %s (node %s)\n')
1549 1549 % (afile, _hashornull(anode)))
1550 1550 ui.write((' other path: %s (node %s)\n')
1551 1551 % (ofile, _hashornull(onode)))
1552 1552 elif rtype == 'f':
1553 1553 filename, rawextras = record.split('\0', 1)
1554 1554 extras = rawextras.split('\0')
1555 1555 i = 0
1556 1556 extrastrings = []
1557 1557 while i < len(extras):
1558 1558 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1559 1559 i += 2
1560 1560
1561 1561 ui.write(('file extras: %s (%s)\n')
1562 1562 % (filename, ', '.join(extrastrings)))
1563 1563 elif rtype == 'l':
1564 1564 labels = record.split('\0', 2)
1565 1565 labels = [l for l in labels if len(l) > 0]
1566 1566 ui.write(('labels:\n'))
1567 1567 ui.write((' local: %s\n' % labels[0]))
1568 1568 ui.write((' other: %s\n' % labels[1]))
1569 1569 if len(labels) > 2:
1570 1570 ui.write((' base: %s\n' % labels[2]))
1571 1571 else:
1572 1572 ui.write(('unrecognized entry: %s\t%s\n')
1573 1573 % (rtype, record.replace('\0', '\t')))
1574 1574
1575 1575 # Avoid mergestate.read() since it may raise an exception for unsupported
1576 1576 # merge state records. We shouldn't be doing this, but this is OK since this
1577 1577 # command is pretty low-level.
1578 1578 ms = mergemod.mergestate(repo)
1579 1579
1580 1580 # sort so that reasonable information is on top
1581 1581 v1records = ms._readrecordsv1()
1582 1582 v2records = ms._readrecordsv2()
1583 1583 order = 'LOml'
1584 1584 def key(r):
1585 1585 idx = order.find(r[0])
1586 1586 if idx == -1:
1587 1587 return (1, r[1])
1588 1588 else:
1589 1589 return (0, idx)
1590 1590 v1records.sort(key=key)
1591 1591 v2records.sort(key=key)
1592 1592
1593 1593 if not v1records and not v2records:
1594 1594 ui.write(('no merge state found\n'))
1595 1595 elif not v2records:
1596 1596 ui.note(('no version 2 merge state\n'))
1597 1597 printrecords(1)
1598 1598 elif ms._v1v2match(v1records, v2records):
1599 1599 ui.note(('v1 and v2 states match: using v2\n'))
1600 1600 printrecords(2)
1601 1601 else:
1602 1602 ui.note(('v1 and v2 states mismatch: using v1\n'))
1603 1603 printrecords(1)
1604 1604 if ui.verbose:
1605 1605 printrecords(2)
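# A rough sketch of the output for a repository with one unresolved file,
# assuming a v2 merge state (hashes and paths are hypothetical):
#
#   $ hg debugmergestate
#   * version 2 records
#   local: 2f3a... (full hex node of the local side)
#   other: 9c1e... (full hex node of the other side)
#   file: foo.txt (record type "F", state "u", hash 5d41...)
#     local path: foo.txt (flags "")
#     ancestor path: foo.txt (node 7d86...)
#     other path: foo.txt (node 1b2c...)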
1606 1606
1607 1607 @command('debugnamecomplete', [], _('NAME...'))
1608 1608 def debugnamecomplete(ui, repo, *args):
1609 1609 '''complete "names" - tags, open branch names, bookmark names'''
1610 1610
1611 1611 names = set()
1612 1612 # since we previously only listed open branches, we will handle that
1613 1613 # specially (after this for loop)
1614 1614 for name, ns in repo.names.iteritems():
1615 1615 if name != 'branches':
1616 1616 names.update(ns.listnames(repo))
1617 1617 names.update(tag for (tag, heads, tip, closed)
1618 1618 in repo.branchmap().iterbranches() if not closed)
1619 1619 completions = set()
1620 1620 if not args:
1621 1621 args = ['']
1622 1622 for a in args:
1623 1623 completions.update(n for n in names if n.startswith(a))
1624 1624 ui.write('\n'.join(sorted(completions)))
1625 1625 ui.write('\n')
1626 1626
1627 1627 @command('debugobsolete',
1628 1628 [('', 'flags', 0, _('markers flag')),
1629 1629 ('', 'record-parents', False,
1630 1630 _('record parent information for the precursor')),
1631 1631 ('r', 'rev', [], _('display markers relevant to REV')),
1632 1632 ('', 'exclusive', False, _('restrict display to markers only '
1633 1633 'relevant to REV')),
1634 1634 ('', 'index', False, _('display index of the marker')),
1635 1635 ('', 'delete', [], _('delete markers specified by indices')),
1636 1636 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1637 1637 _('[OBSOLETED [REPLACEMENT ...]]'))
1638 1638 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1639 1639 """create arbitrary obsolete marker
1640 1640
1641 1641 With no arguments, displays the list of obsolescence markers."""
1642 1642
1643 1643 opts = pycompat.byteskwargs(opts)
1644 1644
1645 1645 def parsenodeid(s):
1646 1646 try:
1647 1647 # We do not use revsingle/revrange functions here to accept
1648 1648 # arbitrary node identifiers, possibly not present in the
1649 1649 # local repository.
1650 1650 n = bin(s)
1651 1651 if len(n) != len(nullid):
1652 1652 raise TypeError()
1653 1653 return n
1654 1654 except TypeError:
1655 1655 raise error.Abort('changeset references must be full hexadecimal '
1656 1656 'node identifiers')
1657 1657
1658 1658 if opts.get('delete'):
1659 1659 indices = []
1660 1660 for v in opts.get('delete'):
1661 1661 try:
1662 1662 indices.append(int(v))
1663 1663 except ValueError:
1664 1664 raise error.Abort(_('invalid index value: %r') % v,
1665 1665 hint=_('use integers for indices'))
1666 1666
1667 1667 if repo.currenttransaction():
1668 1668 raise error.Abort(_('cannot delete obsmarkers in the middle '
1669 1669 'of a transaction.'))
1670 1670
1671 1671 with repo.lock():
1672 1672 n = repair.deleteobsmarkers(repo.obsstore, indices)
1673 1673 ui.write(_('deleted %i obsolescence markers\n') % n)
1674 1674
1675 1675 return
1676 1676
1677 1677 if precursor is not None:
1678 1678 if opts['rev']:
1679 1679 raise error.Abort('cannot select revision when creating marker')
1680 1680 metadata = {}
1681 1681 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1682 1682 succs = tuple(parsenodeid(succ) for succ in successors)
1683 1683 l = repo.lock()
1684 1684 try:
1685 1685 tr = repo.transaction('debugobsolete')
1686 1686 try:
1687 1687 date = opts.get('date')
1688 1688 if date:
1689 1689 date = dateutil.parsedate(date)
1690 1690 else:
1691 1691 date = None
1692 1692 prec = parsenodeid(precursor)
1693 1693 parents = None
1694 1694 if opts['record_parents']:
1695 1695 if prec not in repo.unfiltered():
1696 1696 raise error.Abort('cannot use --record-parents on '
1697 1697 'unknown changesets')
1698 1698 parents = repo.unfiltered()[prec].parents()
1699 1699 parents = tuple(p.node() for p in parents)
1700 1700 repo.obsstore.create(tr, prec, succs, opts['flags'],
1701 1701 parents=parents, date=date,
1702 1702 metadata=metadata, ui=ui)
1703 1703 tr.close()
1704 1704 except ValueError as exc:
1705 1705 raise error.Abort(_('bad obsmarker input: %s') %
1706 1706 pycompat.bytestr(exc))
1707 1707 finally:
1708 1708 tr.release()
1709 1709 finally:
1710 1710 l.release()
1711 1711 else:
1712 1712 if opts['rev']:
1713 1713 revs = scmutil.revrange(repo, opts['rev'])
1714 1714 nodes = [repo[r].node() for r in revs]
1715 1715 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1716 1716 exclusive=opts['exclusive']))
1717 1717 markers.sort(key=lambda x: x._data)
1718 1718 else:
1719 1719 markers = obsutil.getmarkers(repo)
1720 1720
1721 1721 markerstoiter = markers
1722 1722 isrelevant = lambda m: True
1723 1723 if opts.get('rev') and opts.get('index'):
1724 1724 markerstoiter = obsutil.getmarkers(repo)
1725 1725 markerset = set(markers)
1726 1726 isrelevant = lambda m: m in markerset
1727 1727
1728 1728 fm = ui.formatter('debugobsolete', opts)
1729 1729 for i, m in enumerate(markerstoiter):
1730 1730 if not isrelevant(m):
1731 1731 # The marker can be irrelevant when we're iterating over a
1732 1732 # set of markers (markerstoiter) which is bigger than the
1733 1733 # set of markers we want to display (markers).
1734 1734 # This can happen when both --index and --rev are provided:
1735 1735 # we then need to iterate over all of the markers to get the
1736 1736 # correct indices, but only display the ones that are
1737 1737 # relevant to the --rev value.
1738 1738 continue
1739 1739 fm.startitem()
1740 1740 ind = i if opts.get('index') else None
1741 1741 cmdutil.showmarker(fm, m, index=ind)
1742 1742 fm.end()
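# Hedged usage sketch (node ids are placeholders, not real changesets):
#
#   record that OLD was rewritten into NEW, then list all markers:
#     $ hg debugobsolete <40-hex-old-node> <40-hex-new-node>
#     $ hg debugobsolete
#
# Changeset references must be full 40-character hexadecimal node ids;
# parsenodeid() above rejects anything shorter.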
1743 1743
1744 1744 @command('debugpathcomplete',
1745 1745 [('f', 'full', None, _('complete an entire path')),
1746 1746 ('n', 'normal', None, _('show only normal files')),
1747 1747 ('a', 'added', None, _('show only added files')),
1748 1748 ('r', 'removed', None, _('show only removed files'))],
1749 1749 _('FILESPEC...'))
1750 1750 def debugpathcomplete(ui, repo, *specs, **opts):
1751 1751 '''complete part or all of a tracked path
1752 1752
1753 1753 This command supports shells that offer path name completion. It
1754 1754 currently completes only files already known to the dirstate.
1755 1755
1756 1756 Completion extends only to the next path segment unless
1757 1757 --full is specified, in which case entire paths are used.'''
1758 1758
1759 1759 def complete(path, acceptable):
1760 1760 dirstate = repo.dirstate
1761 1761 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1762 1762 rootdir = repo.root + pycompat.ossep
1763 1763 if spec != repo.root and not spec.startswith(rootdir):
1764 1764 return [], []
1765 1765 if os.path.isdir(spec):
1766 1766 spec += '/'
1767 1767 spec = spec[len(rootdir):]
1768 1768 fixpaths = pycompat.ossep != '/'
1769 1769 if fixpaths:
1770 1770 spec = spec.replace(pycompat.ossep, '/')
1771 1771 speclen = len(spec)
1772 1772 fullpaths = opts[r'full']
1773 1773 files, dirs = set(), set()
1774 1774 adddir, addfile = dirs.add, files.add
1775 1775 for f, st in dirstate.iteritems():
1776 1776 if f.startswith(spec) and st[0] in acceptable:
1777 1777 if fixpaths:
1778 1778 f = f.replace('/', pycompat.ossep)
1779 1779 if fullpaths:
1780 1780 addfile(f)
1781 1781 continue
1782 1782 s = f.find(pycompat.ossep, speclen)
1783 1783 if s >= 0:
1784 1784 adddir(f[:s])
1785 1785 else:
1786 1786 addfile(f)
1787 1787 return files, dirs
1788 1788
1789 1789 acceptable = ''
1790 1790 if opts[r'normal']:
1791 1791 acceptable += 'nm'
1792 1792 if opts[r'added']:
1793 1793 acceptable += 'a'
1794 1794 if opts[r'removed']:
1795 1795 acceptable += 'r'
1796 1796 cwd = repo.getcwd()
1797 1797 if not specs:
1798 1798 specs = ['.']
1799 1799
1800 1800 files, dirs = set(), set()
1801 1801 for spec in specs:
1802 1802 f, d = complete(spec, acceptable or 'nmar')
1803 1803 files.update(f)
1804 1804 dirs.update(d)
1805 1805 files.update(dirs)
1806 1806 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1807 1807 ui.write('\n')
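# Illustrative sketch, assuming a repository tracking 'dir/a' and 'dir/sub/b'
# and completion run from the repository root:
#
#   $ hg debugpathcomplete di
#   dir
#   $ hg debugpathcomplete --full di
#   dir/a
#   dir/sub/b
#
# Without --full, completion stops at the next path separator; with it, whole
# tracked paths are emitted.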
1808 1808
1809 1809 @command('debugpathcopies',
1810 1810 cmdutil.walkopts,
1811 1811 'hg debugpathcopies REV1 REV2 [FILE]',
1812 1812 inferrepo=True)
1813 1813 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1814 1814 """show copies between two revisions"""
1815 1815 ctx1 = scmutil.revsingle(repo, rev1)
1816 1816 ctx2 = scmutil.revsingle(repo, rev2)
1817 1817 m = scmutil.match(ctx1, pats, opts)
1818 for dst, src in copies.pathcopies(ctx1, ctx2, m).items():
1818 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1819 1819 ui.write('%s -> %s\n' % (src, dst))
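# Example output, assuming hypothetical revisions where 'a' was copied to 'b'
# and 'c' was renamed to 'd' between REV1 and REV2:
#
#   $ hg debugpathcopies REV1 REV2
#   a -> b
#   c -> d
#
# The sorted() call above keys on the destination path, so the listing is
# deterministic; iterating the dict directly could emit the pairs in an
# arbitrary order.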
1820 1820
1821 1821 @command('debugpeer', [], _('PATH'), norepo=True)
1822 1822 def debugpeer(ui, path):
1823 1823 """establish a connection to a peer repository"""
1824 1824 # Always enable peer request logging. Requires --debug to display
1825 1825 # though.
1826 1826 overrides = {
1827 1827 ('devel', 'debug.peer-request'): True,
1828 1828 }
1829 1829
1830 1830 with ui.configoverride(overrides):
1831 1831 peer = hg.peer(ui, {}, path)
1832 1832
1833 1833 local = peer.local() is not None
1834 1834 canpush = peer.canpush()
1835 1835
1836 1836 ui.write(_('url: %s\n') % peer.url())
1837 1837 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1838 1838 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1839 1839
1840 1840 @command('debugpickmergetool',
1841 1841 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1842 1842 ('', 'changedelete', None, _('emulate merging change and delete')),
1843 1843 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1844 1844 _('[PATTERN]...'),
1845 1845 inferrepo=True)
1846 1846 def debugpickmergetool(ui, repo, *pats, **opts):
1847 1847 """examine which merge tool is chosen for the specified file
1848 1848
1849 1849 As described in :hg:`help merge-tools`, Mercurial examines the
1850 1850 configurations below in this order to decide which merge tool is
1851 1851 chosen for the specified file.
1852 1852
1853 1853 1. ``--tool`` option
1854 1854 2. ``HGMERGE`` environment variable
1855 1855 3. configurations in ``merge-patterns`` section
1856 1856 4. configuration of ``ui.merge``
1857 1857 5. configurations in ``merge-tools`` section
1858 1858 6. ``hgmerge`` tool (for historical reasons only)
1859 1859 7. default tool for fallback (``:merge`` or ``:prompt``)
1860 1860
1861 1861 This command writes out the examination result in the style below::
1862 1862
1863 1863 FILE = MERGETOOL
1864 1864
1865 1865 By default, all files known in the first parent context of the
1866 1866 working directory are examined. Use file patterns and/or -I/-X
1867 1867 options to limit the target files. -r/--rev is also useful for examining
1868 1868 files in another context without actually updating to it.
1869 1869
1870 1870 With --debug, this command also shows warning messages emitted while
1871 1871 matching against ``merge-patterns`` and so on. It is recommended to
1872 1872 use this option with explicit file patterns and/or -I/-X options,
1873 1873 because this option increases the amount of output per file according
1874 1874 to the configurations in hgrc.
1875 1875
1876 1876 With -v/--verbose, this command first shows the configurations
1877 1877 below (only if they are specified).
1878 1878
1879 1879 - ``--tool`` option
1880 1880 - ``HGMERGE`` environment variable
1881 1881 - configuration of ``ui.merge``
1882 1882
1883 1883 If the merge tool is chosen before matching against
1884 1884 ``merge-patterns``, this command can't show any helpful
1885 1885 information, even with --debug. In such a case, the information above
1886 1886 is useful for knowing why a merge tool was chosen.
1887 1887 """
1888 1888 opts = pycompat.byteskwargs(opts)
1889 1889 overrides = {}
1890 1890 if opts['tool']:
1891 1891 overrides[('ui', 'forcemerge')] = opts['tool']
1892 1892 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1893 1893
1894 1894 with ui.configoverride(overrides, 'debugmergepatterns'):
1895 1895 hgmerge = encoding.environ.get("HGMERGE")
1896 1896 if hgmerge is not None:
1897 1897 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1898 1898 uimerge = ui.config("ui", "merge")
1899 1899 if uimerge:
1900 1900 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1901 1901
1902 1902 ctx = scmutil.revsingle(repo, opts.get('rev'))
1903 1903 m = scmutil.match(ctx, pats, opts)
1904 1904 changedelete = opts['changedelete']
1905 1905 for path in ctx.walk(m):
1906 1906 fctx = ctx[path]
1907 1907 try:
1908 1908 if not ui.debugflag:
1909 1909 ui.pushbuffer(error=True)
1910 1910 tool, toolpath = filemerge._picktool(repo, ui, path,
1911 1911 fctx.isbinary(),
1912 1912 'l' in fctx.flags(),
1913 1913 changedelete)
1914 1914 finally:
1915 1915 if not ui.debugflag:
1916 1916 ui.popbuffer()
1917 1917 ui.write(('%s = %s\n') % (path, tool))
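# A short usage sketch (file names and chosen tools are hypothetical):
#
#   $ hg debugpickmergetool
#   foo.c = :merge
#   image.png = :prompt
#
# Each line follows the ``FILE = MERGETOOL`` style described in the docstring;
# binary files typically fall back to ``:prompt`` because the internal text
# merge refuses to handle them.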
1918 1918
1919 1919 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1920 1920 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1921 1921 '''access the pushkey key/value protocol
1922 1922
1923 1923 With two args, list the keys in the given namespace.
1924 1924
1925 1925 With five args, set a key to new if it currently is set to old.
1926 1926 Reports success or failure.
1927 1927 '''
1928 1928
1929 1929 target = hg.peer(ui, {}, repopath)
1930 1930 if keyinfo:
1931 1931 key, old, new = keyinfo
1932 1932 with target.commandexecutor() as e:
1933 1933 r = e.callcommand('pushkey', {
1934 1934 'namespace': namespace,
1935 1935 'key': key,
1936 1936 'old': old,
1937 1937 'new': new,
1938 1938 }).result()
1939 1939
1940 1940 ui.status(pycompat.bytestr(r) + '\n')
1941 1941 return not r
1942 1942 else:
1943 1943 for k, v in sorted(target.listkeys(namespace).iteritems()):
1944 1944 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1945 1945 stringutil.escapestr(v)))
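# Hedged examples (the repository URL and bookmark name are placeholders):
#
#   list all bookmarks of a peer:
#     $ hg debugpushkey ssh://example.com/repo bookmarks
#
#   conditionally move bookmark 'foo' from node OLD to node NEW:
#     $ hg debugpushkey ssh://example.com/repo bookmarks foo OLD NEW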
1946 1946
1947 1947 @command('debugpvec', [], _('A B'))
1948 1948 def debugpvec(ui, repo, a, b=None):
1949 1949 ca = scmutil.revsingle(repo, a)
1950 1950 cb = scmutil.revsingle(repo, b)
1951 1951 pa = pvec.ctxpvec(ca)
1952 1952 pb = pvec.ctxpvec(cb)
1953 1953 if pa == pb:
1954 1954 rel = "="
1955 1955 elif pa > pb:
1956 1956 rel = ">"
1957 1957 elif pa < pb:
1958 1958 rel = "<"
1959 1959 elif pa | pb:
1960 1960 rel = "|"
1961 1961 ui.write(_("a: %s\n") % pa)
1962 1962 ui.write(_("b: %s\n") % pb)
1963 1963 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1964 1964 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1965 1965 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1966 1966 pa.distance(pb), rel))
1967 1967
1968 1968 @command('debugrebuilddirstate|debugrebuildstate',
1969 1969 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1970 1970 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1971 1971 'the working copy parent')),
1972 1972 ],
1973 1973 _('[-r REV]'))
1974 1974 def debugrebuilddirstate(ui, repo, rev, **opts):
1975 1975 """rebuild the dirstate as it would look like for the given revision
1976 1976
1977 1977 If no revision is specified the first current parent will be used.
1978 1978
1979 1979 The dirstate will be set to the files of the given revision.
1980 1980 The actual working directory content or existing dirstate
1981 1981 information such as adds or removes is not considered.
1982 1982
1983 1983 ``minimal`` will only rebuild the dirstate status for files that claim to be
1984 1984 tracked but are not in the parent manifest, or that exist in the parent
1985 1985 manifest but are not in the dirstate. It will not change adds, removes, or
1986 1986 modified files that are in the working copy parent.
1987 1987
1988 1988 One use of this command is to make the next :hg:`status` invocation
1989 1989 check the actual file content.
1990 1990 """
1991 1991 ctx = scmutil.revsingle(repo, rev)
1992 1992 with repo.wlock():
1993 1993 dirstate = repo.dirstate
1994 1994 changedfiles = None
1995 1995 # See command doc for what minimal does.
1996 1996 if opts.get(r'minimal'):
1997 1997 manifestfiles = set(ctx.manifest().keys())
1998 1998 dirstatefiles = set(dirstate)
1999 1999 manifestonly = manifestfiles - dirstatefiles
2000 2000 dsonly = dirstatefiles - manifestfiles
2001 2001 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2002 2002 changedfiles = manifestonly | dsnotadded
2003 2003
2004 2004 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
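# A toy sketch of the --minimal set arithmetic above (file names are
# hypothetical):
#
#   manifest files  : {'a', 'b'}
#   dirstate files  : {'b', 'c' (added), 'd'}
#   manifestonly    = {'a'}        # tracked in the revision, missing from dirstate
#   dsnotadded      = {'d'}        # in dirstate, not in manifest, and not 'a'dded
#   changedfiles    = {'a', 'd'}   # only these dirstate entries are rebuilt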
2005 2005
2006 2006 @command('debugrebuildfncache', [], '')
2007 2007 def debugrebuildfncache(ui, repo):
2008 2008 """rebuild the fncache file"""
2009 2009 repair.rebuildfncache(ui, repo)
2010 2010
2011 2011 @command('debugrename',
2012 2012 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2013 2013 _('[-r REV] [FILE]...'))
2014 2014 def debugrename(ui, repo, *pats, **opts):
2015 2015 """dump rename information"""
2016 2016
2017 2017 opts = pycompat.byteskwargs(opts)
2018 2018 ctx = scmutil.revsingle(repo, opts.get('rev'))
2019 2019 m = scmutil.match(ctx, pats, opts)
2020 2020 for abs in ctx.walk(m):
2021 2021 fctx = ctx[abs]
2022 2022 o = fctx.filelog().renamed(fctx.filenode())
2023 2023 rel = repo.pathto(abs)
2024 2024 if o:
2025 2025 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2026 2026 else:
2027 2027 ui.write(_("%s not renamed\n") % rel)
2028 2028
2029 2029 @command('debugrevlog', cmdutil.debugrevlogopts +
2030 2030 [('d', 'dump', False, _('dump index data'))],
2031 2031 _('-c|-m|FILE'),
2032 2032 optionalrepo=True)
2033 2033 def debugrevlog(ui, repo, file_=None, **opts):
2034 2034 """show data and statistics about a revlog"""
2035 2035 opts = pycompat.byteskwargs(opts)
2036 2036 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2037 2037
2038 2038 if opts.get("dump"):
2039 2039 numrevs = len(r)
2040 2040 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2041 2041 " rawsize totalsize compression heads chainlen\n"))
2042 2042 ts = 0
2043 2043 heads = set()
2044 2044
2045 2045 for rev in pycompat.xrange(numrevs):
2046 2046 dbase = r.deltaparent(rev)
2047 2047 if dbase == -1:
2048 2048 dbase = rev
2049 2049 cbase = r.chainbase(rev)
2050 2050 clen = r.chainlen(rev)
2051 2051 p1, p2 = r.parentrevs(rev)
2052 2052 rs = r.rawsize(rev)
2053 2053 ts = ts + rs
2054 2054 heads -= set(r.parentrevs(rev))
2055 2055 heads.add(rev)
2056 2056 try:
2057 2057 compression = ts / r.end(rev)
2058 2058 except ZeroDivisionError:
2059 2059 compression = 0
2060 2060 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2061 2061 "%11d %5d %8d\n" %
2062 2062 (rev, p1, p2, r.start(rev), r.end(rev),
2063 2063 r.start(dbase), r.start(cbase),
2064 2064 r.start(p1), r.start(p2),
2065 2065 rs, ts, compression, len(heads), clen))
2066 2066 return 0
2067 2067
2068 2068 v = r.version
2069 2069 format = v & 0xFFFF
2070 2070 flags = []
2071 2071 gdelta = False
2072 2072 if v & revlog.FLAG_INLINE_DATA:
2073 2073 flags.append('inline')
2074 2074 if v & revlog.FLAG_GENERALDELTA:
2075 2075 gdelta = True
2076 2076 flags.append('generaldelta')
2077 2077 if not flags:
2078 2078 flags = ['(none)']
2079 2079
2080 2080 ### tracks merge vs single parent
2081 2081 nummerges = 0
2082 2082
2083 2083 ### tracks the ways the "delta" is built
2084 2084 # nodelta
2085 2085 numempty = 0
2086 2086 numemptytext = 0
2087 2087 numemptydelta = 0
2088 2088 # full file content
2089 2089 numfull = 0
2090 2090 # intermediate snapshot against a prior snapshot
2091 2091 numsemi = 0
2092 2092 # snapshot count per depth
2093 2093 numsnapdepth = collections.defaultdict(lambda: 0)
2094 2094 # delta against previous revision
2095 2095 numprev = 0
2096 2096 # delta against first or second parent (not prev)
2097 2097 nump1 = 0
2098 2098 nump2 = 0
2099 2099 # delta against neither prev nor parents
2100 2100 numother = 0
2101 2101 # delta against prev that are also first or second parent
2102 2102 # (details of `numprev`)
2103 2103 nump1prev = 0
2104 2104 nump2prev = 0
2105 2105
2106 2106 # data about delta chain of each revs
2107 2107 chainlengths = []
2108 2108 chainbases = []
2109 2109 chainspans = []
2110 2110
2111 2111 # data about each revision
2112 2112 datasize = [None, 0, 0]
2113 2113 fullsize = [None, 0, 0]
2114 2114 semisize = [None, 0, 0]
2115 2115 # snapshot count per depth
2116 2116 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2117 2117 deltasize = [None, 0, 0]
2118 2118 chunktypecounts = {}
2119 2119 chunktypesizes = {}
2120 2120
2121 2121 def addsize(size, l):
2122 2122 if l[0] is None or size < l[0]:
2123 2123 l[0] = size
2124 2124 if size > l[1]:
2125 2125 l[1] = size
2126 2126 l[2] += size
2127 2127
2128 2128 numrevs = len(r)
2129 2129 for rev in pycompat.xrange(numrevs):
2130 2130 p1, p2 = r.parentrevs(rev)
2131 2131 delta = r.deltaparent(rev)
2132 2132 if format > 0:
2133 2133 addsize(r.rawsize(rev), datasize)
2134 2134 if p2 != nullrev:
2135 2135 nummerges += 1
2136 2136 size = r.length(rev)
2137 2137 if delta == nullrev:
2138 2138 chainlengths.append(0)
2139 2139 chainbases.append(r.start(rev))
2140 2140 chainspans.append(size)
2141 2141 if size == 0:
2142 2142 numempty += 1
2143 2143 numemptytext += 1
2144 2144 else:
2145 2145 numfull += 1
2146 2146 numsnapdepth[0] += 1
2147 2147 addsize(size, fullsize)
2148 2148 addsize(size, snapsizedepth[0])
2149 2149 else:
2150 2150 chainlengths.append(chainlengths[delta] + 1)
2151 2151 baseaddr = chainbases[delta]
2152 2152 revaddr = r.start(rev)
2153 2153 chainbases.append(baseaddr)
2154 2154 chainspans.append((revaddr - baseaddr) + size)
2155 2155 if size == 0:
2156 2156 numempty += 1
2157 2157 numemptydelta += 1
2158 2158 elif r.issnapshot(rev):
2159 2159 addsize(size, semisize)
2160 2160 numsemi += 1
2161 2161 depth = r.snapshotdepth(rev)
2162 2162 numsnapdepth[depth] += 1
2163 2163 addsize(size, snapsizedepth[depth])
2164 2164 else:
2165 2165 addsize(size, deltasize)
2166 2166 if delta == rev - 1:
2167 2167 numprev += 1
2168 2168 if delta == p1:
2169 2169 nump1prev += 1
2170 2170 elif delta == p2:
2171 2171 nump2prev += 1
2172 2172 elif delta == p1:
2173 2173 nump1 += 1
2174 2174 elif delta == p2:
2175 2175 nump2 += 1
2176 2176 elif delta != nullrev:
2177 2177 numother += 1
2178 2178
2179 2179 # Obtain data on the raw chunks in the revlog.
2180 2180 if util.safehasattr(r, '_getsegmentforrevs'):
2181 2181 segment = r._getsegmentforrevs(rev, rev)[1]
2182 2182 else:
2183 2183 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2184 2184 if segment:
2185 2185 chunktype = bytes(segment[0:1])
2186 2186 else:
2187 2187 chunktype = 'empty'
2188 2188
2189 2189 if chunktype not in chunktypecounts:
2190 2190 chunktypecounts[chunktype] = 0
2191 2191 chunktypesizes[chunktype] = 0
2192 2192
2193 2193 chunktypecounts[chunktype] += 1
2194 2194 chunktypesizes[chunktype] += size
2195 2195
2196 2196 # Adjust size min value for empty cases
2197 2197 for size in (datasize, fullsize, semisize, deltasize):
2198 2198 if size[0] is None:
2199 2199 size[0] = 0
2200 2200
2201 2201 numdeltas = numrevs - numfull - numempty - numsemi
2202 2202 numoprev = numprev - nump1prev - nump2prev
2203 2203 totalrawsize = datasize[2]
2204 2204 datasize[2] /= numrevs
2205 2205 fulltotal = fullsize[2]
2206 2206 fullsize[2] /= numfull
2207 2207 semitotal = semisize[2]
2208 2208 snaptotal = {}
2209 2209 if numsemi > 0:
2210 2210 semisize[2] /= numsemi
2211 2211 for depth in snapsizedepth:
2212 2212 snaptotal[depth] = snapsizedepth[depth][2]
2213 2213 snapsizedepth[depth][2] /= numsnapdepth[depth]
2214 2214
2215 2215 deltatotal = deltasize[2]
2216 2216 if numdeltas > 0:
2217 2217 deltasize[2] /= numdeltas
2218 2218 totalsize = fulltotal + semitotal + deltatotal
2219 2219 avgchainlen = sum(chainlengths) / numrevs
2220 2220 maxchainlen = max(chainlengths)
2221 2221 maxchainspan = max(chainspans)
2222 2222 compratio = 1
2223 2223 if totalsize:
2224 2224 compratio = totalrawsize / totalsize
2225 2225
2226 2226 basedfmtstr = '%%%dd\n'
2227 2227 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2228 2228
2229 2229 def dfmtstr(max):
2230 2230 return basedfmtstr % len(str(max))
2231 2231 def pcfmtstr(max, padding=0):
2232 2232 return basepcfmtstr % (len(str(max)), ' ' * padding)
2233 2233
2234 2234 def pcfmt(value, total):
2235 2235 if total:
2236 2236 return (value, 100 * float(value) / total)
2237 2237 else:
2238 2238 return value, 100.0
2239 2239
2240 2240 ui.write(('format : %d\n') % format)
2241 2241 ui.write(('flags : %s\n') % ', '.join(flags))
2242 2242
2243 2243 ui.write('\n')
2244 2244 fmt = pcfmtstr(totalsize)
2245 2245 fmt2 = dfmtstr(totalsize)
2246 2246 ui.write(('revisions : ') + fmt2 % numrevs)
2247 2247 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2248 2248 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2249 2249 ui.write(('revisions : ') + fmt2 % numrevs)
2250 2250 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2251 2251 ui.write((' text : ')
2252 2252 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2253 2253 ui.write((' delta : ')
2254 2254 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2255 2255 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2256 2256 for depth in sorted(numsnapdepth):
2257 2257 ui.write((' lvl-%-3d : ' % depth)
2258 2258 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2259 2259 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2260 2260 ui.write(('revision size : ') + fmt2 % totalsize)
2261 2261 ui.write((' snapshot : ')
2262 2262 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2263 2263 for depth in sorted(numsnapdepth):
2264 2264 ui.write((' lvl-%-3d : ' % depth)
2265 2265 + fmt % pcfmt(snaptotal[depth], totalsize))
2266 2266 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2267 2267
2268 2268 def fmtchunktype(chunktype):
2269 2269 if chunktype == 'empty':
2270 2270 return ' %s : ' % chunktype
2271 2271 elif chunktype in pycompat.bytestr(string.ascii_letters):
2272 2272 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2273 2273 else:
2274 2274 return ' 0x%s : ' % hex(chunktype)
2275 2275
2276 2276 ui.write('\n')
2277 2277 ui.write(('chunks : ') + fmt2 % numrevs)
2278 2278 for chunktype in sorted(chunktypecounts):
2279 2279 ui.write(fmtchunktype(chunktype))
2280 2280 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2281 2281 ui.write(('chunks size : ') + fmt2 % totalsize)
2282 2282 for chunktype in sorted(chunktypecounts):
2283 2283 ui.write(fmtchunktype(chunktype))
2284 2284 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2285 2285
2286 2286 ui.write('\n')
2287 2287 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2288 2288 ui.write(('avg chain length : ') + fmt % avgchainlen)
2289 2289 ui.write(('max chain length : ') + fmt % maxchainlen)
2290 2290 ui.write(('max chain reach : ') + fmt % maxchainspan)
2291 2291 ui.write(('compression ratio : ') + fmt % compratio)
2292 2292
2293 2293 if format > 0:
2294 2294 ui.write('\n')
2295 2295 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2296 2296 % tuple(datasize))
2297 2297 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2298 2298 % tuple(fullsize))
2299 2299 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2300 2300 % tuple(semisize))
2301 2301 for depth in sorted(snapsizedepth):
2302 2302 if depth == 0:
2303 2303 continue
2304 2304 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2305 2305 % ((depth,) + tuple(snapsizedepth[depth])))
2306 2306 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2307 2307 % tuple(deltasize))
2308 2308
2309 2309 if numdeltas > 0:
2310 2310 ui.write('\n')
2311 2311 fmt = pcfmtstr(numdeltas)
2312 2312 fmt2 = pcfmtstr(numdeltas, 4)
2313 2313 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2314 2314 if numprev > 0:
2315 2315 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2316 2316 numprev))
2317 2317 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2318 2318 numprev))
2319 2319 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2320 2320 numprev))
2321 2321 if gdelta:
2322 2322 ui.write(('deltas against p1 : ')
2323 2323 + fmt % pcfmt(nump1, numdeltas))
2324 2324 ui.write(('deltas against p2 : ')
2325 2325 + fmt % pcfmt(nump2, numdeltas))
2326 2326 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2327 2327 numdeltas))
2328 2328
2329 2329 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2330 2330 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2331 2331 _('[-f FORMAT] -c|-m|FILE'),
2332 2332 optionalrepo=True)
2333 2333 def debugrevlogindex(ui, repo, file_=None, **opts):
2334 2334 """dump the contents of a revlog index"""
2335 2335 opts = pycompat.byteskwargs(opts)
2336 2336 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2337 2337 format = opts.get('format', 0)
2338 2338 if format not in (0, 1):
2339 2339 raise error.Abort(_("unknown format %d") % format)
2340 2340
2341 2341 if ui.debugflag:
2342 2342 shortfn = hex
2343 2343 else:
2344 2344 shortfn = short
2345 2345
2346 2346 # There might not be anything in r, so have a sane default
2347 2347 idlen = 12
2348 2348 for i in r:
2349 2349 idlen = len(shortfn(r.node(i)))
2350 2350 break
2351 2351
2352 2352 if format == 0:
2353 2353 if ui.verbose:
2354 2354 ui.write((" rev offset length linkrev"
2355 2355 " %s %s p2\n") % ("nodeid".ljust(idlen),
2356 2356 "p1".ljust(idlen)))
2357 2357 else:
2358 2358 ui.write((" rev linkrev %s %s p2\n") % (
2359 2359 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2360 2360 elif format == 1:
2361 2361 if ui.verbose:
2362 2362 ui.write((" rev flag offset length size link p1"
2363 2363 " p2 %s\n") % "nodeid".rjust(idlen))
2364 2364 else:
2365 2365 ui.write((" rev flag size link p1 p2 %s\n") %
2366 2366 "nodeid".rjust(idlen))
2367 2367
2368 2368 for i in r:
2369 2369 node = r.node(i)
2370 2370 if format == 0:
2371 2371 try:
2372 2372 pp = r.parents(node)
2373 2373 except Exception:
2374 2374 pp = [nullid, nullid]
2375 2375 if ui.verbose:
2376 2376 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2377 2377 i, r.start(i), r.length(i), r.linkrev(i),
2378 2378 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2379 2379 else:
2380 2380 ui.write("% 6d % 7d %s %s %s\n" % (
2381 2381 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2382 2382 shortfn(pp[1])))
2383 2383 elif format == 1:
2384 2384 pr = r.parentrevs(i)
2385 2385 if ui.verbose:
2386 2386 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2387 2387 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2388 2388 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2389 2389 else:
2390 2390 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2391 2391 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2392 2392 shortfn(node)))
2393 2393
2394 2394 @command('debugrevspec',
2395 2395 [('', 'optimize', None,
2396 2396 _('print parsed tree after optimizing (DEPRECATED)')),
2397 2397 ('', 'show-revs', True, _('print list of result revisions (default)')),
2398 2398 ('s', 'show-set', None, _('print internal representation of result set')),
2399 2399 ('p', 'show-stage', [],
2400 2400 _('print parsed tree at the given stage'), _('NAME')),
2401 2401 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2402 2402 ('', 'verify-optimized', False, _('verify optimized result')),
2403 2403 ],
2404 2404 ('REVSPEC'))
2405 2405 def debugrevspec(ui, repo, expr, **opts):
2406 2406 """parse and apply a revision specification
2407 2407
2408 2408 Use -p/--show-stage option to print the parsed tree at the given stages.
2409 2409 Use -p all to print tree at every stage.
2410 2410
2411 2411 Use --no-show-revs option with -s or -p to print only the set
2412 2412 representation or the parsed tree respectively.
2413 2413
2414 2414 Use --verify-optimized to compare the optimized result with the unoptimized
2415 2415 one. Returns 1 if the optimized result differs.
2416 2416 """
2417 2417 opts = pycompat.byteskwargs(opts)
2418 2418 aliases = ui.configitems('revsetalias')
2419 2419 stages = [
2420 2420 ('parsed', lambda tree: tree),
2421 2421 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2422 2422 ui.warn)),
2423 2423 ('concatenated', revsetlang.foldconcat),
2424 2424 ('analyzed', revsetlang.analyze),
2425 2425 ('optimized', revsetlang.optimize),
2426 2426 ]
2427 2427 if opts['no_optimized']:
2428 2428 stages = stages[:-1]
2429 2429 if opts['verify_optimized'] and opts['no_optimized']:
2430 2430 raise error.Abort(_('cannot use --verify-optimized with '
2431 2431 '--no-optimized'))
2432 2432 stagenames = set(n for n, f in stages)
2433 2433
2434 2434 showalways = set()
2435 2435 showchanged = set()
2436 2436 if ui.verbose and not opts['show_stage']:
2437 2437 # show parsed tree by --verbose (deprecated)
2438 2438 showalways.add('parsed')
2439 2439 showchanged.update(['expanded', 'concatenated'])
2440 2440 if opts['optimize']:
2441 2441 showalways.add('optimized')
2442 2442 if opts['show_stage'] and opts['optimize']:
2443 2443 raise error.Abort(_('cannot use --optimize with --show-stage'))
2444 2444 if opts['show_stage'] == ['all']:
2445 2445 showalways.update(stagenames)
2446 2446 else:
2447 2447 for n in opts['show_stage']:
2448 2448 if n not in stagenames:
2449 2449 raise error.Abort(_('invalid stage name: %s') % n)
2450 2450 showalways.update(opts['show_stage'])
2451 2451
2452 2452 treebystage = {}
2453 2453 printedtree = None
2454 2454 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2455 2455 for n, f in stages:
2456 2456 treebystage[n] = tree = f(tree)
2457 2457 if n in showalways or (n in showchanged and tree != printedtree):
2458 2458 if opts['show_stage'] or n != 'parsed':
2459 2459 ui.write(("* %s:\n") % n)
2460 2460 ui.write(revsetlang.prettyformat(tree), "\n")
2461 2461 printedtree = tree
2462 2462
2463 2463 if opts['verify_optimized']:
2464 2464 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2465 2465 brevs = revset.makematcher(treebystage['optimized'])(repo)
2466 2466 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2467 2467 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2468 2468 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2469 2469 arevs = list(arevs)
2470 2470 brevs = list(brevs)
2471 2471 if arevs == brevs:
2472 2472 return 0
2473 2473 ui.write(('--- analyzed\n'), label='diff.file_a')
2474 2474 ui.write(('+++ optimized\n'), label='diff.file_b')
2475 2475 sm = difflib.SequenceMatcher(None, arevs, brevs)
2476 2476 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2477 2477 if tag in (r'delete', r'replace'):
2478 2478 for c in arevs[alo:ahi]:
2479 2479 ui.write('-%d\n' % c, label='diff.deleted')
2480 2480 if tag in (r'insert', r'replace'):
2481 2481 for c in brevs[blo:bhi]:
2482 2482 ui.write('+%d\n' % c, label='diff.inserted')
2483 2483 if tag == r'equal':
2484 2484 for c in arevs[alo:ahi]:
2485 2485 ui.write(' %d\n' % c)
2486 2486 return 1
2487 2487
2488 2488 func = revset.makematcher(tree)
2489 2489 revs = func(repo)
2490 2490 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2491 2491 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2492 2492 if not opts['show_revs']:
2493 2493 return
2494 2494 for c in revs:
2495 2495 ui.write("%d\n" % c)
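# A brief usage sketch (the revset itself is arbitrary):
#
#   $ hg debugrevspec -p all 'ancestors(.) and public()'
#
# prints the tree after each stage (parsed, expanded, concatenated, analyzed,
# optimized), followed by the matching revision numbers, one per line.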
2496 2496
2497 2497 @command('debugserve', [
2498 2498 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2499 2499 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2500 2500 ('', 'logiofile', '', _('file to log server I/O to')),
2501 2501 ], '')
2502 2502 def debugserve(ui, repo, **opts):
2503 2503 """run a server with advanced settings
2504 2504
2505 2505 This command is similar to :hg:`serve`. It exists partially as a
2506 2506 workaround to the fact that ``hg serve --stdio`` must have specific
2507 2507 arguments for security reasons.
2508 2508 """
2509 2509 opts = pycompat.byteskwargs(opts)
2510 2510
2511 2511 if not opts['sshstdio']:
2512 2512 raise error.Abort(_('only --sshstdio is currently supported'))
2513 2513
2514 2514 logfh = None
2515 2515
2516 2516 if opts['logiofd'] and opts['logiofile']:
2517 2517 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2518 2518
2519 2519 if opts['logiofd']:
2520 2520 # Line buffered because output is line based.
2521 2521 try:
2522 2522 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2523 2523 except OSError as e:
2524 2524 if e.errno != errno.ESPIPE:
2525 2525 raise
2526 2526 # can't seek a pipe, so `ab` mode fails on py3
2527 2527 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2528 2528 elif opts['logiofile']:
2529 2529 logfh = open(opts['logiofile'], 'ab', 1)
2530 2530
2531 2531 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2532 2532 s.serve_forever()
2533 2533
2534 2534 @command('debugsetparents', [], _('REV1 [REV2]'))
2535 2535 def debugsetparents(ui, repo, rev1, rev2=None):
2536 2536 """manually set the parents of the current working directory
2537 2537
2538 2538 This is useful for writing repository conversion tools, but should
2539 2539 be used with care. For example, neither the working directory nor the
2540 2540 dirstate is updated, so file status may be incorrect after running this
2541 2541 command.
2542 2542
2543 2543 Returns 0 on success.
2544 2544 """
2545 2545
2546 2546 node1 = scmutil.revsingle(repo, rev1).node()
2547 2547 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2548 2548
2549 2549 with repo.wlock():
2550 2550 repo.setparents(node1, node2)
2551 2551
2552 2552 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2553 2553 def debugssl(ui, repo, source=None, **opts):
2554 2554 '''test a secure connection to a server
2555 2555
2556 2556 This builds the certificate chain for the server on Windows, installing the
2557 2557 missing intermediates and trusted root via Windows Update if necessary. It
2558 2558 does nothing on other platforms.
2559 2559
2560 2560 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2561 2561 that server is used. See :hg:`help urls` for more information.
2562 2562
2563 2563 If the update succeeds, retry the original operation. Otherwise, the cause
2564 2564 of the SSL error is likely another issue.
2565 2565 '''
2566 2566 if not pycompat.iswindows:
2567 2567 raise error.Abort(_('certificate chain building is only possible on '
2568 2568 'Windows'))
2569 2569
2570 2570 if not source:
2571 2571 if not repo:
2572 2572 raise error.Abort(_("there is no Mercurial repository here, and no "
2573 2573 "server specified"))
2574 2574 source = "default"
2575 2575
2576 2576 source, branches = hg.parseurl(ui.expandpath(source))
2577 2577 url = util.url(source)
2578 2578
2579 2579 defaultport = {'https': 443, 'ssh': 22}
2580 2580 if url.scheme in defaultport:
2581 2581 try:
2582 2582 addr = (url.host, int(url.port or defaultport[url.scheme]))
2583 2583 except ValueError:
2584 2584 raise error.Abort(_("malformed port number in URL"))
2585 2585 else:
2586 2586 raise error.Abort(_("only https and ssh connections are supported"))
2587 2587
2588 2588 from . import win32
2589 2589
2590 2590 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2591 2591 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2592 2592
2593 2593 try:
2594 2594 s.connect(addr)
2595 2595 cert = s.getpeercert(True)
2596 2596
2597 2597 ui.status(_('checking the certificate chain for %s\n') % url.host)
2598 2598
2599 2599 complete = win32.checkcertificatechain(cert, build=False)
2600 2600
2601 2601 if not complete:
2602 2602 ui.status(_('certificate chain is incomplete, updating... '))
2603 2603
2604 2604 if not win32.checkcertificatechain(cert):
2605 2605 ui.status(_('failed.\n'))
2606 2606 else:
2607 2607 ui.status(_('done.\n'))
2608 2608 else:
2609 2609 ui.status(_('full certificate chain is available\n'))
2610 2610 finally:
2611 2611 s.close()
2612 2612
2613 2613 @command('debugsub',
2614 2614 [('r', 'rev', '',
2615 2615 _('revision to check'), _('REV'))],
2616 2616 _('[-r REV] [REV]'))
2617 2617 def debugsub(ui, repo, rev=None):
2618 2618 ctx = scmutil.revsingle(repo, rev, None)
2619 2619 for k, v in sorted(ctx.substate.items()):
2620 2620 ui.write(('path %s\n') % k)
2621 2621 ui.write((' source %s\n') % v[0])
2622 2622 ui.write((' revision %s\n') % v[1])
2623 2623
2624 2624 @command('debugsuccessorssets',
2625 2625 [('', 'closest', False, _('return closest successors sets only'))],
2626 2626 _('[REV]'))
2627 2627 def debugsuccessorssets(ui, repo, *revs, **opts):
2628 2628 """show set of successors for revision
2629 2629
2630 2630 A successors set of changeset A is a consistent group of revisions that
2631 2631 succeed A. It contains non-obsolete changesets only unless the closest
2632 2632 successors set is requested.
2633 2633
2634 2634 In most cases a changeset A has a single successors set containing a single
2635 2635 successor (changeset A replaced by A').
2636 2636
2637 2637 A changeset that is made obsolete with no successors is called "pruned".
2638 2638 Such changesets have no successors sets at all.
2639 2639
2640 2640 A changeset that has been "split" will have a successors set containing
2641 2641 more than one successor.
2642 2642
2643 2643 A changeset that has been rewritten in multiple different ways is called
2644 2644 "divergent". Such changesets have multiple successor sets (each of which
2645 2645 may also be split, i.e. have multiple successors).
2646 2646
2647 2647 Results are displayed as follows::
2648 2648
2649 2649 <rev1>
2650 2650 <successors-1A>
2651 2651 <rev2>
2652 2652 <successors-2A>
2653 2653 <successors-2B1> <successors-2B2> <successors-2B3>
2654 2654
2655 2655 Here rev2 has two possible (i.e. divergent) successors sets. The first
2656 2656 holds one element, whereas the second holds three (i.e. the changeset has
2657 2657 been split).
2658 2658 """
2659 2659 # passed to successorssets caching computation from one call to another
2660 2660 cache = {}
2661 2661 ctx2str = bytes
2662 2662 node2str = short
2663 2663 for rev in scmutil.revrange(repo, revs):
2664 2664 ctx = repo[rev]
2665 2665 ui.write('%s\n'% ctx2str(ctx))
2666 2666 for succsset in obsutil.successorssets(repo, ctx.node(),
2667 2667 closest=opts[r'closest'],
2668 2668 cache=cache):
2669 2669 if succsset:
2670 2670 ui.write(' ')
2671 2671 ui.write(node2str(succsset[0]))
2672 2672 for node in succsset[1:]:
2673 2673 ui.write(' ')
2674 2674 ui.write(node2str(node))
2675 2675 ui.write('\n')
2676 2676
2677 2677 @command('debugtemplate',
2678 2678 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2679 2679 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2680 2680 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2681 2681 optionalrepo=True)
2682 2682 def debugtemplate(ui, repo, tmpl, **opts):
2683 2683 """parse and apply a template
2684 2684
2685 2685 If -r/--rev is given, the template is processed as a log template and
2686 2686 applied to the given changesets. Otherwise, it is processed as a generic
2687 2687 template.
2688 2688
2689 2689 Use --verbose to print the parsed tree.
2690 2690 """
2691 2691 revs = None
2692 2692 if opts[r'rev']:
2693 2693 if repo is None:
2694 2694 raise error.RepoError(_('there is no Mercurial repository here '
2695 2695 '(.hg not found)'))
2696 2696 revs = scmutil.revrange(repo, opts[r'rev'])
2697 2697
2698 2698 props = {}
2699 2699 for d in opts[r'define']:
2700 2700 try:
2701 2701 k, v = (e.strip() for e in d.split('=', 1))
2702 2702 if not k or k == 'ui':
2703 2703 raise ValueError
2704 2704 props[k] = v
2705 2705 except ValueError:
2706 2706 raise error.Abort(_('malformed keyword definition: %s') % d)
2707 2707
2708 2708 if ui.verbose:
2709 2709 aliases = ui.configitems('templatealias')
2710 2710 tree = templater.parse(tmpl)
2711 2711 ui.note(templater.prettyformat(tree), '\n')
2712 2712 newtree = templater.expandaliases(tree, aliases)
2713 2713 if newtree != tree:
2714 2714 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2715 2715
2716 2716 if revs is None:
2717 2717 tres = formatter.templateresources(ui, repo)
2718 2718 t = formatter.maketemplater(ui, tmpl, resources=tres)
2719 2719 if ui.verbose:
2720 2720 kwds, funcs = t.symbolsuseddefault()
2721 2721 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2722 2722 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2723 2723 ui.write(t.renderdefault(props))
2724 2724 else:
2725 2725 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2726 2726 if ui.verbose:
2727 2727 kwds, funcs = displayer.t.symbolsuseddefault()
2728 2728 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2729 2729 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2730 2730 for r in revs:
2731 2731 displayer.show(repo[r], **pycompat.strkwargs(props))
2732 2732 displayer.close()
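# Usage sketch (keyword name and revision are arbitrary):
#
#   generic template with a keyword defined on the command line:
#     $ hg debugtemplate -D foo=bar '{foo}\n'
#
#   log template applied to a changeset:
#     $ hg debugtemplate -r . '{rev}:{node|short}\n'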
2733 2733
2734 2734 @command('debuguigetpass', [
2735 2735 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2736 2736 ], _('[-p TEXT]'), norepo=True)
2737 2737 def debuguigetpass(ui, prompt=''):
2738 2738 """show prompt to type password"""
2739 2739 r = ui.getpass(prompt)
2740 2740 ui.write(('response: %s\n') % r)
2741 2741
2742 2742 @command('debuguiprompt', [
2743 2743 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2744 2744 ], _('[-p TEXT]'), norepo=True)
2745 2745 def debuguiprompt(ui, prompt=''):
2746 2746 """show plain prompt"""
2747 2747 r = ui.prompt(prompt)
2748 2748 ui.write(('response: %s\n') % r)
2749 2749
2750 2750 @command('debugupdatecaches', [])
2751 2751 def debugupdatecaches(ui, repo, *pats, **opts):
2752 2752 """warm all known caches in the repository"""
2753 2753 with repo.wlock(), repo.lock():
2754 2754 repo.updatecaches(full=True)
2755 2755
2756 2756 @command('debugupgraderepo', [
2757 2757 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2758 2758 ('', 'run', False, _('performs an upgrade')),
2759 2759 ('', 'backup', True, _('keep the old repository content around')),
2760 2760 ])
2761 2761 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2762 2762 """upgrade a repository to use different features
2763 2763
2764 2764 If no arguments are specified, the repository is evaluated for upgrade
2765 2765 and a list of problems and potential optimizations is printed.
2766 2766
2767 2767 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2768 2768 can be influenced via additional arguments. More details will be provided
2769 2769 by the command output when run without ``--run``.
2770 2770
2771 2771 During the upgrade, the repository will be locked and no writes will be
2772 2772 allowed.
2773 2773
2774 2774 At the end of the upgrade, the repository may not be readable while new
2775 2775 repository data is swapped in. This window will be as long as it takes to
2776 2776 rename some directories inside the ``.hg`` directory. On most machines, this
2777 2777 should complete almost instantaneously and the chances of a consumer being
2778 2778 unable to access the repository should be low.
2779 2779 """
2780 2780 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2781 2781 backup=backup)
2782 2782
2783 2783 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2784 2784 inferrepo=True)
2785 2785 def debugwalk(ui, repo, *pats, **opts):
2786 2786 """show how files match on given patterns"""
2787 2787 opts = pycompat.byteskwargs(opts)
2788 2788 m = scmutil.match(repo[None], pats, opts)
2789 2789 if ui.verbose:
2790 2790 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2791 2791 items = list(repo[None].walk(m))
2792 2792 if not items:
2793 2793 return
2794 2794 f = lambda fn: fn
2795 2795 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2796 2796 f = lambda fn: util.normpath(fn)
2797 2797 fmt = 'f %%-%ds %%-%ds %%s' % (
2798 2798 max([len(abs) for abs in items]),
2799 2799 max([len(repo.pathto(abs)) for abs in items]))
2800 2800 for abs in items:
2801 2801 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2802 2802 ui.write("%s\n" % line.rstrip())
2803 2803
2804 2804 @command('debugwhyunstable', [], _('REV'))
2805 2805 def debugwhyunstable(ui, repo, rev):
2806 2806 """explain instabilities of a changeset"""
2807 2807 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2808 2808 dnodes = ''
2809 2809 if entry.get('divergentnodes'):
2810 2810 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2811 2811 for ctx in entry['divergentnodes']) + ' '
2812 2812 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2813 2813 entry['reason'], entry['node']))
2814 2814
2815 2815 @command('debugwireargs',
2816 2816 [('', 'three', '', 'three'),
2817 2817 ('', 'four', '', 'four'),
2818 2818 ('', 'five', '', 'five'),
2819 2819 ] + cmdutil.remoteopts,
2820 2820 _('REPO [OPTIONS]... [ONE [TWO]]'),
2821 2821 norepo=True)
2822 2822 def debugwireargs(ui, repopath, *vals, **opts):
2823 2823 opts = pycompat.byteskwargs(opts)
2824 2824 repo = hg.peer(ui, opts, repopath)
2825 2825 for opt in cmdutil.remoteopts:
2826 2826 del opts[opt[1]]
2827 2827 args = {}
2828 2828 for k, v in opts.iteritems():
2829 2829 if v:
2830 2830 args[k] = v
2831 2831 args = pycompat.strkwargs(args)
2832 2832 # run twice to check that we don't mess up the stream for the next command
2833 2833 res1 = repo.debugwireargs(*vals, **args)
2834 2834 res2 = repo.debugwireargs(*vals, **args)
2835 2835 ui.write("%s\n" % res1)
2836 2836 if res1 != res2:
2837 2837 ui.warn("%s\n" % res2)
2838 2838
2839 2839 def _parsewirelangblocks(fh):
2840 2840 activeaction = None
2841 2841 blocklines = []
2842 2842 lastindent = 0
2843 2843
2844 2844 for line in fh:
2845 2845 line = line.rstrip()
2846 2846 if not line:
2847 2847 continue
2848 2848
2849 2849 if line.startswith(b'#'):
2850 2850 continue
2851 2851
2852 2852 if not line.startswith(b' '):
2853 2853 # New block. Flush previous one.
2854 2854 if activeaction:
2855 2855 yield activeaction, blocklines
2856 2856
2857 2857 activeaction = line
2858 2858 blocklines = []
2859 2859 lastindent = 0
2860 2860 continue
2861 2861
2862 2862 # Else we start with an indent.
2863 2863
2864 2864 if not activeaction:
2865 2865 raise error.Abort(_('indented line outside of block'))
2866 2866
2867 2867 indent = len(line) - len(line.lstrip())
2868 2868
2869 2869 # If this line is indented more than the last line, concatenate it.
2870 2870 if indent > lastindent and blocklines:
2871 2871 blocklines[-1] += line.lstrip()
2872 2872 else:
2873 2873 blocklines.append(line)
2874 2874 lastindent = indent
2875 2875
2876 2876 # Flush last block.
2877 2877 if activeaction:
2878 2878 yield activeaction, blocklines
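# Sketch of the block structure this parser expects (action and argument
# names are only examples):
#
#   command listkeys          <- no leading space: starts a new block
#       namespace bookmarks   <- leading space: belongs to that block
#   readavailable             <- next unindented line: previous block is yielded
#
# A line indented deeper than the previous one is concatenated onto it, which
# allows long argument values to be wrapped.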
2879 2879
2880 2880 @command('debugwireproto',
2881 2881 [
2882 2882 ('', 'localssh', False, _('start an SSH server for this repo')),
2883 2883 ('', 'peer', '', _('construct a specific version of the peer')),
2884 2884 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2885 2885 ('', 'nologhandshake', False,
2886 2886 _('do not log I/O related to the peer handshake')),
2887 2887 ] + cmdutil.remoteopts,
2888 2888 _('[PATH]'),
2889 2889 optionalrepo=True)
2890 2890 def debugwireproto(ui, repo, path=None, **opts):
2891 2891 """send wire protocol commands to a server
2892 2892
2893 2893 This command can be used to issue wire protocol commands to remote
2894 2894 peers and to debug the raw data being exchanged.
2895 2895
2896 2896 ``--localssh`` will start an SSH server against the current repository
2897 2897 and connect to that. By default, the connection will perform a handshake
2898 2898 and establish an appropriate peer instance.
2899 2899
2900 2900 ``--peer`` can be used to bypass the handshake protocol and construct a
2901 2901 peer instance using the specified class type. Valid values are ``raw``,
2902 2902 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2903 2903 raw data payloads and don't support higher-level command actions.
2904 2904
2905 2905 ``--noreadstderr`` can be used to disable automatic reading from stderr
2906 2906 of the peer (for SSH connections only). Disabling automatic reading of
2907 2907 stderr is useful for making output more deterministic.
2908 2908
2909 2909 Commands are issued via a mini language which is specified via stdin.
2910 2910 The language consists of individual actions to perform. An action is
2911 2911 defined by a block. A block is defined as a line with no leading
2912 2912 space followed by 0 or more lines with leading space. Blocks are
2913 2913 effectively a high-level command with additional metadata.
2914 2914
2915 2915 Lines beginning with ``#`` are ignored.
2916 2916
2917 2917 The following sections denote available actions.
2918 2918
2919 2919 raw
2920 2920 ---
2921 2921
2922 2922 Send raw data to the server.
2923 2923
2924 2924 The block payload contains the raw data to send as one atomic send
2925 2925 operation. The data may not actually be delivered in a single system
2926 2926 call: it depends on the abilities of the transport being used.
2927 2927
2928 2928 Each line in the block is de-indented and concatenated. Then, that
2929 2929 value is evaluated as a Python b'' literal. This allows the use of
2930 2930 backslash escaping, etc.
2931 2931
2932 2932 raw+
2933 2933 ----
2934 2934
2935 2935 Behaves like ``raw`` except flushes output afterwards.
2936 2936
2937 2937 command <X>
2938 2938 -----------
2939 2939
2940 2940 Send a request to run a named command, whose name follows the ``command``
2941 2941 string.
2942 2942
2943 2943 Arguments to the command are defined as lines in this block. The format of
2944 2944 each line is ``<key> <value>``. e.g.::
2945 2945
2946 2946 command listkeys
2947 2947 namespace bookmarks
2948 2948
2949 2949 If the value begins with ``eval:``, it will be interpreted as a Python
2950 2950 literal expression. Otherwise values are interpreted as Python b'' literals.
2951 2951 This allows sending complex types and encoding special byte sequences via
2952 2952 backslash escaping.
2953 2953
2954 2954 The following arguments have special meaning:
2955 2955
2956 2956 ``PUSHFILE``
2957 2957 When defined, the *push* mechanism of the peer will be used instead
2958 2958 of the static request-response mechanism and the content of the
2959 2959 file specified in the value of this argument will be sent as the
2960 2960 command payload.
2961 2961
2962 2962 This can be used to submit a local bundle file to the remote.
2963 2963
2964 2964 batchbegin
2965 2965 ----------
2966 2966
2967 2967 Instruct the peer to begin a batched send.
2968 2968
2969 2969 All ``command`` blocks are queued for execution until the next
2970 2970 ``batchsubmit`` block.
2971 2971
2972 2972 batchsubmit
2973 2973 -----------
2974 2974
2975 2975 Submit previously queued ``command`` blocks as a batch request.
2976 2976
2977 2977 This action MUST be paired with a ``batchbegin`` action.
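
For example, an illustrative batch issuing two read-only commands might look
like::

  # illustrative batch of two commands
  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit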
2978 2978
2979 2979 httprequest <method> <path>
2980 2980 ---------------------------
2981 2981
2982 2982 (HTTP peer only)
2983 2983
2984 2984 Send an HTTP request to the peer.
2985 2985
2986 2986 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2987 2987
2988 2988 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2989 2989 headers to add to the request. e.g. ``Accept: foo``.
2990 2990
2991 2991 The following arguments are special:
2992 2992
2993 2993 ``BODYFILE``
2994 2994 The content of the file defined as the value to this argument will be
2995 2995 transferred verbatim as the HTTP request body.
2996 2996
2997 2997 ``frame <type> <flags> <payload>``
2998 2998 Send a unified protocol frame as part of the request body.
2999 2999
3000 3000 All frames will be collected and sent as the body to the HTTP
3001 3001 request.
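
For example, an illustrative request against the version 1 HTTP API (the
query string and header values are assumptions chosen for illustration)
might look like::

  # illustrative request; header values are arbitrary
  httprequest GET ?cmd=capabilities
      accept: application/mercurial-0.1
      user-agent: test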
3002 3002
3003 3003 close
3004 3004 -----
3005 3005
3006 3006 Close the connection to the server.
3007 3007
3008 3008 flush
3009 3009 -----
3010 3010
3011 3011 Flush data written to the server.
3012 3012
3013 3013 readavailable
3014 3014 -------------
3015 3015
3016 3016 Close the write end of the connection and read all available data from
3017 3017 the server.
3018 3018
3019 3019 If the connection to the server encompasses multiple pipes, we poll both
3020 3020 pipes and read available data.
3021 3021
3022 3022 readline
3023 3023 --------
3024 3024
3025 3025 Read a line of output from the server. If there are multiple output
3026 3026 pipes, reads only the main pipe.
3027 3027
3028 3028 ereadline
3029 3029 ---------
3030 3030
3031 3031 Like ``readline``, but read from the stderr pipe, if available.
3032 3032
3033 3033 read <X>
3034 3034 --------
3035 3035
3036 3036 ``read()`` ``<X>`` bytes from the server's main output pipe.
3037 3037
3038 3038 eread <X>
3039 3039 ---------
3040 3040
3041 3041 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
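
For example, to consume a fixed amount of server output (the byte counts are
arbitrary)::

  read 20
  eread 12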
3042 3042
3043 3043 Specifying Unified Frame-Based Protocol Frames
3044 3044 ----------------------------------------------
3045 3045
3046 3046 It is possible to emit *Unified Frame-Based Protocol* frames by using
3047 3047 special syntax.
3048 3048
3049 3049 A frame is composed of a type, flags, and a payload. These can be parsed
3050 3050 from a string of the form:
3051 3051
3052 3052 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3053 3053
3054 3054 ``request-id`` and ``stream-id`` are integers defining the request and
3055 3055 stream identifiers.
3056 3056
3057 3057 ``type`` can be an integer value for the frame type or the string name
3058 3058 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3059 3059 ``command-name``.
3060 3060
3061 3061 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3062 3062 components. Each component (and there can be just one) can be an integer
3063 3063 or a flag name for stream flags or frame flags, respectively. Values are
3064 3064 resolved to integers and then bitwise OR'd together.
3065 3065
3066 3066 ``payload`` represents the raw frame payload. If it begins with
3067 3067 ``cbor:``, the following string is evaluated as Python code and the
3068 3068 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3069 3069 as a Python byte string literal.
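
For example, an illustrative frame requesting the ``heads`` command over wire
protocol version 2 (assuming the ``command-request`` frame type and the
``stream-begin``/``new`` flag names defined in ``wireprotoframing.py``) could
be written as::

  # assumes these type/flag names exist in wireprotoframing.py
  frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}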
3070 3070 """
3071 3071 opts = pycompat.byteskwargs(opts)
3072 3072
3073 3073 if opts['localssh'] and not repo:
3074 3074 raise error.Abort(_('--localssh requires a repository'))
3075 3075
3076 3076 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3077 3077 raise error.Abort(_('invalid value for --peer'),
3078 3078 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3079 3079
3080 3080 if path and opts['localssh']:
3081 3081 raise error.Abort(_('cannot specify --localssh with an explicit '
3082 3082 'path'))
3083 3083
3084 3084 if ui.interactive():
3085 3085 ui.write(_('(waiting for commands on stdin)\n'))
3086 3086
3087 3087 blocks = list(_parsewirelangblocks(ui.fin))
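# Each parsed block is an (action, lines) pair: ``action`` is the unindented
# first line (e.g. 'command listkeys') and ``lines`` holds the indented
# continuation lines, still carrying their leading whitespace.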
3088 3088
3089 3089 proc = None
3090 3090 stdin = None
3091 3091 stdout = None
3092 3092 stderr = None
3093 3093 opener = None
3094 3094
3095 3095 if opts['localssh']:
3096 3096 # We start the SSH server in its own process so there is process
3097 3097 # separation. This prevents a whole class of potential bugs around
3098 3098 # shared state from interfering with server operation.
3099 3099 args = procutil.hgcmd() + [
3100 3100 '-R', repo.root,
3101 3101 'debugserve', '--sshstdio',
3102 3102 ]
3103 3103 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3104 3104 stdin=subprocess.PIPE,
3105 3105 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3106 3106 bufsize=0)
3107 3107
3108 3108 stdin = proc.stdin
3109 3109 stdout = proc.stdout
3110 3110 stderr = proc.stderr
3111 3111
3112 3112 # We turn the pipes into observers so we can log I/O.
3113 3113 if ui.verbose or opts['peer'] == 'raw':
3114 3114 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3115 3115 logdata=True)
3116 3116 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3117 3117 logdata=True)
3118 3118 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3119 3119 logdata=True)
3120 3120
3121 3121 # --localssh also implies the peer connection settings.
3122 3122
3123 3123 url = 'ssh://localserver'
3124 3124 autoreadstderr = not opts['noreadstderr']
3125 3125
3126 3126 if opts['peer'] == 'ssh1':
3127 3127 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3128 3128 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3129 3129 None, autoreadstderr=autoreadstderr)
3130 3130 elif opts['peer'] == 'ssh2':
3131 3131 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3132 3132 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3133 3133 None, autoreadstderr=autoreadstderr)
3134 3134 elif opts['peer'] == 'raw':
3135 3135 ui.write(_('using raw connection to peer\n'))
3136 3136 peer = None
3137 3137 else:
3138 3138 ui.write(_('creating ssh peer from handshake results\n'))
3139 3139 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3140 3140 autoreadstderr=autoreadstderr)
3141 3141
3142 3142 elif path:
3143 3143 # We bypass hg.peer() so we can proxy the sockets.
3144 3144 # TODO consider not doing this because we skip
3145 3145 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3146 3146 u = util.url(path)
3147 3147 if u.scheme != 'http':
3148 3148 raise error.Abort(_('only http:// paths are currently supported'))
3149 3149
3150 3150 url, authinfo = u.authinfo()
3151 3151 openerargs = {
3152 3152 r'useragent': b'Mercurial debugwireproto',
3153 3153 }
3154 3154
3155 3155 # Turn pipes/sockets into observers so we can log I/O.
3156 3156 if ui.verbose:
3157 3157 openerargs.update({
3158 3158 r'loggingfh': ui,
3159 3159 r'loggingname': b's',
3160 3160 r'loggingopts': {
3161 3161 r'logdata': True,
3162 3162 r'logdataapis': False,
3163 3163 },
3164 3164 })
3165 3165
3166 3166 if ui.debugflag:
3167 3167 openerargs[r'loggingopts'][r'logdataapis'] = True
3168 3168
3169 3169 # Don't send default headers when in raw mode. This allows us to
3170 3170 # bypass most of the behavior of our URL handling code so we can
3171 3171 # have near complete control over what's sent on the wire.
3172 3172 if opts['peer'] == 'raw':
3173 3173 openerargs[r'sendaccept'] = False
3174 3174
3175 3175 opener = urlmod.opener(ui, authinfo, **openerargs)
3176 3176
3177 3177 if opts['peer'] == 'http2':
3178 3178 ui.write(_('creating http peer for wire protocol version 2\n'))
3179 3179 # We go through makepeer() because we need an API descriptor for
3180 3180 # the peer instance to be useful.
3181 3181 with ui.configoverride({
3182 3182 ('experimental', 'httppeer.advertise-v2'): True}):
3183 3183 if opts['nologhandshake']:
3184 3184 ui.pushbuffer()
3185 3185
3186 3186 peer = httppeer.makepeer(ui, path, opener=opener)
3187 3187
3188 3188 if opts['nologhandshake']:
3189 3189 ui.popbuffer()
3190 3190
3191 3191 if not isinstance(peer, httppeer.httpv2peer):
3192 3192 raise error.Abort(_('could not instantiate HTTP peer for '
3193 3193 'wire protocol version 2'),
3194 3194 hint=_('the server may not have the feature '
3195 3195 'enabled or is not allowing this '
3196 3196 'client version'))
3197 3197
3198 3198 elif opts['peer'] == 'raw':
3199 3199 ui.write(_('using raw connection to peer\n'))
3200 3200 peer = None
3201 3201 elif opts['peer']:
3202 3202 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3203 3203 opts['peer'])
3204 3204 else:
3205 3205 peer = httppeer.makepeer(ui, path, opener=opener)
3206 3206
3207 3207 # We /could/ populate stdin/stdout with sock.makefile()...
3208 3208 else:
3209 3209 raise error.Abort(_('unsupported connection configuration'))
3210 3210
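# Queued (command, args) pairs collected between 'batchbegin' and
# 'batchsubmit'; None means no batch is currently open.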
3211 3211 batchedcommands = None
3212 3212
3213 3213 # Now perform actions based on the parsed wire language instructions.
3214 3214 for action, lines in blocks:
3215 3215 if action in ('raw', 'raw+'):
3216 3216 if not stdin:
3217 3217 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3218 3218
3219 3219 # Concatenate the data together.
3220 3220 data = ''.join(l.lstrip() for l in lines)
3221 3221 data = stringutil.unescapestr(data)
3222 3222 stdin.write(data)
3223 3223
3224 3224 if action == 'raw+':
3225 3225 stdin.flush()
3226 3226 elif action == 'flush':
3227 3227 if not stdin:
3228 3228 raise error.Abort(_('cannot call flush on this peer'))
3229 3229 stdin.flush()
3230 3230 elif action.startswith('command'):
3231 3231 if not peer:
3232 3232 raise error.Abort(_('cannot send commands unless peer instance '
3233 3233 'is available'))
3234 3234
3235 3235 command = action.split(' ', 1)[1]
3236 3236
3237 3237 args = {}
3238 3238 for line in lines:
3239 3239 # We need to allow empty values.
3240 3240 fields = line.lstrip().split(' ', 1)
3241 3241 if len(fields) == 1:
3242 3242 key = fields[0]
3243 3243 value = ''
3244 3244 else:
3245 3245 key, value = fields
3246 3246
3247 3247 if value.startswith('eval:'):
3248 3248 value = stringutil.evalpythonliteral(value[5:])
3249 3249 else:
3250 3250 value = stringutil.unescapestr(value)
3251 3251
3252 3252 args[key] = value
3253 3253
3254 3254 if batchedcommands is not None:
3255 3255 batchedcommands.append((command, args))
3256 3256 continue
3257 3257
3258 3258 ui.status(_('sending %s command\n') % command)
3259 3259
3260 3260 if 'PUSHFILE' in args:
3261 3261 with open(args['PUSHFILE'], r'rb') as fh:
3262 3262 del args['PUSHFILE']
3263 3263 res, output = peer._callpush(command, fh,
3264 3264 **pycompat.strkwargs(args))
3265 3265 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3266 3266 ui.status(_('remote output: %s\n') %
3267 3267 stringutil.escapestr(output))
3268 3268 else:
3269 3269 with peer.commandexecutor() as e:
3270 3270 res = e.callcommand(command, args).result()
3271 3271
3272 3272 if isinstance(res, wireprotov2peer.commandresponse):
3273 3273 val = res.objects()
3274 3274 ui.status(_('response: %s\n') %
3275 3275 stringutil.pprint(val, bprefix=True, indent=2))
3276 3276 else:
3277 3277 ui.status(_('response: %s\n') %
3278 3278 stringutil.pprint(res, bprefix=True, indent=2))
3279 3279
3280 3280 elif action == 'batchbegin':
3281 3281 if batchedcommands is not None:
3282 3282 raise error.Abort(_('nested batchbegin not allowed'))
3283 3283
3284 3284 batchedcommands = []
3285 3285 elif action == 'batchsubmit':
3286 3286 # There is a batching API we could go through. But it would be
3287 3287 # difficult to normalize requests into function calls. It is easier
3288 3288 # to bypass this layer and normalize to commands + args.
3289 3289 ui.status(_('sending batch with %d sub-commands\n') %
3290 3290 len(batchedcommands))
3291 3291 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3292 3292 ui.status(_('response #%d: %s\n') %
3293 3293 (i, stringutil.escapestr(chunk)))
3294 3294
3295 3295 batchedcommands = None
3296 3296
3297 3297 elif action.startswith('httprequest '):
3298 3298 if not opener:
3299 3299 raise error.Abort(_('cannot use httprequest without an HTTP '
3300 3300 'peer'))
3301 3301
3302 3302 request = action.split(' ', 2)
3303 3303 if len(request) != 3:
3304 3304 raise error.Abort(_('invalid httprequest: expected format is '
3305 3305 '"httprequest <method> <path>"'))
3306 3306
3307 3307 method, httppath = request[1:]
3308 3308 headers = {}
3309 3309 body = None
3310 3310 frames = []
3311 3311 for line in lines:
3312 3312 line = line.lstrip()
3313 3313 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3314 3314 if m:
3315 3315 # Headers need to use native strings.
3316 3316 key = pycompat.strurl(m.group(1))
3317 3317 value = pycompat.strurl(m.group(2))
3318 3318 headers[key] = value
3319 3319 continue
3320 3320
3321 3321 if line.startswith(b'BODYFILE '):
3322 3322 with open(line.split(b' ', 1)[1], 'rb') as fh:
3323 3323 body = fh.read()
3324 3324 elif line.startswith(b'frame '):
3325 3325 frame = wireprotoframing.makeframefromhumanstring(
3326 3326 line[len(b'frame '):])
3327 3327
3328 3328 frames.append(frame)
3329 3329 else:
3330 3330 raise error.Abort(_('unknown argument to httprequest: %s') %
3331 3331 line)
3332 3332
3333 3333 url = path + httppath
3334 3334
3335 3335 if frames:
3336 3336 body = b''.join(bytes(f) for f in frames)
3337 3337
3338 3338 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3339 3339
3340 3340 # urllib.Request insists on using has_data() as a proxy for
3341 3341 # determining the request method. Override that to use our
3342 3342 # explicitly requested method.
3343 3343 req.get_method = lambda: pycompat.sysstr(method)
3344 3344
3345 3345 try:
3346 3346 res = opener.open(req)
3347 3347 body = res.read()
3348 3348 except util.urlerr.urlerror as e:
3349 3349 # read() method must be called, but only exists in Python 2
3350 3350 getattr(e, 'read', lambda: None)()
3351 3351 continue
3352 3352
3353 3353 ct = res.headers.get(r'Content-Type')
3354 3354 if ct == r'application/mercurial-cbor':
3355 3355 ui.write(_('cbor> %s\n') %
3356 3356 stringutil.pprint(cborutil.decodeall(body),
3357 3357 bprefix=True,
3358 3358 indent=2))
3359 3359
3360 3360 elif action == 'close':
3361 3361 peer.close()
3362 3362 elif action == 'readavailable':
3363 3363 if not stdout or not stderr:
3364 3364 raise error.Abort(_('readavailable not available on this peer'))
3365 3365
3366 3366 stdin.close()
3367 3367 stdout.read()
3368 3368 stderr.read()
3369 3369
3370 3370 elif action == 'readline':
3371 3371 if not stdout:
3372 3372 raise error.Abort(_('readline not available on this peer'))
3373 3373 stdout.readline()
3374 3374 elif action == 'ereadline':
3375 3375 if not stderr:
3376 3376 raise error.Abort(_('ereadline not available on this peer'))
3377 3377 stderr.readline()
3378 3378 elif action.startswith('read '):
3379 3379 count = int(action.split(' ', 1)[1])
3380 3380 if not stdout:
3381 3381 raise error.Abort(_('read not available on this peer'))
3382 3382 stdout.read(count)
3383 3383 elif action.startswith('eread '):
3384 3384 count = int(action.split(' ', 1)[1])
3385 3385 if not stderr:
3386 3386 raise error.Abort(_('eread not available on this peer'))
3387 3387 stderr.read(count)
3388 3388 else:
3389 3389 raise error.Abort(_('unknown action: %s') % action)
3390 3390
3391 3391 if batchedcommands is not None:
3392 3392 raise error.Abort(_('unclosed "batchbegin" request'))
3393 3393
3394 3394 if peer:
3395 3395 peer.close()
3396 3396
3397 3397 if proc:
3398 3398 proc.kill()