debugdiscovery: small internal refactoring...
marmoute
r42198:d31d8c52 default
@@ -1,3430 +1,3436 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 copies,
42 42 dagparser,
43 43 encoding,
44 44 error,
45 45 exchange,
46 46 extensions,
47 47 filemerge,
48 48 filesetlang,
49 49 formatter,
50 50 hg,
51 51 httppeer,
52 52 localrepo,
53 53 lock as lockmod,
54 54 logcmdutil,
55 55 merge as mergemod,
56 56 obsolete,
57 57 obsutil,
58 58 phases,
59 59 policy,
60 60 pvec,
61 61 pycompat,
62 62 registrar,
63 63 repair,
64 64 revlog,
65 65 revset,
66 66 revsetlang,
67 67 scmutil,
68 68 setdiscovery,
69 69 simplemerge,
70 70 sshpeer,
71 71 sslutil,
72 72 streamclone,
73 73 templater,
74 74 treediscovery,
75 75 upgrade,
76 76 url as urlmod,
77 77 util,
78 78 vfs as vfsmod,
79 79 wireprotoframing,
80 80 wireprotoserver,
81 81 wireprotov2peer,
82 82 )
83 83 from .utils import (
84 84 cborutil,
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 from .revlogutils import (
91 91 deltas as deltautil
92 92 )
93 93
94 94 release = lockmod.release
95 95
96 96 command = registrar.command()
97 97
98 98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 99 def debugancestor(ui, repo, *args):
100 100 """find the ancestor revision of two revisions in a given index"""
101 101 if len(args) == 3:
102 102 index, rev1, rev2 = args
103 103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 104 lookup = r.lookup
105 105 elif len(args) == 2:
106 106 if not repo:
107 107 raise error.Abort(_('there is no Mercurial repository here '
108 108 '(.hg not found)'))
109 109 rev1, rev2 = args
110 110 r = repo.changelog
111 111 lookup = repo.lookup
112 112 else:
113 113 raise error.Abort(_('either two or three arguments required'))
114 114 a = r.ancestor(lookup(rev1), lookup(rev2))
115 115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116 116
117 117 @command('debugapplystreamclonebundle', [], 'FILE')
118 118 def debugapplystreamclonebundle(ui, repo, fname):
119 119 """apply a stream clone bundle file"""
120 120 f = hg.openpath(ui, fname)
121 121 gen = exchange.readbundle(ui, f, fname)
122 122 gen.apply(repo)
123 123
124 124 @command('debugbuilddag',
125 125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 127 ('n', 'new-file', None, _('add new file at each rev'))],
128 128 _('[OPTION]... [TEXT]'))
129 129 def debugbuilddag(ui, repo, text=None,
130 130 mergeable_file=False,
131 131 overwritten_file=False,
132 132 new_file=False):
133 133 """builds a repo with a given DAG from scratch in the current empty repo
134 134
135 135 The description of the DAG is read from stdin if not given on the
136 136 command line.
137 137
138 138 Elements:
139 139
140 140 - "+n" is a linear run of n nodes based on the current default parent
141 141 - "." is a single node based on the current default parent
142 142 - "$" resets the default parent to null (implied at the start);
143 143 otherwise the default parent is always the last node created
144 144 - "<p" sets the default parent to the backref p
145 145 - "*p" is a fork at parent p, which is a backref
146 146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 147 - "/p2" is a merge of the preceding node and p2
148 148 - ":tag" defines a local tag for the preceding node
149 149 - "@branch" sets the named branch for subsequent nodes
150 150 - "#...\\n" is a comment up to the end of the line
151 151
152 152 Whitespace between the above elements is ignored.
153 153
154 154 A backref is either
155 155
156 156 - a number n, which references the node curr-n, where curr is the current
157 157 node, or
158 158 - the name of a local tag you placed earlier using ":tag", or
159 159 - empty to denote the default parent.
160 160
161 161 All string valued-elements are either strictly alphanumeric, or must
162 162 be enclosed in double quotes ("..."), with "\\" as escape character.
163 163 """
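    # Illustrative sketch (the concrete text below is an assumption, built
    # only from the elements documented above): "+3" makes three linear
    # nodes, ":base" tags the last one, "$" resets the parent to null,
    # "+2" starts a second root with two nodes, "/base" merges the
    # preceding node with the tagged one, and "@stable +1" adds a final
    # node on the "stable" branch:
    #
    #     +3 :base $ +2 /base @stable +1
    #
    # Piped to `hg debugbuilddag` in an empty repository, this would
    # create seven changesets forming two roots joined by a merge.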
164 164
165 165 if text is None:
166 166 ui.status(_("reading DAG from stdin\n"))
167 167 text = ui.fin.read()
168 168
169 169 cl = repo.changelog
170 170 if len(cl) > 0:
171 171 raise error.Abort(_('repository is not empty'))
172 172
173 173 # determine number of revs in DAG
174 174 total = 0
175 175 for type, data in dagparser.parsedag(text):
176 176 if type == 'n':
177 177 total += 1
178 178
179 179 if mergeable_file:
180 180 linesperrev = 2
181 181 # make a file with k lines per rev
182 182 initialmergedlines = ['%d' % i
183 183 for i in pycompat.xrange(0, total * linesperrev)]
184 184 initialmergedlines.append("")
185 185
186 186 tags = []
187 187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 188 total=total)
189 189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 190 at = -1
191 191 atbranch = 'default'
192 192 nodeids = []
193 193 id = 0
194 194 progress.update(id)
195 195 for type, data in dagparser.parsedag(text):
196 196 if type == 'n':
197 197 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 198 id, ps = data
199 199
200 200 files = []
201 201 filecontent = {}
202 202
203 203 p2 = None
204 204 if mergeable_file:
205 205 fn = "mf"
206 206 p1 = repo[ps[0]]
207 207 if len(ps) > 1:
208 208 p2 = repo[ps[1]]
209 209 pa = p1.ancestor(p2)
210 210 base, local, other = [x[fn].data() for x in (pa, p1,
211 211 p2)]
212 212 m3 = simplemerge.Merge3Text(base, local, other)
213 213 ml = [l.strip() for l in m3.merge_lines()]
214 214 ml.append("")
215 215 elif at > 0:
216 216 ml = p1[fn].data().split("\n")
217 217 else:
218 218 ml = initialmergedlines
219 219 ml[id * linesperrev] += " r%i" % id
220 220 mergedtext = "\n".join(ml)
221 221 files.append(fn)
222 222 filecontent[fn] = mergedtext
223 223
224 224 if overwritten_file:
225 225 fn = "of"
226 226 files.append(fn)
227 227 filecontent[fn] = "r%i\n" % id
228 228
229 229 if new_file:
230 230 fn = "nf%i" % id
231 231 files.append(fn)
232 232 filecontent[fn] = "r%i\n" % id
233 233 if len(ps) > 1:
234 234 if not p2:
235 235 p2 = repo[ps[1]]
236 236 for fn in p2:
237 237 if fn.startswith("nf"):
238 238 files.append(fn)
239 239 filecontent[fn] = p2[fn].data()
240 240
241 241 def fctxfn(repo, cx, path):
242 242 if path in filecontent:
243 243 return context.memfilectx(repo, cx, path,
244 244 filecontent[path])
245 245 return None
246 246
247 247 if len(ps) == 0 or ps[0] < 0:
248 248 pars = [None, None]
249 249 elif len(ps) == 1:
250 250 pars = [nodeids[ps[0]], None]
251 251 else:
252 252 pars = [nodeids[p] for p in ps]
253 253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 254 date=(id, 0),
255 255 user="debugbuilddag",
256 256 extra={'branch': atbranch})
257 257 nodeid = repo.commitctx(cx)
258 258 nodeids.append(nodeid)
259 259 at = id
260 260 elif type == 'l':
261 261 id, name = data
262 262 ui.note(('tag %s\n' % name))
263 263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 264 elif type == 'a':
265 265 ui.note(('branch %s\n' % data))
266 266 atbranch = data
267 267 progress.update(id)
268 268
269 269 if tags:
270 270 repo.vfs.write("localtags", "".join(tags))
271 271
272 272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 273 indent_string = ' ' * indent
274 274 if all:
275 275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 276 % indent_string)
277 277
278 278 def showchunks(named):
279 279 ui.write("\n%s%s\n" % (indent_string, named))
280 280 for deltadata in gen.deltaiter():
281 281 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 282 ui.write("%s%s %s %s %s %s %d\n" %
283 283 (indent_string, hex(node), hex(p1), hex(p2),
284 284 hex(cs), hex(deltabase), len(delta)))
285 285
286 286 chunkdata = gen.changelogheader()
287 287 showchunks("changelog")
288 288 chunkdata = gen.manifestheader()
289 289 showchunks("manifest")
290 290 for chunkdata in iter(gen.filelogheader, {}):
291 291 fname = chunkdata['filename']
292 292 showchunks(fname)
293 293 else:
294 294 if isinstance(gen, bundle2.unbundle20):
295 295 raise error.Abort(_('use debugbundle2 for this file'))
296 296 chunkdata = gen.changelogheader()
297 297 for deltadata in gen.deltaiter():
298 298 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 299 ui.write("%s%s\n" % (indent_string, hex(node)))
300 300
301 301 def _debugobsmarkers(ui, part, indent=0, **opts):
302 302 """display version and markers contained in 'data'"""
303 303 opts = pycompat.byteskwargs(opts)
304 304 data = part.read()
305 305 indent_string = ' ' * indent
306 306 try:
307 307 version, markers = obsolete._readmarkers(data)
308 308 except error.UnknownVersion as exc:
309 309 msg = "%sunsupported version: %s (%d bytes)\n"
310 310 msg %= indent_string, exc.version, len(data)
311 311 ui.write(msg)
312 312 else:
313 313 msg = "%sversion: %d (%d bytes)\n"
314 314 msg %= indent_string, version, len(data)
315 315 ui.write(msg)
316 316 fm = ui.formatter('debugobsolete', opts)
317 317 for rawmarker in sorted(markers):
318 318 m = obsutil.marker(None, rawmarker)
319 319 fm.startitem()
320 320 fm.plain(indent_string)
321 321 cmdutil.showmarker(fm, m)
322 322 fm.end()
323 323
324 324 def _debugphaseheads(ui, data, indent=0):
325 325 """display the phase heads contained in 'data'"""
326 326 indent_string = ' ' * indent
327 327 headsbyphase = phases.binarydecode(data)
328 328 for phase in phases.allphases:
329 329 for head in headsbyphase[phase]:
330 330 ui.write(indent_string)
331 331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332 332
333 333 def _quasirepr(thing):
334 334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 335 return '{%s}' % (
336 336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 337 return pycompat.bytestr(repr(thing))
338 338
339 339 def _debugbundle2(ui, gen, all=None, **opts):
340 340 """lists the contents of a bundle2"""
341 341 if not isinstance(gen, bundle2.unbundle20):
342 342 raise error.Abort(_('not a bundle2 file'))
343 343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 344 parttypes = opts.get(r'part_type', [])
345 345 for part in gen.iterparts():
346 346 if parttypes and part.type not in parttypes:
347 347 continue
348 348 msg = '%s -- %s (mandatory: %r)\n'
349 349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 350 if part.type == 'changegroup':
351 351 version = part.params.get('version', '01')
352 352 cg = changegroup.getunbundler(version, part, 'UN')
353 353 if not ui.quiet:
354 354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 355 if part.type == 'obsmarkers':
356 356 if not ui.quiet:
357 357 _debugobsmarkers(ui, part, indent=4, **opts)
358 358 if part.type == 'phase-heads':
359 359 if not ui.quiet:
360 360 _debugphaseheads(ui, part, indent=4)
361 361
362 362 @command('debugbundle',
363 363 [('a', 'all', None, _('show all details')),
364 364 ('', 'part-type', [], _('show only the named part type')),
365 365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 366 _('FILE'),
367 367 norepo=True)
368 368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 369 """lists the contents of a bundle"""
370 370 with hg.openpath(ui, bundlepath) as f:
371 371 if spec:
372 372 spec = exchange.getbundlespec(ui, f)
373 373 ui.write('%s\n' % spec)
374 374 return
375 375
376 376 gen = exchange.readbundle(ui, f, bundlepath)
377 377 if isinstance(gen, bundle2.unbundle20):
378 378 return _debugbundle2(ui, gen, all=all, **opts)
379 379 _debugchangegroup(ui, gen, all=all, **opts)
380 380
381 381 @command('debugcapabilities',
382 382 [], _('PATH'),
383 383 norepo=True)
384 384 def debugcapabilities(ui, path, **opts):
385 385 """lists the capabilities of a remote peer"""
386 386 opts = pycompat.byteskwargs(opts)
387 387 peer = hg.peer(ui, opts, path)
388 388 caps = peer.capabilities()
389 389 ui.write(('Main capabilities:\n'))
390 390 for c in sorted(caps):
391 391 ui.write((' %s\n') % c)
392 392 b2caps = bundle2.bundle2caps(peer)
393 393 if b2caps:
394 394 ui.write(('Bundle2 capabilities:\n'))
395 395 for key, values in sorted(b2caps.iteritems()):
396 396 ui.write((' %s\n') % key)
397 397 for v in values:
398 398 ui.write((' %s\n') % v)
399 399
400 400 @command('debugcheckstate', [], '')
401 401 def debugcheckstate(ui, repo):
402 402 """validate the correctness of the current dirstate"""
403 403 parent1, parent2 = repo.dirstate.parents()
404 404 m1 = repo[parent1].manifest()
405 405 m2 = repo[parent2].manifest()
406 406 errors = 0
407 407 for f in repo.dirstate:
408 408 state = repo.dirstate[f]
409 409 if state in "nr" and f not in m1:
410 410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 411 errors += 1
412 412 if state in "a" and f in m1:
413 413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 414 errors += 1
415 415 if state in "m" and f not in m1 and f not in m2:
416 416 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 417 (f, state))
418 418 errors += 1
419 419 for f in m1:
420 420 state = repo.dirstate[f]
421 421 if state not in "nrm":
422 422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 423 errors += 1
424 424 if errors:
425 425 error = _(".hg/dirstate inconsistent with current parent's manifest")
426 426 raise error.Abort(error)
427 427
428 428 @command('debugcolor',
429 429 [('', 'style', None, _('show all configured styles'))],
430 430 'hg debugcolor')
431 431 def debugcolor(ui, repo, **opts):
432 432 """show available color, effects or style"""
433 433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 434 if opts.get(r'style'):
435 435 return _debugdisplaystyle(ui)
436 436 else:
437 437 return _debugdisplaycolor(ui)
438 438
439 439 def _debugdisplaycolor(ui):
440 440 ui = ui.copy()
441 441 ui._styles.clear()
442 442 for effect in color._activeeffects(ui).keys():
443 443 ui._styles[effect] = effect
444 444 if ui._terminfoparams:
445 445 for k, v in ui.configitems('color'):
446 446 if k.startswith('color.'):
447 447 ui._styles[k] = k[6:]
448 448 elif k.startswith('terminfo.'):
449 449 ui._styles[k] = k[9:]
450 450 ui.write(_('available colors:\n'))
451 451 # sort label with a '_' after the other to group '_background' entry.
452 452 items = sorted(ui._styles.items(),
453 453 key=lambda i: ('_' in i[0], i[0], i[1]))
454 454 for colorname, label in items:
455 455 ui.write(('%s\n') % colorname, label=label)
456 456
457 457 def _debugdisplaystyle(ui):
458 458 ui.write(_('available style:\n'))
459 459 if not ui._styles:
460 460 return
461 461 width = max(len(s) for s in ui._styles)
462 462 for label, effects in sorted(ui._styles.items()):
463 463 ui.write('%s' % label, label=label)
464 464 if effects:
465 465 # 50
466 466 ui.write(': ')
467 467 ui.write(' ' * (max(0, width - len(label))))
468 468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 469 ui.write('\n')
470 470
471 471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 472 def debugcreatestreamclonebundle(ui, repo, fname):
473 473 """create a stream clone bundle file
474 474
475 475 Stream bundles are special bundles that are essentially archives of
476 476 revlog files. They are commonly used for cloning very quickly.
477 477 """
478 478 # TODO we may want to turn this into an abort when this functionality
479 479 # is moved into `hg bundle`.
480 480 if phases.hassecret(repo):
481 481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 482 'revisions)\n'))
483 483
484 484 requirements, gen = streamclone.generatebundlev1(repo)
485 485 changegroup.writechunks(ui, gen, fname)
486 486
487 487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 488
489 489 @command('debugdag',
490 490 [('t', 'tags', None, _('use tags as labels')),
491 491 ('b', 'branches', None, _('annotate with branch names')),
492 492 ('', 'dots', None, _('use dots for runs')),
493 493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 494 _('[OPTION]... [FILE [REV]...]'),
495 495 optionalrepo=True)
496 496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 497 """format the changelog or an index DAG as a concise textual description
498 498
499 499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 500 revision numbers, they get labeled in the output as rN.
501 501
502 502 Otherwise, the changelog DAG of the current repo is emitted.
503 503 """
504 504 spaces = opts.get(r'spaces')
505 505 dots = opts.get(r'dots')
506 506 if file_:
507 507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 508 file_)
509 509 revs = set((int(r) for r in revs))
510 510 def events():
511 511 for r in rlog:
512 512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 513 if p != -1))
514 514 if r in revs:
515 515 yield 'l', (r, "r%i" % r)
516 516 elif repo:
517 517 cl = repo.changelog
518 518 tags = opts.get(r'tags')
519 519 branches = opts.get(r'branches')
520 520 if tags:
521 521 labels = {}
522 522 for l, n in repo.tags().items():
523 523 labels.setdefault(cl.rev(n), []).append(l)
524 524 def events():
525 525 b = "default"
526 526 for r in cl:
527 527 if branches:
528 528 newb = cl.read(cl.node(r))[5]['branch']
529 529 if newb != b:
530 530 yield 'a', newb
531 531 b = newb
532 532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 533 if p != -1))
534 534 if tags:
535 535 ls = labels.get(r)
536 536 if ls:
537 537 for l in ls:
538 538 yield 'l', (r, l)
539 539 else:
540 540 raise error.Abort(_('need repo for changelog dag'))
541 541
542 542 for line in dagparser.dagtextlines(events(),
543 543 addspaces=spaces,
544 544 wraplabels=True,
545 545 wrapannotations=True,
546 546 wrapnonlinear=dots,
547 547 usedots=dots,
548 548 maxlinewidth=70):
549 549 ui.write(line)
550 550 ui.write("\n")
551 551
552 552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 553 def debugdata(ui, repo, file_, rev=None, **opts):
554 554 """dump the contents of a data file revision"""
555 555 opts = pycompat.byteskwargs(opts)
556 556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 557 if rev is not None:
558 558 raise error.CommandError('debugdata', _('invalid arguments'))
559 559 file_, rev = None, file_
560 560 elif rev is None:
561 561 raise error.CommandError('debugdata', _('invalid arguments'))
562 562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 563 try:
564 564 ui.write(r.revision(r.lookup(rev), raw=True))
565 565 except KeyError:
566 566 raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
568 568 @command('debugdate',
569 569 [('e', 'extended', None, _('try extended date formats'))],
570 570 _('[-e] DATE [RANGE]'),
571 571 norepo=True, optionalrepo=True)
572 572 def debugdate(ui, date, range=None, **opts):
573 573 """parse and display a date"""
574 574 if opts[r"extended"]:
575 575 d = dateutil.parsedate(date, util.extendeddateformats)
576 576 else:
577 577 d = dateutil.parsedate(date)
578 578 ui.write(("internal: %d %d\n") % d)
579 579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 580 if range:
581 581 m = dateutil.matchdate(range)
582 582 ui.write(("match: %s\n") % m(d[0]))
583 583
584 584 @command('debugdeltachain',
585 585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 586 _('-c|-m|FILE'),
587 587 optionalrepo=True)
588 588 def debugdeltachain(ui, repo, file_=None, **opts):
589 589 """dump information about delta chains in a revlog
590 590
591 591 Output can be templatized. Available template keywords are:
592 592
593 593 :``rev``: revision number
594 594 :``chainid``: delta chain identifier (numbered by unique base)
595 595 :``chainlen``: delta chain length to this revision
596 596 :``prevrev``: previous revision in delta chain
597 597 :``deltatype``: role of delta / how it was computed
598 598 :``compsize``: compressed size of revision
599 599 :``uncompsize``: uncompressed size of revision
600 600 :``chainsize``: total size of compressed revisions in chain
601 601 :``chainratio``: total chain size divided by uncompressed revision size
602 602 (new delta chains typically start at ratio 2.00)
603 603 :``lindist``: linear distance from base revision in delta chain to end
604 604 of this revision
605 605 :``extradist``: total size of revisions not part of this delta chain from
606 606 base of delta chain to end of this revision; a measurement
607 607 of how much extra data we need to read/seek across to read
608 608 the delta chain for this revision
609 609 :``extraratio``: extradist divided by chainsize; another representation of
610 610 how much unrelated data is needed to load this delta chain
611 611
612 612 If the repository is configured to use the sparse read, additional keywords
613 613 are available:
614 614
615 615 :``readsize``: total size of data read from the disk for a revision
616 616 (sum of the sizes of all the blocks)
617 617 :``largestblock``: size of the largest block of data read from the disk
618 618 :``readdensity``: density of useful bytes in the data read from the disk
619 619 :``srchunks``: in how many data hunks the whole revision would be read
620 620
621 621 The sparse read can be enabled with experimental.sparse-read = True
622 622 """
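    # Illustrative usage sketch (assumed invocation, not taken from this
    # file): the keywords above combine with the generic -T/--template
    # formatter option, e.g.
    #
    #     hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
    #
    # which prints one line per manifest revision with its delta chain id,
    # chain length and chain/uncompressed size ratio.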
623 623 opts = pycompat.byteskwargs(opts)
624 624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 625 index = r.index
626 626 start = r.start
627 627 length = r.length
628 628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 629 withsparseread = getattr(r, '_withsparseread', False)
630 630
631 631 def revinfo(rev):
632 632 e = index[rev]
633 633 compsize = e[1]
634 634 uncompsize = e[2]
635 635 chainsize = 0
636 636
637 637 if generaldelta:
638 638 if e[3] == e[5]:
639 639 deltatype = 'p1'
640 640 elif e[3] == e[6]:
641 641 deltatype = 'p2'
642 642 elif e[3] == rev - 1:
643 643 deltatype = 'prev'
644 644 elif e[3] == rev:
645 645 deltatype = 'base'
646 646 else:
647 647 deltatype = 'other'
648 648 else:
649 649 if e[3] == rev:
650 650 deltatype = 'base'
651 651 else:
652 652 deltatype = 'prev'
653 653
654 654 chain = r._deltachain(rev)[0]
655 655 for iterrev in chain:
656 656 e = index[iterrev]
657 657 chainsize += e[1]
658 658
659 659 return compsize, uncompsize, deltatype, chain, chainsize
660 660
661 661 fm = ui.formatter('debugdeltachain', opts)
662 662
663 663 fm.plain(' rev chain# chainlen prev delta '
664 664 'size rawsize chainsize ratio lindist extradist '
665 665 'extraratio')
666 666 if withsparseread:
667 667 fm.plain(' readsize largestblk rddensity srchunks')
668 668 fm.plain('\n')
669 669
670 670 chainbases = {}
671 671 for rev in r:
672 672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 673 chainbase = chain[0]
674 674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 675 basestart = start(chainbase)
676 676 revstart = start(rev)
677 677 lineardist = revstart + comp - basestart
678 678 extradist = lineardist - chainsize
679 679 try:
680 680 prevrev = chain[-2]
681 681 except IndexError:
682 682 prevrev = -1
683 683
684 684 if uncomp != 0:
685 685 chainratio = float(chainsize) / float(uncomp)
686 686 else:
687 687 chainratio = chainsize
688 688
689 689 if chainsize != 0:
690 690 extraratio = float(extradist) / float(chainsize)
691 691 else:
692 692 extraratio = extradist
693 693
694 694 fm.startitem()
695 695 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 696 'uncompsize chainsize chainratio lindist extradist '
697 697 'extraratio',
698 698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 699 rev, chainid, len(chain), prevrev, deltatype, comp,
700 700 uncomp, chainsize, chainratio, lineardist, extradist,
701 701 extraratio,
702 702 rev=rev, chainid=chainid, chainlen=len(chain),
703 703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 704 uncompsize=uncomp, chainsize=chainsize,
705 705 chainratio=chainratio, lindist=lineardist,
706 706 extradist=extradist, extraratio=extraratio)
707 707 if withsparseread:
708 708 readsize = 0
709 709 largestblock = 0
710 710 srchunks = 0
711 711
712 712 for revschunk in deltautil.slicechunk(r, chain):
713 713 srchunks += 1
714 714 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 715 blksize = blkend - start(revschunk[0])
716 716
717 717 readsize += blksize
718 718 if largestblock < blksize:
719 719 largestblock = blksize
720 720
721 721 if readsize:
722 722 readdensity = float(chainsize) / float(readsize)
723 723 else:
724 724 readdensity = 1
725 725
726 726 fm.write('readsize largestblock readdensity srchunks',
727 727 ' %10d %10d %9.5f %8d',
728 728 readsize, largestblock, readdensity, srchunks,
729 729 readsize=readsize, largestblock=largestblock,
730 730 readdensity=readdensity, srchunks=srchunks)
731 731
732 732 fm.plain('\n')
733 733
734 734 fm.end()
735 735
736 736 @command('debugdirstate|debugstate',
737 737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 738 ('', 'dates', True, _('display the saved mtime')),
739 739 ('', 'datesort', None, _('sort by saved mtime'))],
740 740 _('[OPTION]...'))
741 741 def debugstate(ui, repo, **opts):
742 742 """show the contents of the current dirstate"""
743 743
744 744 nodates = not opts[r'dates']
745 745 if opts.get(r'nodates') is not None:
746 746 nodates = True
747 747 datesort = opts.get(r'datesort')
748 748
749 749 if datesort:
750 750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 751 else:
752 752 keyfunc = None # sort by filename
753 753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 754 if ent[3] == -1:
755 755 timestr = 'unset '
756 756 elif nodates:
757 757 timestr = 'set '
758 758 else:
759 759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 760 time.localtime(ent[3]))
761 761 timestr = encoding.strtolocal(timestr)
762 762 if ent[1] & 0o20000:
763 763 mode = 'lnk'
764 764 else:
765 765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 767 for f in repo.dirstate.copies():
768 768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
770 770 @command('debugdiscovery',
771 771 [('', 'old', None, _('use old-style discovery')),
772 772 ('', 'nonheads', None,
773 773 _('use old-style discovery with non-heads included')),
774 774 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 775 ('', 'seed', '12323', 'specify the random seed use for discovery'),
776 776 ] + cmdutil.remoteopts,
777 777 _('[--rev REV] [OTHER]'))
778 778 def debugdiscovery(ui, repo, remoteurl="default", **opts):
779 779 """runs the changeset discovery protocol in isolation"""
780 780 opts = pycompat.byteskwargs(opts)
781 781 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
782 782 remote = hg.peer(repo, opts, remoteurl)
783 783 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
784 784
785 785 # make sure tests are repeatable
786 786 random.seed(int(opts['seed']))
787 787
788 def doit(pushedrevs, remoteheads, remote=remote):
789 if opts.get('old'):
788
789
790 if opts.get('old'):
791 def doit(pushedrevs, remoteheads, remote=remote):
790 792 if not util.safehasattr(remote, 'branches'):
791 793 # enable in-client legacy support
792 794 remote = localrepo.locallegacypeer(remote.local())
793 795 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
794 796 force=True)
795 797 common = set(common)
796 798 if not opts.get('nonheads'):
797 799 ui.write(("unpruned common: %s\n") %
798 800 " ".join(sorted(short(n) for n in common)))
799 801
800 802 clnode = repo.changelog.node
801 803 common = repo.revs('heads(::%ln)', common)
802 804 common = {clnode(r) for r in common}
803 else:
805 return common, hds
806 else:
807 def doit(pushedrevs, remoteheads, remote=remote):
804 808 nodes = None
805 809 if pushedrevs:
806 810 revs = scmutil.revrange(repo, pushedrevs)
807 811 nodes = [repo[r].node() for r in revs]
808 812 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
809 813 ancestorsof=nodes)
810 common = set(common)
811 rheads = set(hds)
812 lheads = set(repo.heads())
813 ui.write(("common heads: %s\n") %
814 " ".join(sorted(short(n) for n in common)))
815 if lheads <= common:
816 ui.write(("local is subset\n"))
817 elif rheads <= common:
818 ui.write(("remote is subset\n"))
814 return common, hds
819 815
820 816 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
821 817 localrevs = opts['rev']
822 doit(localrevs, remoterevs)
818 common, hds = doit(localrevs, remoterevs)
819
820 common = set(common)
821 rheads = set(hds)
822 lheads = set(repo.heads())
823 ui.write(("common heads: %s\n") %
824 " ".join(sorted(short(n) for n in common)))
825 if lheads <= common:
826 ui.write(("local is subset\n"))
827 elif rheads <= common:
828 ui.write(("remote is subset\n"))
823 829
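# Illustrative usage sketch (command lines and URL are assumptions, not part
# of this changeset): discovery can be made repeatable and limited to a
# subset of the local heads, e.g.
#
#     hg debugdiscovery --seed 1234 --rev 'heads(default)' https://example.com/repo
#     hg debugdiscovery --old ../other-clone
#
# Both forms end by printing the common heads and reporting whether the
# local or remote head set is a subset of them.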
824 830 _chunksize = 4 << 10
825 831
826 832 @command('debugdownload',
827 833 [
828 834 ('o', 'output', '', _('path')),
829 835 ],
830 836 optionalrepo=True)
831 837 def debugdownload(ui, repo, url, output=None, **opts):
832 838 """download a resource using Mercurial logic and config
833 839 """
834 840 fh = urlmod.open(ui, url, output)
835 841
836 842 dest = ui
837 843 if output:
838 844 dest = open(output, "wb", _chunksize)
839 845 try:
840 846 data = fh.read(_chunksize)
841 847 while data:
842 848 dest.write(data)
843 849 data = fh.read(_chunksize)
844 850 finally:
845 851 if output:
846 852 dest.close()
847 853
848 854 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
849 855 def debugextensions(ui, repo, **opts):
850 856 '''show information about active extensions'''
851 857 opts = pycompat.byteskwargs(opts)
852 858 exts = extensions.extensions(ui)
853 859 hgver = util.version()
854 860 fm = ui.formatter('debugextensions', opts)
855 861 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
856 862 isinternal = extensions.ismoduleinternal(extmod)
857 863 extsource = pycompat.fsencode(extmod.__file__)
858 864 if isinternal:
859 865 exttestedwith = [] # never expose magic string to users
860 866 else:
861 867 exttestedwith = getattr(extmod, 'testedwith', '').split()
862 868 extbuglink = getattr(extmod, 'buglink', None)
863 869
864 870 fm.startitem()
865 871
866 872 if ui.quiet or ui.verbose:
867 873 fm.write('name', '%s\n', extname)
868 874 else:
869 875 fm.write('name', '%s', extname)
870 876 if isinternal or hgver in exttestedwith:
871 877 fm.plain('\n')
872 878 elif not exttestedwith:
873 879 fm.plain(_(' (untested!)\n'))
874 880 else:
875 881 lasttestedversion = exttestedwith[-1]
876 882 fm.plain(' (%s!)\n' % lasttestedversion)
877 883
878 884 fm.condwrite(ui.verbose and extsource, 'source',
879 885 _(' location: %s\n'), extsource or "")
880 886
881 887 if ui.verbose:
882 888 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
883 889 fm.data(bundled=isinternal)
884 890
885 891 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
886 892 _(' tested with: %s\n'),
887 893 fm.formatlist(exttestedwith, name='ver'))
888 894
889 895 fm.condwrite(ui.verbose and extbuglink, 'buglink',
890 896 _(' bug reporting: %s\n'), extbuglink or "")
891 897
892 898 fm.end()
893 899
894 900 @command('debugfileset',
895 901 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
896 902 ('', 'all-files', False,
897 903 _('test files from all revisions and working directory')),
898 904 ('s', 'show-matcher', None,
899 905 _('print internal representation of matcher')),
900 906 ('p', 'show-stage', [],
901 907 _('print parsed tree at the given stage'), _('NAME'))],
902 908 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
903 909 def debugfileset(ui, repo, expr, **opts):
904 910 '''parse and apply a fileset specification'''
905 911 from . import fileset
906 912 fileset.symbols # force import of fileset so we have predicates to optimize
907 913 opts = pycompat.byteskwargs(opts)
908 914 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
909 915
910 916 stages = [
911 917 ('parsed', pycompat.identity),
912 918 ('analyzed', filesetlang.analyze),
913 919 ('optimized', filesetlang.optimize),
914 920 ]
915 921 stagenames = set(n for n, f in stages)
916 922
917 923 showalways = set()
918 924 if ui.verbose and not opts['show_stage']:
919 925 # show parsed tree by --verbose (deprecated)
920 926 showalways.add('parsed')
921 927 if opts['show_stage'] == ['all']:
922 928 showalways.update(stagenames)
923 929 else:
924 930 for n in opts['show_stage']:
925 931 if n not in stagenames:
926 932 raise error.Abort(_('invalid stage name: %s') % n)
927 933 showalways.update(opts['show_stage'])
928 934
929 935 tree = filesetlang.parse(expr)
930 936 for n, f in stages:
931 937 tree = f(tree)
932 938 if n in showalways:
933 939 if opts['show_stage'] or n != 'parsed':
934 940 ui.write(("* %s:\n") % n)
935 941 ui.write(filesetlang.prettyformat(tree), "\n")
936 942
937 943 files = set()
938 944 if opts['all_files']:
939 945 for r in repo:
940 946 c = repo[r]
941 947 files.update(c.files())
942 948 files.update(c.substate)
943 949 if opts['all_files'] or ctx.rev() is None:
944 950 wctx = repo[None]
945 951 files.update(repo.dirstate.walk(scmutil.matchall(repo),
946 952 subrepos=list(wctx.substate),
947 953 unknown=True, ignored=True))
948 954 files.update(wctx.substate)
949 955 else:
950 956 files.update(ctx.files())
951 957 files.update(ctx.substate)
952 958
953 959 m = ctx.matchfileset(expr)
954 960 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
955 961 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
956 962 for f in sorted(files):
957 963 if not m(f):
958 964 continue
959 965 ui.write("%s\n" % f)
960 966
961 967 @command('debugformat',
962 968 [] + cmdutil.formatteropts)
963 969 def debugformat(ui, repo, **opts):
964 970 """display format information about the current repository
965 971
966 972 Use --verbose to get extra information about current config value and
967 973 Mercurial default."""
968 974 opts = pycompat.byteskwargs(opts)
969 975 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
970 976 maxvariantlength = max(len('format-variant'), maxvariantlength)
971 977
972 978 def makeformatname(name):
973 979 return '%s:' + (' ' * (maxvariantlength - len(name)))
974 980
975 981 fm = ui.formatter('debugformat', opts)
976 982 if fm.isplain():
977 983 def formatvalue(value):
978 984 if util.safehasattr(value, 'startswith'):
979 985 return value
980 986 if value:
981 987 return 'yes'
982 988 else:
983 989 return 'no'
984 990 else:
985 991 formatvalue = pycompat.identity
986 992
987 993 fm.plain('format-variant')
988 994 fm.plain(' ' * (maxvariantlength - len('format-variant')))
989 995 fm.plain(' repo')
990 996 if ui.verbose:
991 997 fm.plain(' config default')
992 998 fm.plain('\n')
993 999 for fv in upgrade.allformatvariant:
994 1000 fm.startitem()
995 1001 repovalue = fv.fromrepo(repo)
996 1002 configvalue = fv.fromconfig(repo)
997 1003
998 1004 if repovalue != configvalue:
999 1005 namelabel = 'formatvariant.name.mismatchconfig'
1000 1006 repolabel = 'formatvariant.repo.mismatchconfig'
1001 1007 elif repovalue != fv.default:
1002 1008 namelabel = 'formatvariant.name.mismatchdefault'
1003 1009 repolabel = 'formatvariant.repo.mismatchdefault'
1004 1010 else:
1005 1011 namelabel = 'formatvariant.name.uptodate'
1006 1012 repolabel = 'formatvariant.repo.uptodate'
1007 1013
1008 1014 fm.write('name', makeformatname(fv.name), fv.name,
1009 1015 label=namelabel)
1010 1016 fm.write('repo', ' %3s', formatvalue(repovalue),
1011 1017 label=repolabel)
1012 1018 if fv.default != configvalue:
1013 1019 configlabel = 'formatvariant.config.special'
1014 1020 else:
1015 1021 configlabel = 'formatvariant.config.default'
1016 1022 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1017 1023 label=configlabel)
1018 1024 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1019 1025 label='formatvariant.default')
1020 1026 fm.plain('\n')
1021 1027 fm.end()
1022 1028
1023 1029 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1024 1030 def debugfsinfo(ui, path="."):
1025 1031 """show information detected about current filesystem"""
1026 1032 ui.write(('path: %s\n') % path)
1027 1033 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1028 1034 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1029 1035 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1030 1036 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1031 1037 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1032 1038 casesensitive = '(unknown)'
1033 1039 try:
1034 1040 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1035 1041 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1036 1042 except OSError:
1037 1043 pass
1038 1044 ui.write(('case-sensitive: %s\n') % casesensitive)
1039 1045
1040 1046 @command('debuggetbundle',
1041 1047 [('H', 'head', [], _('id of head node'), _('ID')),
1042 1048 ('C', 'common', [], _('id of common node'), _('ID')),
1043 1049 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1044 1050 _('REPO FILE [-H|-C ID]...'),
1045 1051 norepo=True)
1046 1052 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1047 1053 """retrieves a bundle from a repo
1048 1054
1049 1055 Every ID must be a full-length hex node id string. Saves the bundle to the
1050 1056 given file.
1051 1057 """
1052 1058 opts = pycompat.byteskwargs(opts)
1053 1059 repo = hg.peer(ui, opts, repopath)
1054 1060 if not repo.capable('getbundle'):
1055 1061 raise error.Abort("getbundle() not supported by target repository")
1056 1062 args = {}
1057 1063 if common:
1058 1064 args[r'common'] = [bin(s) for s in common]
1059 1065 if head:
1060 1066 args[r'heads'] = [bin(s) for s in head]
1061 1067 # TODO: get desired bundlecaps from command line.
1062 1068 args[r'bundlecaps'] = None
1063 1069 bundle = repo.getbundle('debug', **args)
1064 1070
1065 1071 bundletype = opts.get('type', 'bzip2').lower()
1066 1072 btypes = {'none': 'HG10UN',
1067 1073 'bzip2': 'HG10BZ',
1068 1074 'gzip': 'HG10GZ',
1069 1075 'bundle2': 'HG20'}
1070 1076 bundletype = btypes.get(bundletype)
1071 1077 if bundletype not in bundle2.bundletypes:
1072 1078 raise error.Abort(_('unknown bundle type specified with --type'))
1073 1079 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1074 1080
1075 1081 @command('debugignore', [], '[FILE]')
1076 1082 def debugignore(ui, repo, *files, **opts):
1077 1083 """display the combined ignore pattern and information about ignored files
1078 1084
1079 1085 With no argument display the combined ignore pattern.
1080 1086
1081 1087 Given space separated file names, shows if the given file is ignored and
1082 1088 if so, show the ignore rule (file and line number) that matched it.
1083 1089 """
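    # Illustrative usage sketch (file names are assumptions):
    #
    #     hg debugignore                 # print the combined ignore pattern
    #     hg debugignore build/ foo.pyc  # explain why each path is ignored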
1084 1090 ignore = repo.dirstate._ignore
1085 1091 if not files:
1086 1092 # Show all the patterns
1087 1093 ui.write("%s\n" % pycompat.byterepr(ignore))
1088 1094 else:
1089 1095 m = scmutil.match(repo[None], pats=files)
1090 1096 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1091 1097 for f in m.files():
1092 1098 nf = util.normpath(f)
1093 1099 ignored = None
1094 1100 ignoredata = None
1095 1101 if nf != '.':
1096 1102 if ignore(nf):
1097 1103 ignored = nf
1098 1104 ignoredata = repo.dirstate._ignorefileandline(nf)
1099 1105 else:
1100 1106 for p in util.finddirs(nf):
1101 1107 if ignore(p):
1102 1108 ignored = p
1103 1109 ignoredata = repo.dirstate._ignorefileandline(p)
1104 1110 break
1105 1111 if ignored:
1106 1112 if ignored == nf:
1107 1113 ui.write(_("%s is ignored\n") % uipathfn(f))
1108 1114 else:
1109 1115 ui.write(_("%s is ignored because of "
1110 1116 "containing folder %s\n")
1111 1117 % (uipathfn(f), ignored))
1112 1118 ignorefile, lineno, line = ignoredata
1113 1119 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1114 1120 % (ignorefile, lineno, line))
1115 1121 else:
1116 1122 ui.write(_("%s is not ignored\n") % uipathfn(f))
1117 1123
1118 1124 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1119 1125 _('-c|-m|FILE'))
1120 1126 def debugindex(ui, repo, file_=None, **opts):
1121 1127 """dump index data for a storage primitive"""
1122 1128 opts = pycompat.byteskwargs(opts)
1123 1129 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1124 1130
1125 1131 if ui.debugflag:
1126 1132 shortfn = hex
1127 1133 else:
1128 1134 shortfn = short
1129 1135
1130 1136 idlen = 12
1131 1137 for i in store:
1132 1138 idlen = len(shortfn(store.node(i)))
1133 1139 break
1134 1140
1135 1141 fm = ui.formatter('debugindex', opts)
1136 1142 fm.plain(b' rev linkrev %s %s p2\n' % (
1137 1143 b'nodeid'.ljust(idlen),
1138 1144 b'p1'.ljust(idlen)))
1139 1145
1140 1146 for rev in store:
1141 1147 node = store.node(rev)
1142 1148 parents = store.parents(node)
1143 1149
1144 1150 fm.startitem()
1145 1151 fm.write(b'rev', b'%6d ', rev)
1146 1152 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1147 1153 fm.write(b'node', '%s ', shortfn(node))
1148 1154 fm.write(b'p1', '%s ', shortfn(parents[0]))
1149 1155 fm.write(b'p2', '%s', shortfn(parents[1]))
1150 1156 fm.plain(b'\n')
1151 1157
1152 1158 fm.end()
1153 1159
1154 1160 @command('debugindexdot', cmdutil.debugrevlogopts,
1155 1161 _('-c|-m|FILE'), optionalrepo=True)
1156 1162 def debugindexdot(ui, repo, file_=None, **opts):
1157 1163 """dump an index DAG as a graphviz dot file"""
1158 1164 opts = pycompat.byteskwargs(opts)
1159 1165 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1160 1166 ui.write(("digraph G {\n"))
1161 1167 for i in r:
1162 1168 node = r.node(i)
1163 1169 pp = r.parents(node)
1164 1170 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1165 1171 if pp[1] != nullid:
1166 1172 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1167 1173 ui.write("}\n")
1168 1174
1169 1175 @command('debugindexstats', [])
1170 1176 def debugindexstats(ui, repo):
1171 1177 """show stats related to the changelog index"""
1172 1178 repo.changelog.shortest(nullid, 1)
1173 1179 index = repo.changelog.index
1174 1180 if not util.safehasattr(index, 'stats'):
1175 1181 raise error.Abort(_('debugindexstats only works with native code'))
1176 1182 for k, v in sorted(index.stats().items()):
1177 1183 ui.write('%s: %d\n' % (k, v))
1178 1184
1179 1185 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1180 1186 def debuginstall(ui, **opts):
1181 1187 '''test Mercurial installation
1182 1188
1183 1189 Returns 0 on success.
1184 1190 '''
1185 1191 opts = pycompat.byteskwargs(opts)
1186 1192
1187 1193 problems = 0
1188 1194
1189 1195 fm = ui.formatter('debuginstall', opts)
1190 1196 fm.startitem()
1191 1197
1192 1198 # encoding
1193 1199 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1194 1200 err = None
1195 1201 try:
1196 1202 codecs.lookup(pycompat.sysstr(encoding.encoding))
1197 1203 except LookupError as inst:
1198 1204 err = stringutil.forcebytestr(inst)
1199 1205 problems += 1
1200 1206 fm.condwrite(err, 'encodingerror', _(" %s\n"
1201 1207 " (check that your locale is properly set)\n"), err)
1202 1208
1203 1209 # Python
1204 1210 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1205 1211 pycompat.sysexecutable)
1206 1212 fm.write('pythonver', _("checking Python version (%s)\n"),
1207 1213 ("%d.%d.%d" % sys.version_info[:3]))
1208 1214 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1209 1215 os.path.dirname(pycompat.fsencode(os.__file__)))
1210 1216
1211 1217 security = set(sslutil.supportedprotocols)
1212 1218 if sslutil.hassni:
1213 1219 security.add('sni')
1214 1220
1215 1221 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1216 1222 fm.formatlist(sorted(security), name='protocol',
1217 1223 fmt='%s', sep=','))
1218 1224
1219 1225 # These are warnings, not errors. So don't increment problem count. This
1220 1226 # may change in the future.
1221 1227 if 'tls1.2' not in security:
1222 1228 fm.plain(_(' TLS 1.2 not supported by Python install; '
1223 1229 'network connections lack modern security\n'))
1224 1230 if 'sni' not in security:
1225 1231 fm.plain(_(' SNI not supported by Python install; may have '
1226 1232 'connectivity issues with some servers\n'))
1227 1233
1228 1234 # TODO print CA cert info
1229 1235
1230 1236 # hg version
1231 1237 hgver = util.version()
1232 1238 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1233 1239 hgver.split('+')[0])
1234 1240 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1235 1241 '+'.join(hgver.split('+')[1:]))
1236 1242
1237 1243 # compiled modules
1238 1244 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1239 1245 policy.policy)
1240 1246 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1241 1247 os.path.dirname(pycompat.fsencode(__file__)))
1242 1248
1243 1249 if policy.policy in ('c', 'allow'):
1244 1250 err = None
1245 1251 try:
1246 1252 from .cext import (
1247 1253 base85,
1248 1254 bdiff,
1249 1255 mpatch,
1250 1256 osutil,
1251 1257 )
1252 1258 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1253 1259 except Exception as inst:
1254 1260 err = stringutil.forcebytestr(inst)
1255 1261 problems += 1
1256 1262 fm.condwrite(err, 'extensionserror', " %s\n", err)
1257 1263
1258 1264 compengines = util.compengines._engines.values()
1259 1265 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1260 1266 fm.formatlist(sorted(e.name() for e in compengines),
1261 1267 name='compengine', fmt='%s', sep=', '))
1262 1268 fm.write('compenginesavail', _('checking available compression engines '
1263 1269 '(%s)\n'),
1264 1270 fm.formatlist(sorted(e.name() for e in compengines
1265 1271 if e.available()),
1266 1272 name='compengine', fmt='%s', sep=', '))
1267 1273 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1268 1274 fm.write('compenginesserver', _('checking available compression engines '
1269 1275 'for wire protocol (%s)\n'),
1270 1276 fm.formatlist([e.name() for e in wirecompengines
1271 1277 if e.wireprotosupport()],
1272 1278 name='compengine', fmt='%s', sep=', '))
1273 1279 re2 = 'missing'
1274 1280 if util._re2:
1275 1281 re2 = 'available'
1276 1282 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1277 1283 fm.data(re2=bool(util._re2))
1278 1284
1279 1285 # templates
1280 1286 p = templater.templatepaths()
1281 1287 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1282 1288 fm.condwrite(not p, '', _(" no template directories found\n"))
1283 1289 if p:
1284 1290 m = templater.templatepath("map-cmdline.default")
1285 1291 if m:
1286 1292 # template found, check if it is working
1287 1293 err = None
1288 1294 try:
1289 1295 templater.templater.frommapfile(m)
1290 1296 except Exception as inst:
1291 1297 err = stringutil.forcebytestr(inst)
1292 1298 p = None
1293 1299 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1294 1300 else:
1295 1301 p = None
1296 1302 fm.condwrite(p, 'defaulttemplate',
1297 1303 _("checking default template (%s)\n"), m)
1298 1304 fm.condwrite(not m, 'defaulttemplatenotfound',
1299 1305 _(" template '%s' not found\n"), "default")
1300 1306 if not p:
1301 1307 problems += 1
1302 1308 fm.condwrite(not p, '',
1303 1309 _(" (templates seem to have been installed incorrectly)\n"))
1304 1310
1305 1311 # editor
1306 1312 editor = ui.geteditor()
1307 1313 editor = util.expandpath(editor)
1308 1314 editorbin = procutil.shellsplit(editor)[0]
1309 1315 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1310 1316 cmdpath = procutil.findexe(editorbin)
1311 1317 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1312 1318 _(" No commit editor set and can't find %s in PATH\n"
1313 1319 " (specify a commit editor in your configuration"
1314 1320 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1315 1321 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1316 1322 _(" Can't find editor '%s' in PATH\n"
1317 1323 " (specify a commit editor in your configuration"
1318 1324 " file)\n"), not cmdpath and editorbin)
1319 1325 if not cmdpath and editor != 'vi':
1320 1326 problems += 1
1321 1327
1322 1328 # check username
1323 1329 username = None
1324 1330 err = None
1325 1331 try:
1326 1332 username = ui.username()
1327 1333 except error.Abort as e:
1328 1334 err = stringutil.forcebytestr(e)
1329 1335 problems += 1
1330 1336
1331 1337 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1332 1338 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1333 1339 " (specify a username in your configuration file)\n"), err)
1334 1340
1335 1341 fm.condwrite(not problems, '',
1336 1342 _("no problems detected\n"))
1337 1343 if not problems:
1338 1344 fm.data(problems=problems)
1339 1345 fm.condwrite(problems, 'problems',
1340 1346 _("%d problems detected,"
1341 1347 " please check your install!\n"), problems)
1342 1348 fm.end()
1343 1349
1344 1350 return problems
1345 1351
1346 1352 @command('debugknown', [], _('REPO ID...'), norepo=True)
1347 1353 def debugknown(ui, repopath, *ids, **opts):
1348 1354 """test whether node ids are known to a repo
1349 1355
1350 1356 Every ID must be a full-length hex node id string. Returns a list of 0s
1351 1357 and 1s indicating unknown/known.
1352 1358 """
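    # Illustrative usage sketch (the node ids are placeholders for real
    # 40-character hex ids):
    #
    #     hg debugknown https://example.com/repo <node1> <node2>
    #
    # would print e.g. "10" if only the first node is known to the remote.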
1353 1359 opts = pycompat.byteskwargs(opts)
1354 1360 repo = hg.peer(ui, opts, repopath)
1355 1361 if not repo.capable('known'):
1356 1362 raise error.Abort("known() not supported by target repository")
1357 1363 flags = repo.known([bin(s) for s in ids])
1358 1364 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1359 1365
1360 1366 @command('debuglabelcomplete', [], _('LABEL...'))
1361 1367 def debuglabelcomplete(ui, repo, *args):
1362 1368 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1363 1369 debugnamecomplete(ui, repo, *args)
1364 1370
1365 1371 @command('debuglocks',
1366 1372 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1367 1373 ('W', 'force-wlock', None,
1368 1374 _('free the working state lock (DANGEROUS)')),
1369 1375 ('s', 'set-lock', None, _('set the store lock until stopped')),
1370 1376 ('S', 'set-wlock', None,
1371 1377 _('set the working state lock until stopped'))],
1372 1378 _('[OPTION]...'))
1373 1379 def debuglocks(ui, repo, **opts):
1374 1380 """show or modify state of locks
1375 1381
1376 1382 By default, this command will show which locks are held. This
1377 1383 includes the user and process holding the lock, the amount of time
1378 1384 the lock has been held, and the machine name where the process is
1379 1385 running if it's not local.
1380 1386
1381 1387 Locks protect the integrity of Mercurial's data, so should be
1382 1388 treated with care. System crashes or other interruptions may cause
1383 1389 locks to not be properly released, though Mercurial will usually
1384 1390 detect and remove such stale locks automatically.
1385 1391
1386 1392 However, detecting stale locks may not always be possible (for
1387 1393 instance, on a shared filesystem). Removing locks may also be
1388 1394 blocked by filesystem permissions.
1389 1395
1390 1396 Setting a lock will prevent other commands from changing the data.
1391 1397 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1392 1398 The set locks are removed when the command exits.
1393 1399
1394 1400 Returns 0 if no locks are held.
1395 1401
1396 1402 """
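    # Illustrative usage sketch (assumed invocations):
    #
    #     hg debuglocks              # report which locks are currently held
    #     hg debuglocks --set-wlock  # hold the wlock until interrupted
    #     hg debuglocks -W           # force-free a stale wlock (DANGEROUS)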
1397 1403
1398 1404 if opts.get(r'force_lock'):
1399 1405 repo.svfs.unlink('lock')
1400 1406 if opts.get(r'force_wlock'):
1401 1407 repo.vfs.unlink('wlock')
1402 1408 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1403 1409 return 0
1404 1410
1405 1411 locks = []
1406 1412 try:
1407 1413 if opts.get(r'set_wlock'):
1408 1414 try:
1409 1415 locks.append(repo.wlock(False))
1410 1416 except error.LockHeld:
1411 1417 raise error.Abort(_('wlock is already held'))
1412 1418 if opts.get(r'set_lock'):
1413 1419 try:
1414 1420 locks.append(repo.lock(False))
1415 1421 except error.LockHeld:
1416 1422 raise error.Abort(_('lock is already held'))
1417 1423 if len(locks):
1418 1424 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1419 1425 return 0
1420 1426 finally:
1421 1427 release(*locks)
1422 1428
1423 1429 now = time.time()
1424 1430 held = 0
1425 1431
1426 1432 def report(vfs, name, method):
1427 1433 # this causes stale locks to get reaped for more accurate reporting
1428 1434 try:
1429 1435 l = method(False)
1430 1436 except error.LockHeld:
1431 1437 l = None
1432 1438
1433 1439 if l:
1434 1440 l.release()
1435 1441 else:
1436 1442 try:
1437 1443 st = vfs.lstat(name)
1438 1444 age = now - st[stat.ST_MTIME]
1439 1445 user = util.username(st.st_uid)
1440 1446 locker = vfs.readlock(name)
1441 1447 if ":" in locker:
1442 1448 host, pid = locker.split(':')
1443 1449 if host == socket.gethostname():
1444 1450 locker = 'user %s, process %s' % (user or b'None', pid)
1445 1451 else:
1446 1452 locker = ('user %s, process %s, host %s'
1447 1453 % (user or b'None', pid, host))
1448 1454 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1449 1455 return 1
1450 1456 except OSError as e:
1451 1457 if e.errno != errno.ENOENT:
1452 1458 raise
1453 1459
1454 1460 ui.write(("%-6s free\n") % (name + ":"))
1455 1461 return 0
1456 1462
1457 1463 held += report(repo.svfs, "lock", repo.lock)
1458 1464 held += report(repo.vfs, "wlock", repo.wlock)
1459 1465
1460 1466 return held
1461 1467
1462 1468 @command('debugmanifestfulltextcache', [
1463 1469 ('', 'clear', False, _('clear the cache')),
1464 1470 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1465 1471 _('NODE'))
1466 1472 ], '')
1467 1473 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1468 1474 """show, clear or amend the contents of the manifest fulltext cache"""
1469 1475
1470 1476 def getcache():
1471 1477 r = repo.manifestlog.getstorage(b'')
1472 1478 try:
1473 1479 return r._fulltextcache
1474 1480 except AttributeError:
1475 1481 msg = _("Current revlog implementation doesn't appear to have a "
1476 1482 "manifest fulltext cache\n")
1477 1483 raise error.Abort(msg)
1478 1484
1479 1485 if opts.get(r'clear'):
1480 1486 with repo.wlock():
1481 1487 cache = getcache()
1482 1488 cache.clear(clear_persisted_data=True)
1483 1489 return
1484 1490
1485 1491 if add:
1486 1492 with repo.wlock():
1487 1493 m = repo.manifestlog
1488 1494 store = m.getstorage(b'')
1489 1495 for n in add:
1490 1496 try:
1491 1497 manifest = m[store.lookup(n)]
1492 1498 except error.LookupError as e:
1493 1499 raise error.Abort(e, hint="Check your manifest node id")
1494 1500 manifest.read() # stores revision in cache too
1495 1501 return
1496 1502
1497 1503 cache = getcache()
1498 1504 if not len(cache):
1499 1505 ui.write(_('cache empty\n'))
1500 1506 else:
1501 1507 ui.write(
1502 1508 _('cache contains %d manifest entries, in order of most to '
1503 1509 'least recent:\n') % (len(cache),))
1504 1510 totalsize = 0
1505 1511 for nodeid in cache:
1506 1512 # Use cache.get to not update the LRU order
1507 1513 data = cache.peek(nodeid)
1508 1514 size = len(data)
1509 1515 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1510 1516 ui.write(_('id: %s, size %s\n') % (
1511 1517 hex(nodeid), util.bytecount(size)))
1512 1518 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1513 1519 ui.write(
1514 1520 _('total cache data size %s, on-disk %s\n') % (
1515 1521 util.bytecount(totalsize), util.bytecount(ondisk))
1516 1522 )
1517 1523
1518 1524 @command('debugmergestate', [], '')
1519 1525 def debugmergestate(ui, repo, *args):
1520 1526 """print merge state
1521 1527
1522 1528 Use --verbose to print out information about whether v1 or v2 merge state
1523 1529 was chosen."""
1524 1530 def _hashornull(h):
1525 1531 if h == nullhex:
1526 1532 return 'null'
1527 1533 else:
1528 1534 return h
1529 1535
1530 1536 def printrecords(version):
1531 1537 ui.write(('* version %d records\n') % version)
1532 1538 if version == 1:
1533 1539 records = v1records
1534 1540 else:
1535 1541 records = v2records
1536 1542
1537 1543 for rtype, record in records:
1538 1544 # pretty print some record types
1539 1545 if rtype == 'L':
1540 1546 ui.write(('local: %s\n') % record)
1541 1547 elif rtype == 'O':
1542 1548 ui.write(('other: %s\n') % record)
1543 1549 elif rtype == 'm':
1544 1550 driver, mdstate = record.split('\0', 1)
1545 1551 ui.write(('merge driver: %s (state "%s")\n')
1546 1552 % (driver, mdstate))
1547 1553 elif rtype in 'FDC':
1548 1554 r = record.split('\0')
1549 1555 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1550 1556 if version == 1:
1551 1557 onode = 'not stored in v1 format'
1552 1558 flags = r[7]
1553 1559 else:
1554 1560 onode, flags = r[7:9]
1555 1561 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1556 1562 % (f, rtype, state, _hashornull(hash)))
1557 1563 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1558 1564 ui.write((' ancestor path: %s (node %s)\n')
1559 1565 % (afile, _hashornull(anode)))
1560 1566 ui.write((' other path: %s (node %s)\n')
1561 1567 % (ofile, _hashornull(onode)))
1562 1568 elif rtype == 'f':
1563 1569 filename, rawextras = record.split('\0', 1)
1564 1570 extras = rawextras.split('\0')
1565 1571 i = 0
1566 1572 extrastrings = []
1567 1573 while i < len(extras):
1568 1574 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1569 1575 i += 2
1570 1576
1571 1577 ui.write(('file extras: %s (%s)\n')
1572 1578 % (filename, ', '.join(extrastrings)))
1573 1579 elif rtype == 'l':
1574 1580 labels = record.split('\0', 2)
1575 1581 labels = [l for l in labels if len(l) > 0]
1576 1582 ui.write(('labels:\n'))
1577 1583 ui.write((' local: %s\n' % labels[0]))
1578 1584 ui.write((' other: %s\n' % labels[1]))
1579 1585 if len(labels) > 2:
1580 1586 ui.write((' base: %s\n' % labels[2]))
1581 1587 else:
1582 1588 ui.write(('unrecognized entry: %s\t%s\n')
1583 1589 % (rtype, record.replace('\0', '\t')))
1584 1590
1585 1591 # Avoid mergestate.read() since it may raise an exception for unsupported
1586 1592 # merge state records. We shouldn't be doing this, but this is OK since this
1587 1593 # command is pretty low-level.
1588 1594 ms = mergemod.mergestate(repo)
1589 1595
1590 1596 # sort so that reasonable information is on top
1591 1597 v1records = ms._readrecordsv1()
1592 1598 v2records = ms._readrecordsv2()
1593 1599 order = 'LOml'
1594 1600 def key(r):
1595 1601 idx = order.find(r[0])
1596 1602 if idx == -1:
1597 1603 return (1, r[1])
1598 1604 else:
1599 1605 return (0, idx)
1600 1606 v1records.sort(key=key)
1601 1607 v2records.sort(key=key)
1602 1608
1603 1609 if not v1records and not v2records:
1604 1610 ui.write(('no merge state found\n'))
1605 1611 elif not v2records:
1606 1612 ui.note(('no version 2 merge state\n'))
1607 1613 printrecords(1)
1608 1614 elif ms._v1v2match(v1records, v2records):
1609 1615 ui.note(('v1 and v2 states match: using v2\n'))
1610 1616 printrecords(2)
1611 1617 else:
1612 1618 ui.note(('v1 and v2 states mismatch: using v1\n'))
1613 1619 printrecords(1)
1614 1620 if ui.verbose:
1615 1621 printrecords(2)
1616 1622
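# Editor's illustration (hypothetical data, not upstream code): the 'F'/'D'/'C'
# records decoded by printrecords() above are NUL-separated fields.  A v2-style
# record could look like:
#
#     b'\x00'.join([b'path/file', b'u', b'<hash>', b'path/local', b'path/anc',
#                   b'<ancestor-node>', b'path/other', b'<other-node>', b''])
#
# i.e. file, state, hash, local path, ancestor path, ancestor node, other path,
# other node, flags -- matching the r[0:7] and r[7:9] slicing above.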
1617 1623 @command('debugnamecomplete', [], _('NAME...'))
1618 1624 def debugnamecomplete(ui, repo, *args):
1619 1625 '''complete "names" - tags, open branch names, bookmark names'''
1620 1626
1621 1627 names = set()
1622 1628 # since we previously only listed open branches, we will handle that
1623 1629 # specially (after this for loop)
1624 1630 for name, ns in repo.names.iteritems():
1625 1631 if name != 'branches':
1626 1632 names.update(ns.listnames(repo))
1627 1633 names.update(tag for (tag, heads, tip, closed)
1628 1634 in repo.branchmap().iterbranches() if not closed)
1629 1635 completions = set()
1630 1636 if not args:
1631 1637 args = ['']
1632 1638 for a in args:
1633 1639 completions.update(n for n in names if n.startswith(a))
1634 1640 ui.write('\n'.join(sorted(completions)))
1635 1641 ui.write('\n')
1636 1642
1637 1643 @command('debugobsolete',
1638 1644 [('', 'flags', 0, _('markers flag')),
1639 1645 ('', 'record-parents', False,
1640 1646 _('record parent information for the precursor')),
1641 1647 ('r', 'rev', [], _('display markers relevant to REV')),
1642 1648 ('', 'exclusive', False, _('restrict display to markers only '
1643 1649 'relevant to REV')),
1644 1650 ('', 'index', False, _('display index of the marker')),
1645 1651 ('', 'delete', [], _('delete markers specified by indices')),
1646 1652 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1647 1653 _('[OBSOLETED [REPLACEMENT ...]]'))
1648 1654 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1649 1655 """create arbitrary obsolete marker
1650 1656
1651 1657 With no arguments, displays the list of obsolescence markers."""
1652 1658
1653 1659 opts = pycompat.byteskwargs(opts)
1654 1660
1655 1661 def parsenodeid(s):
1656 1662 try:
1657 1663 # We do not use revsingle/revrange functions here to accept
1658 1664 # arbitrary node identifiers, possibly not present in the
1659 1665 # local repository.
1660 1666 n = bin(s)
1661 1667 if len(n) != len(nullid):
1662 1668 raise TypeError()
1663 1669 return n
1664 1670 except TypeError:
1665 1671 raise error.Abort('changeset references must be full hexadecimal '
1666 1672 'node identifiers')
1667 1673
1668 1674 if opts.get('delete'):
1669 1675 indices = []
1670 1676 for v in opts.get('delete'):
1671 1677 try:
1672 1678 indices.append(int(v))
1673 1679 except ValueError:
1674 1680 raise error.Abort(_('invalid index value: %r') % v,
1675 1681 hint=_('use integers for indices'))
1676 1682
1677 1683 if repo.currenttransaction():
1678 1684 raise error.Abort(_('cannot delete obsmarkers in the middle '
1679 1685 'of a transaction.'))
1680 1686
1681 1687 with repo.lock():
1682 1688 n = repair.deleteobsmarkers(repo.obsstore, indices)
1683 1689 ui.write(_('deleted %i obsolescence markers\n') % n)
1684 1690
1685 1691 return
1686 1692
1687 1693 if precursor is not None:
1688 1694 if opts['rev']:
1689 1695 raise error.Abort('cannot select revision when creating marker')
1690 1696 metadata = {}
1691 1697 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1692 1698 succs = tuple(parsenodeid(succ) for succ in successors)
1693 1699 l = repo.lock()
1694 1700 try:
1695 1701 tr = repo.transaction('debugobsolete')
1696 1702 try:
1697 1703 date = opts.get('date')
1698 1704 if date:
1699 1705 date = dateutil.parsedate(date)
1700 1706 else:
1701 1707 date = None
1702 1708 prec = parsenodeid(precursor)
1703 1709 parents = None
1704 1710 if opts['record_parents']:
1705 1711 if prec not in repo.unfiltered():
1706 1712 raise error.Abort('cannot use --record-parents on '
1707 1713 'unknown changesets')
1708 1714 parents = repo.unfiltered()[prec].parents()
1709 1715 parents = tuple(p.node() for p in parents)
1710 1716 repo.obsstore.create(tr, prec, succs, opts['flags'],
1711 1717 parents=parents, date=date,
1712 1718 metadata=metadata, ui=ui)
1713 1719 tr.close()
1714 1720 except ValueError as exc:
1715 1721 raise error.Abort(_('bad obsmarker input: %s') %
1716 1722 pycompat.bytestr(exc))
1717 1723 finally:
1718 1724 tr.release()
1719 1725 finally:
1720 1726 l.release()
1721 1727 else:
1722 1728 if opts['rev']:
1723 1729 revs = scmutil.revrange(repo, opts['rev'])
1724 1730 nodes = [repo[r].node() for r in revs]
1725 1731 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1726 1732 exclusive=opts['exclusive']))
1727 1733 markers.sort(key=lambda x: x._data)
1728 1734 else:
1729 1735 markers = obsutil.getmarkers(repo)
1730 1736
1731 1737 markerstoiter = markers
1732 1738 isrelevant = lambda m: True
1733 1739 if opts.get('rev') and opts.get('index'):
1734 1740 markerstoiter = obsutil.getmarkers(repo)
1735 1741 markerset = set(markers)
1736 1742 isrelevant = lambda m: m in markerset
1737 1743
1738 1744 fm = ui.formatter('debugobsolete', opts)
1739 1745 for i, m in enumerate(markerstoiter):
1740 1746 if not isrelevant(m):
1741 1747 # marker can be irrelevant when we're iterating over a set
1742 1748 # of markers (markerstoiter) which is bigger than the set
1743 1749 # of markers we want to display (markers)
1744 1750 # this can happen if both --index and --rev options are
1745 1751 # provided and thus we need to iterate over all of the markers
1746 1752 # to get the correct indices, but only display the ones that
1747 1753 # are relevant to --rev value
1748 1754 continue
1749 1755 fm.startitem()
1750 1756 ind = i if opts.get('index') else None
1751 1757 cmdutil.showmarker(fm, m, index=ind)
1752 1758 fm.end()
1753 1759
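# Editor's illustration (hedged): example invocations of the command above,
# using only options declared in its @command table and placeholder hashes:
#
#     hg debugobsolete                            # list all markers
#     hg debugobsolete <old-node> <new-node>      # mark <old-node> as replaced
#     hg debugobsolete --rev . --index            # markers relevant to '.'
#     hg debugobsolete --delete 0 3               # delete markers by index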
1754 1760 @command('debugp1copies',
1755 1761 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1756 1762 _('[-r REV]'))
1757 1763 def debugp1copies(ui, repo, **opts):
1758 1764 """dump copy information compared to p1"""
1759 1765
1760 1766 opts = pycompat.byteskwargs(opts)
1761 1767 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1762 1768 for dst, src in ctx.p1copies().items():
1763 1769 ui.write('%s -> %s\n' % (src, dst))
1764 1770
1765 1771 @command('debugp2copies',
1766 1772 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1767 1773 _('[-r REV]'))
1768 1774 def debugp2copies(ui, repo, **opts):
1769 1775 """dump copy information compared to p2"""
1770 1776
1771 1777 opts = pycompat.byteskwargs(opts)
1772 1778 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1773 1779 for dst, src in ctx.p2copies().items():
1774 1780 ui.write('%s -> %s\n' % (src, dst))
1775 1781
1776 1782 @command('debugpathcomplete',
1777 1783 [('f', 'full', None, _('complete an entire path')),
1778 1784 ('n', 'normal', None, _('show only normal files')),
1779 1785 ('a', 'added', None, _('show only added files')),
1780 1786 ('r', 'removed', None, _('show only removed files'))],
1781 1787 _('FILESPEC...'))
1782 1788 def debugpathcomplete(ui, repo, *specs, **opts):
1783 1789 '''complete part or all of a tracked path
1784 1790
1785 1791 This command supports shells that offer path name completion. It
1786 1792 currently completes only files already known to the dirstate.
1787 1793
1788 1794 Completion extends only to the next path segment unless
1789 1795 --full is specified, in which case entire paths are used.'''
1790 1796
1791 1797 def complete(path, acceptable):
1792 1798 dirstate = repo.dirstate
1793 1799 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1794 1800 rootdir = repo.root + pycompat.ossep
1795 1801 if spec != repo.root and not spec.startswith(rootdir):
1796 1802 return [], []
1797 1803 if os.path.isdir(spec):
1798 1804 spec += '/'
1799 1805 spec = spec[len(rootdir):]
1800 1806 fixpaths = pycompat.ossep != '/'
1801 1807 if fixpaths:
1802 1808 spec = spec.replace(pycompat.ossep, '/')
1803 1809 speclen = len(spec)
1804 1810 fullpaths = opts[r'full']
1805 1811 files, dirs = set(), set()
1806 1812 adddir, addfile = dirs.add, files.add
1807 1813 for f, st in dirstate.iteritems():
1808 1814 if f.startswith(spec) and st[0] in acceptable:
1809 1815 if fixpaths:
1810 1816 f = f.replace('/', pycompat.ossep)
1811 1817 if fullpaths:
1812 1818 addfile(f)
1813 1819 continue
1814 1820 s = f.find(pycompat.ossep, speclen)
1815 1821 if s >= 0:
1816 1822 adddir(f[:s])
1817 1823 else:
1818 1824 addfile(f)
1819 1825 return files, dirs
1820 1826
1821 1827 acceptable = ''
1822 1828 if opts[r'normal']:
1823 1829 acceptable += 'nm'
1824 1830 if opts[r'added']:
1825 1831 acceptable += 'a'
1826 1832 if opts[r'removed']:
1827 1833 acceptable += 'r'
1828 1834 cwd = repo.getcwd()
1829 1835 if not specs:
1830 1836 specs = ['.']
1831 1837
1832 1838 files, dirs = set(), set()
1833 1839 for spec in specs:
1834 1840 f, d = complete(spec, acceptable or 'nmar')
1835 1841 files.update(f)
1836 1842 dirs.update(d)
1837 1843 files.update(dirs)
1838 1844 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1839 1845 ui.write('\n')
1840 1846
1841 1847 @command('debugpathcopies',
1842 1848 cmdutil.walkopts,
1843 1849 'hg debugpathcopies REV1 REV2 [FILE]',
1844 1850 inferrepo=True)
1845 1851 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1846 1852 """show copies between two revisions"""
1847 1853 ctx1 = scmutil.revsingle(repo, rev1)
1848 1854 ctx2 = scmutil.revsingle(repo, rev2)
1849 1855 m = scmutil.match(ctx1, pats, opts)
1850 1856 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1851 1857 ui.write('%s -> %s\n' % (src, dst))
1852 1858
1853 1859 @command('debugpeer', [], _('PATH'), norepo=True)
1854 1860 def debugpeer(ui, path):
1855 1861 """establish a connection to a peer repository"""
1856 1862 # Always enable peer request logging. Requires --debug to display
1857 1863 # though.
1858 1864 overrides = {
1859 1865 ('devel', 'debug.peer-request'): True,
1860 1866 }
1861 1867
1862 1868 with ui.configoverride(overrides):
1863 1869 peer = hg.peer(ui, {}, path)
1864 1870
1865 1871 local = peer.local() is not None
1866 1872 canpush = peer.canpush()
1867 1873
1868 1874 ui.write(_('url: %s\n') % peer.url())
1869 1875 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1870 1876 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1871 1877
1872 1878 @command('debugpickmergetool',
1873 1879 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1874 1880 ('', 'changedelete', None, _('emulate merging change and delete')),
1875 1881 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1876 1882 _('[PATTERN]...'),
1877 1883 inferrepo=True)
1878 1884 def debugpickmergetool(ui, repo, *pats, **opts):
1879 1885 """examine which merge tool is chosen for specified file
1880 1886
1881 1887 As described in :hg:`help merge-tools`, Mercurial examines the
1882 1888 configurations below in this order to decide which merge tool is
1883 1889 chosen for the specified file.
1884 1890
1885 1891 1. ``--tool`` option
1886 1892 2. ``HGMERGE`` environment variable
1887 1893 3. configurations in ``merge-patterns`` section
1888 1894 4. configuration of ``ui.merge``
1889 1895 5. configurations in ``merge-tools`` section
1890 1896 6. ``hgmerge`` tool (for historical reasons only)
1891 1897 7. default tool for fallback (``:merge`` or ``:prompt``)
1892 1898
1893 1899 This command writes out the examination result in the style below::
1894 1900
1895 1901 FILE = MERGETOOL
1896 1902
1897 1903 By default, all files known in the first parent context of the
1898 1904 working directory are examined. Use file patterns and/or -I/-X
1899 1905 options to limit target files. -r/--rev is also useful to examine
1900 1906 files in another context without actually updating to it.
1901 1907
1902 1908 With --debug, this command shows warning messages while matching
1903 1909 against ``merge-patterns`` and so on, too. It is recommended to
1904 1910 use this option with explicit file patterns and/or -I/-X options,
1905 1911 because this option increases the amount of output per file according
1906 1912 to configurations in hgrc.
1907 1913
1908 1914 With -v/--verbose, this command shows configurations below at
1909 1915 first (only if specified).
1910 1916
1911 1917 - ``--tool`` option
1912 1918 - ``HGMERGE`` environment variable
1913 1919 - configuration of ``ui.merge``
1914 1920
1915 1921 If a merge tool is chosen before matching against
1916 1922 ``merge-patterns``, this command can't show any helpful
1917 1923 information, even with --debug. In such a case, the information
1918 1924 above is useful for understanding why a merge tool was chosen.
1919 1925 """
1920 1926 opts = pycompat.byteskwargs(opts)
1921 1927 overrides = {}
1922 1928 if opts['tool']:
1923 1929 overrides[('ui', 'forcemerge')] = opts['tool']
1924 1930 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1925 1931
1926 1932 with ui.configoverride(overrides, 'debugmergepatterns'):
1927 1933 hgmerge = encoding.environ.get("HGMERGE")
1928 1934 if hgmerge is not None:
1929 1935 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1930 1936 uimerge = ui.config("ui", "merge")
1931 1937 if uimerge:
1932 1938 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1933 1939
1934 1940 ctx = scmutil.revsingle(repo, opts.get('rev'))
1935 1941 m = scmutil.match(ctx, pats, opts)
1936 1942 changedelete = opts['changedelete']
1937 1943 for path in ctx.walk(m):
1938 1944 fctx = ctx[path]
1939 1945 try:
1940 1946 if not ui.debugflag:
1941 1947 ui.pushbuffer(error=True)
1942 1948 tool, toolpath = filemerge._picktool(repo, ui, path,
1943 1949 fctx.isbinary(),
1944 1950 'l' in fctx.flags(),
1945 1951 changedelete)
1946 1952 finally:
1947 1953 if not ui.debugflag:
1948 1954 ui.popbuffer()
1949 1955 ui.write(('%s = %s\n') % (path, tool))
1950 1956
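# Editor's sketch (hypothetical helper, not part of Mercurial): steps 1, 2, 4
# and 7 of the precedence documented in debugpickmergetool above, with the
# merge-patterns/merge-tools/hgmerge steps omitted for brevity:
#
#     def _examplepicktool(tool_opt, hgmerge_env, ui_merge):
#         for candidate in (tool_opt, hgmerge_env, ui_merge):
#             if candidate:
#                 return candidate
#         return ':prompt'   # fallback, cf. step 7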
1951 1957 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1952 1958 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1953 1959 '''access the pushkey key/value protocol
1954 1960
1955 1961 With two args, list the keys in the given namespace.
1956 1962
1957 1963 With five args, set a key to new if it currently is set to old.
1958 1964 Reports success or failure.
1959 1965 '''
1960 1966
1961 1967 target = hg.peer(ui, {}, repopath)
1962 1968 if keyinfo:
1963 1969 key, old, new = keyinfo
1964 1970 with target.commandexecutor() as e:
1965 1971 r = e.callcommand('pushkey', {
1966 1972 'namespace': namespace,
1967 1973 'key': key,
1968 1974 'old': old,
1969 1975 'new': new,
1970 1976 }).result()
1971 1977
1972 1978 ui.status(pycompat.bytestr(r) + '\n')
1973 1979 return not r
1974 1980 else:
1975 1981 for k, v in sorted(target.listkeys(namespace).iteritems()):
1976 1982 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1977 1983 stringutil.escapestr(v)))
1978 1984
1979 1985 @command('debugpvec', [], _('A B'))
1980 1986 def debugpvec(ui, repo, a, b=None):
1981 1987 ca = scmutil.revsingle(repo, a)
1982 1988 cb = scmutil.revsingle(repo, b)
1983 1989 pa = pvec.ctxpvec(ca)
1984 1990 pb = pvec.ctxpvec(cb)
1985 1991 if pa == pb:
1986 1992 rel = "="
1987 1993 elif pa > pb:
1988 1994 rel = ">"
1989 1995 elif pa < pb:
1990 1996 rel = "<"
1991 1997 elif pa | pb:
1992 1998 rel = "|"
1993 1999 ui.write(_("a: %s\n") % pa)
1994 2000 ui.write(_("b: %s\n") % pb)
1995 2001 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1996 2002 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1997 2003 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1998 2004 pa.distance(pb), rel))
1999 2005
2000 2006 @command('debugrebuilddirstate|debugrebuildstate',
2001 2007 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2002 2008 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2003 2009 'the working copy parent')),
2004 2010 ],
2005 2011 _('[-r REV]'))
2006 2012 def debugrebuilddirstate(ui, repo, rev, **opts):
2007 2013 """rebuild the dirstate as it would look like for the given revision
2008 2014
2009 2015 If no revision is specified the first current parent will be used.
2010 2016
2011 2017 The dirstate will be set to the files of the given revision.
2012 2018 The actual working directory content or existing dirstate
2013 2019 information such as adds or removes is not considered.
2014 2020
2015 2021 ``minimal`` will only rebuild the dirstate status for files that claim to be
2016 2022 tracked but are not in the parent manifest, or that exist in the parent
2017 2023 manifest but are not in the dirstate. It will not change adds, removes, or
2018 2024 modified files that are in the working copy parent.
2019 2025
2020 2026 One use of this command is to make the next :hg:`status` invocation
2021 2027 check the actual file content.
2022 2028 """
2023 2029 ctx = scmutil.revsingle(repo, rev)
2024 2030 with repo.wlock():
2025 2031 dirstate = repo.dirstate
2026 2032 changedfiles = None
2027 2033 # See command doc for what minimal does.
2028 2034 if opts.get(r'minimal'):
2029 2035 manifestfiles = set(ctx.manifest().keys())
2030 2036 dirstatefiles = set(dirstate)
2031 2037 manifestonly = manifestfiles - dirstatefiles
2032 2038 dsonly = dirstatefiles - manifestfiles
2033 2039 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2034 2040 changedfiles = manifestonly | dsnotadded
2035 2041
2036 2042 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2037 2043
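# Editor's note (worked example, hypothetical file names): with --minimal, only
# files whose dirstate and p1-manifest membership disagree are rebuilt, e.g.
#
#     manifestfiles = {'a', 'b'}      dirstatefiles = {'b', 'c' (added), 'd'}
#     manifestonly  = {'a'}           dsnotadded    = {'d'}
#     changedfiles  = manifestonly | dsnotadded = {'a', 'd'}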
2038 2044 @command('debugrebuildfncache', [], '')
2039 2045 def debugrebuildfncache(ui, repo):
2040 2046 """rebuild the fncache file"""
2041 2047 repair.rebuildfncache(ui, repo)
2042 2048
2043 2049 @command('debugrename',
2044 2050 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2045 2051 _('[-r REV] [FILE]...'))
2046 2052 def debugrename(ui, repo, *pats, **opts):
2047 2053 """dump rename information"""
2048 2054
2049 2055 opts = pycompat.byteskwargs(opts)
2050 2056 ctx = scmutil.revsingle(repo, opts.get('rev'))
2051 2057 m = scmutil.match(ctx, pats, opts)
2052 2058 for abs in ctx.walk(m):
2053 2059 fctx = ctx[abs]
2054 2060 o = fctx.filelog().renamed(fctx.filenode())
2055 2061 rel = repo.pathto(abs)
2056 2062 if o:
2057 2063 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2058 2064 else:
2059 2065 ui.write(_("%s not renamed\n") % rel)
2060 2066
2061 2067 @command('debugrevlog', cmdutil.debugrevlogopts +
2062 2068 [('d', 'dump', False, _('dump index data'))],
2063 2069 _('-c|-m|FILE'),
2064 2070 optionalrepo=True)
2065 2071 def debugrevlog(ui, repo, file_=None, **opts):
2066 2072 """show data and statistics about a revlog"""
2067 2073 opts = pycompat.byteskwargs(opts)
2068 2074 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2069 2075
2070 2076 if opts.get("dump"):
2071 2077 numrevs = len(r)
2072 2078 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2073 2079 " rawsize totalsize compression heads chainlen\n"))
2074 2080 ts = 0
2075 2081 heads = set()
2076 2082
2077 2083 for rev in pycompat.xrange(numrevs):
2078 2084 dbase = r.deltaparent(rev)
2079 2085 if dbase == -1:
2080 2086 dbase = rev
2081 2087 cbase = r.chainbase(rev)
2082 2088 clen = r.chainlen(rev)
2083 2089 p1, p2 = r.parentrevs(rev)
2084 2090 rs = r.rawsize(rev)
2085 2091 ts = ts + rs
2086 2092 heads -= set(r.parentrevs(rev))
2087 2093 heads.add(rev)
2088 2094 try:
2089 2095 compression = ts / r.end(rev)
2090 2096 except ZeroDivisionError:
2091 2097 compression = 0
2092 2098 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2093 2099 "%11d %5d %8d\n" %
2094 2100 (rev, p1, p2, r.start(rev), r.end(rev),
2095 2101 r.start(dbase), r.start(cbase),
2096 2102 r.start(p1), r.start(p2),
2097 2103 rs, ts, compression, len(heads), clen))
2098 2104 return 0
2099 2105
2100 2106 v = r.version
2101 2107 format = v & 0xFFFF
2102 2108 flags = []
2103 2109 gdelta = False
2104 2110 if v & revlog.FLAG_INLINE_DATA:
2105 2111 flags.append('inline')
2106 2112 if v & revlog.FLAG_GENERALDELTA:
2107 2113 gdelta = True
2108 2114 flags.append('generaldelta')
2109 2115 if not flags:
2110 2116 flags = ['(none)']
2111 2117
2112 2118 ### tracks merge vs single parent
2113 2119 nummerges = 0
2114 2120
2115 2121 ### tracks the ways the "delta" is built
2116 2122 # nodelta
2117 2123 numempty = 0
2118 2124 numemptytext = 0
2119 2125 numemptydelta = 0
2120 2126 # full file content
2121 2127 numfull = 0
2122 2128 # intermediate snapshot against a prior snapshot
2123 2129 numsemi = 0
2124 2130 # snapshot count per depth
2125 2131 numsnapdepth = collections.defaultdict(lambda: 0)
2126 2132 # delta against previous revision
2127 2133 numprev = 0
2128 2134 # delta against first or second parent (not prev)
2129 2135 nump1 = 0
2130 2136 nump2 = 0
2131 2137 # delta against neither prev nor parents
2132 2138 numother = 0
2133 2139 # delta against prev that are also first or second parent
2134 2140 # (details of `numprev`)
2135 2141 nump1prev = 0
2136 2142 nump2prev = 0
2137 2143
2138 2144 # data about the delta chain of each rev
2139 2145 chainlengths = []
2140 2146 chainbases = []
2141 2147 chainspans = []
2142 2148
2143 2149 # data about each revision
2144 2150 datasize = [None, 0, 0]
2145 2151 fullsize = [None, 0, 0]
2146 2152 semisize = [None, 0, 0]
2147 2153 # snapshot count per depth
2148 2154 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2149 2155 deltasize = [None, 0, 0]
2150 2156 chunktypecounts = {}
2151 2157 chunktypesizes = {}
2152 2158
2153 2159 def addsize(size, l):
2154 2160 if l[0] is None or size < l[0]:
2155 2161 l[0] = size
2156 2162 if size > l[1]:
2157 2163 l[1] = size
2158 2164 l[2] += size
2159 2165
2160 2166 numrevs = len(r)
2161 2167 for rev in pycompat.xrange(numrevs):
2162 2168 p1, p2 = r.parentrevs(rev)
2163 2169 delta = r.deltaparent(rev)
2164 2170 if format > 0:
2165 2171 addsize(r.rawsize(rev), datasize)
2166 2172 if p2 != nullrev:
2167 2173 nummerges += 1
2168 2174 size = r.length(rev)
2169 2175 if delta == nullrev:
2170 2176 chainlengths.append(0)
2171 2177 chainbases.append(r.start(rev))
2172 2178 chainspans.append(size)
2173 2179 if size == 0:
2174 2180 numempty += 1
2175 2181 numemptytext += 1
2176 2182 else:
2177 2183 numfull += 1
2178 2184 numsnapdepth[0] += 1
2179 2185 addsize(size, fullsize)
2180 2186 addsize(size, snapsizedepth[0])
2181 2187 else:
2182 2188 chainlengths.append(chainlengths[delta] + 1)
2183 2189 baseaddr = chainbases[delta]
2184 2190 revaddr = r.start(rev)
2185 2191 chainbases.append(baseaddr)
2186 2192 chainspans.append((revaddr - baseaddr) + size)
2187 2193 if size == 0:
2188 2194 numempty += 1
2189 2195 numemptydelta += 1
2190 2196 elif r.issnapshot(rev):
2191 2197 addsize(size, semisize)
2192 2198 numsemi += 1
2193 2199 depth = r.snapshotdepth(rev)
2194 2200 numsnapdepth[depth] += 1
2195 2201 addsize(size, snapsizedepth[depth])
2196 2202 else:
2197 2203 addsize(size, deltasize)
2198 2204 if delta == rev - 1:
2199 2205 numprev += 1
2200 2206 if delta == p1:
2201 2207 nump1prev += 1
2202 2208 elif delta == p2:
2203 2209 nump2prev += 1
2204 2210 elif delta == p1:
2205 2211 nump1 += 1
2206 2212 elif delta == p2:
2207 2213 nump2 += 1
2208 2214 elif delta != nullrev:
2209 2215 numother += 1
2210 2216
2211 2217 # Obtain data on the raw chunks in the revlog.
2212 2218 if util.safehasattr(r, '_getsegmentforrevs'):
2213 2219 segment = r._getsegmentforrevs(rev, rev)[1]
2214 2220 else:
2215 2221 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2216 2222 if segment:
2217 2223 chunktype = bytes(segment[0:1])
2218 2224 else:
2219 2225 chunktype = 'empty'
2220 2226
2221 2227 if chunktype not in chunktypecounts:
2222 2228 chunktypecounts[chunktype] = 0
2223 2229 chunktypesizes[chunktype] = 0
2224 2230
2225 2231 chunktypecounts[chunktype] += 1
2226 2232 chunktypesizes[chunktype] += size
2227 2233
2228 2234 # Adjust size min value for empty cases
2229 2235 for size in (datasize, fullsize, semisize, deltasize):
2230 2236 if size[0] is None:
2231 2237 size[0] = 0
2232 2238
2233 2239 numdeltas = numrevs - numfull - numempty - numsemi
2234 2240 numoprev = numprev - nump1prev - nump2prev
2235 2241 totalrawsize = datasize[2]
2236 2242 datasize[2] /= numrevs
2237 2243 fulltotal = fullsize[2]
2238 2244 fullsize[2] /= numfull
2239 2245 semitotal = semisize[2]
2240 2246 snaptotal = {}
2241 2247 if numsemi > 0:
2242 2248 semisize[2] /= numsemi
2243 2249 for depth in snapsizedepth:
2244 2250 snaptotal[depth] = snapsizedepth[depth][2]
2245 2251 snapsizedepth[depth][2] /= numsnapdepth[depth]
2246 2252
2247 2253 deltatotal = deltasize[2]
2248 2254 if numdeltas > 0:
2249 2255 deltasize[2] /= numdeltas
2250 2256 totalsize = fulltotal + semitotal + deltatotal
2251 2257 avgchainlen = sum(chainlengths) / numrevs
2252 2258 maxchainlen = max(chainlengths)
2253 2259 maxchainspan = max(chainspans)
2254 2260 compratio = 1
2255 2261 if totalsize:
2256 2262 compratio = totalrawsize / totalsize
2257 2263
2258 2264 basedfmtstr = '%%%dd\n'
2259 2265 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2260 2266
2261 2267 def dfmtstr(max):
2262 2268 return basedfmtstr % len(str(max))
2263 2269 def pcfmtstr(max, padding=0):
2264 2270 return basepcfmtstr % (len(str(max)), ' ' * padding)
2265 2271
2266 2272 def pcfmt(value, total):
2267 2273 if total:
2268 2274 return (value, 100 * float(value) / total)
2269 2275 else:
2270 2276 return value, 100.0
2271 2277
2272 2278 ui.write(('format : %d\n') % format)
2273 2279 ui.write(('flags : %s\n') % ', '.join(flags))
2274 2280
2275 2281 ui.write('\n')
2276 2282 fmt = pcfmtstr(totalsize)
2277 2283 fmt2 = dfmtstr(totalsize)
2278 2284 ui.write(('revisions : ') + fmt2 % numrevs)
2279 2285 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2280 2286 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2281 2287 ui.write(('revisions : ') + fmt2 % numrevs)
2282 2288 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2283 2289 ui.write((' text : ')
2284 2290 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2285 2291 ui.write((' delta : ')
2286 2292 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2287 2293 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2288 2294 for depth in sorted(numsnapdepth):
2289 2295 ui.write((' lvl-%-3d : ' % depth)
2290 2296 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2291 2297 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2292 2298 ui.write(('revision size : ') + fmt2 % totalsize)
2293 2299 ui.write((' snapshot : ')
2294 2300 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2295 2301 for depth in sorted(numsnapdepth):
2296 2302 ui.write((' lvl-%-3d : ' % depth)
2297 2303 + fmt % pcfmt(snaptotal[depth], totalsize))
2298 2304 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2299 2305
2300 2306 def fmtchunktype(chunktype):
2301 2307 if chunktype == 'empty':
2302 2308 return ' %s : ' % chunktype
2303 2309 elif chunktype in pycompat.bytestr(string.ascii_letters):
2304 2310 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2305 2311 else:
2306 2312 return ' 0x%s : ' % hex(chunktype)
2307 2313
2308 2314 ui.write('\n')
2309 2315 ui.write(('chunks : ') + fmt2 % numrevs)
2310 2316 for chunktype in sorted(chunktypecounts):
2311 2317 ui.write(fmtchunktype(chunktype))
2312 2318 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2313 2319 ui.write(('chunks size : ') + fmt2 % totalsize)
2314 2320 for chunktype in sorted(chunktypecounts):
2315 2321 ui.write(fmtchunktype(chunktype))
2316 2322 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2317 2323
2318 2324 ui.write('\n')
2319 2325 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2320 2326 ui.write(('avg chain length : ') + fmt % avgchainlen)
2321 2327 ui.write(('max chain length : ') + fmt % maxchainlen)
2322 2328 ui.write(('max chain reach : ') + fmt % maxchainspan)
2323 2329 ui.write(('compression ratio : ') + fmt % compratio)
2324 2330
2325 2331 if format > 0:
2326 2332 ui.write('\n')
2327 2333 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2328 2334 % tuple(datasize))
2329 2335 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2330 2336 % tuple(fullsize))
2331 2337 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2332 2338 % tuple(semisize))
2333 2339 for depth in sorted(snapsizedepth):
2334 2340 if depth == 0:
2335 2341 continue
2336 2342 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2337 2343 % ((depth,) + tuple(snapsizedepth[depth])))
2338 2344 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2339 2345 % tuple(deltasize))
2340 2346
2341 2347 if numdeltas > 0:
2342 2348 ui.write('\n')
2343 2349 fmt = pcfmtstr(numdeltas)
2344 2350 fmt2 = pcfmtstr(numdeltas, 4)
2345 2351 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2346 2352 if numprev > 0:
2347 2353 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2348 2354 numprev))
2349 2355 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2350 2356 numprev))
2351 2357 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2352 2358 numprev))
2353 2359 if gdelta:
2354 2360 ui.write(('deltas against p1 : ')
2355 2361 + fmt % pcfmt(nump1, numdeltas))
2356 2362 ui.write(('deltas against p2 : ')
2357 2363 + fmt % pcfmt(nump2, numdeltas))
2358 2364 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2359 2365 numdeltas))
2360 2366
2361 2367 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2362 2368 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2363 2369 _('[-f FORMAT] -c|-m|FILE'),
2364 2370 optionalrepo=True)
2365 2371 def debugrevlogindex(ui, repo, file_=None, **opts):
2366 2372 """dump the contents of a revlog index"""
2367 2373 opts = pycompat.byteskwargs(opts)
2368 2374 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2369 2375 format = opts.get('format', 0)
2370 2376 if format not in (0, 1):
2371 2377 raise error.Abort(_("unknown format %d") % format)
2372 2378
2373 2379 if ui.debugflag:
2374 2380 shortfn = hex
2375 2381 else:
2376 2382 shortfn = short
2377 2383
2378 2384 # There might not be anything in r, so have a sane default
2379 2385 idlen = 12
2380 2386 for i in r:
2381 2387 idlen = len(shortfn(r.node(i)))
2382 2388 break
2383 2389
2384 2390 if format == 0:
2385 2391 if ui.verbose:
2386 2392 ui.write((" rev offset length linkrev"
2387 2393 " %s %s p2\n") % ("nodeid".ljust(idlen),
2388 2394 "p1".ljust(idlen)))
2389 2395 else:
2390 2396 ui.write((" rev linkrev %s %s p2\n") % (
2391 2397 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2392 2398 elif format == 1:
2393 2399 if ui.verbose:
2394 2400 ui.write((" rev flag offset length size link p1"
2395 2401 " p2 %s\n") % "nodeid".rjust(idlen))
2396 2402 else:
2397 2403 ui.write((" rev flag size link p1 p2 %s\n") %
2398 2404 "nodeid".rjust(idlen))
2399 2405
2400 2406 for i in r:
2401 2407 node = r.node(i)
2402 2408 if format == 0:
2403 2409 try:
2404 2410 pp = r.parents(node)
2405 2411 except Exception:
2406 2412 pp = [nullid, nullid]
2407 2413 if ui.verbose:
2408 2414 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2409 2415 i, r.start(i), r.length(i), r.linkrev(i),
2410 2416 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2411 2417 else:
2412 2418 ui.write("% 6d % 7d %s %s %s\n" % (
2413 2419 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2414 2420 shortfn(pp[1])))
2415 2421 elif format == 1:
2416 2422 pr = r.parentrevs(i)
2417 2423 if ui.verbose:
2418 2424 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2419 2425 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2420 2426 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2421 2427 else:
2422 2428 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2423 2429 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2424 2430 shortfn(node)))
2425 2431
2426 2432 @command('debugrevspec',
2427 2433 [('', 'optimize', None,
2428 2434 _('print parsed tree after optimizing (DEPRECATED)')),
2429 2435 ('', 'show-revs', True, _('print list of result revisions (default)')),
2430 2436 ('s', 'show-set', None, _('print internal representation of result set')),
2431 2437 ('p', 'show-stage', [],
2432 2438 _('print parsed tree at the given stage'), _('NAME')),
2433 2439 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2434 2440 ('', 'verify-optimized', False, _('verify optimized result')),
2435 2441 ],
2436 2442 ('REVSPEC'))
2437 2443 def debugrevspec(ui, repo, expr, **opts):
2438 2444 """parse and apply a revision specification
2439 2445
2440 2446 Use -p/--show-stage option to print the parsed tree at the given stages.
2441 2447 Use -p all to print tree at every stage.
2442 2448
2443 2449 Use --no-show-revs option with -s or -p to print only the set
2444 2450 representation or the parsed tree respectively.
2445 2451
2446 2452 Use --verify-optimized to compare the optimized result with the unoptimized
2447 2453 one. Returns 1 if the optimized result differs.
2448 2454 """
2449 2455 opts = pycompat.byteskwargs(opts)
2450 2456 aliases = ui.configitems('revsetalias')
2451 2457 stages = [
2452 2458 ('parsed', lambda tree: tree),
2453 2459 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2454 2460 ui.warn)),
2455 2461 ('concatenated', revsetlang.foldconcat),
2456 2462 ('analyzed', revsetlang.analyze),
2457 2463 ('optimized', revsetlang.optimize),
2458 2464 ]
2459 2465 if opts['no_optimized']:
2460 2466 stages = stages[:-1]
2461 2467 if opts['verify_optimized'] and opts['no_optimized']:
2462 2468 raise error.Abort(_('cannot use --verify-optimized with '
2463 2469 '--no-optimized'))
2464 2470 stagenames = set(n for n, f in stages)
2465 2471
2466 2472 showalways = set()
2467 2473 showchanged = set()
2468 2474 if ui.verbose and not opts['show_stage']:
2469 2475 # show parsed tree by --verbose (deprecated)
2470 2476 showalways.add('parsed')
2471 2477 showchanged.update(['expanded', 'concatenated'])
2472 2478 if opts['optimize']:
2473 2479 showalways.add('optimized')
2474 2480 if opts['show_stage'] and opts['optimize']:
2475 2481 raise error.Abort(_('cannot use --optimize with --show-stage'))
2476 2482 if opts['show_stage'] == ['all']:
2477 2483 showalways.update(stagenames)
2478 2484 else:
2479 2485 for n in opts['show_stage']:
2480 2486 if n not in stagenames:
2481 2487 raise error.Abort(_('invalid stage name: %s') % n)
2482 2488 showalways.update(opts['show_stage'])
2483 2489
2484 2490 treebystage = {}
2485 2491 printedtree = None
2486 2492 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2487 2493 for n, f in stages:
2488 2494 treebystage[n] = tree = f(tree)
2489 2495 if n in showalways or (n in showchanged and tree != printedtree):
2490 2496 if opts['show_stage'] or n != 'parsed':
2491 2497 ui.write(("* %s:\n") % n)
2492 2498 ui.write(revsetlang.prettyformat(tree), "\n")
2493 2499 printedtree = tree
2494 2500
2495 2501 if opts['verify_optimized']:
2496 2502 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2497 2503 brevs = revset.makematcher(treebystage['optimized'])(repo)
2498 2504 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2499 2505 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2500 2506 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2501 2507 arevs = list(arevs)
2502 2508 brevs = list(brevs)
2503 2509 if arevs == brevs:
2504 2510 return 0
2505 2511 ui.write(('--- analyzed\n'), label='diff.file_a')
2506 2512 ui.write(('+++ optimized\n'), label='diff.file_b')
2507 2513 sm = difflib.SequenceMatcher(None, arevs, brevs)
2508 2514 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2509 2515 if tag in (r'delete', r'replace'):
2510 2516 for c in arevs[alo:ahi]:
2511 2517 ui.write('-%d\n' % c, label='diff.deleted')
2512 2518 if tag in (r'insert', r'replace'):
2513 2519 for c in brevs[blo:bhi]:
2514 2520 ui.write('+%d\n' % c, label='diff.inserted')
2515 2521 if tag == r'equal':
2516 2522 for c in arevs[alo:ahi]:
2517 2523 ui.write(' %d\n' % c)
2518 2524 return 1
2519 2525
2520 2526 func = revset.makematcher(tree)
2521 2527 revs = func(repo)
2522 2528 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2523 2529 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2524 2530 if not opts['show_revs']:
2525 2531 return
2526 2532 for c in revs:
2527 2533 ui.write("%d\n" % c)
2528 2534
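# Editor's illustration (hedged): example invocations of the command above,
# using only options declared in its @command table:
#
#     hg debugrevspec -p all 'heads(::.)'          # dump the tree at every stage
#     hg debugrevspec --verify-optimized 'all()'   # compare optimized vs. analyzed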
2529 2535 @command('debugserve', [
2530 2536 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2531 2537 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2532 2538 ('', 'logiofile', '', _('file to log server I/O to')),
2533 2539 ], '')
2534 2540 def debugserve(ui, repo, **opts):
2535 2541 """run a server with advanced settings
2536 2542
2537 2543 This command is similar to :hg:`serve`. It exists partially as a
2538 2544 workaround to the fact that ``hg serve --stdio`` must have specific
2539 2545 arguments for security reasons.
2540 2546 """
2541 2547 opts = pycompat.byteskwargs(opts)
2542 2548
2543 2549 if not opts['sshstdio']:
2544 2550 raise error.Abort(_('only --sshstdio is currently supported'))
2545 2551
2546 2552 logfh = None
2547 2553
2548 2554 if opts['logiofd'] and opts['logiofile']:
2549 2555 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2550 2556
2551 2557 if opts['logiofd']:
2552 2558 # Line buffered because output is line based.
2553 2559 try:
2554 2560 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2555 2561 except OSError as e:
2556 2562 if e.errno != errno.ESPIPE:
2557 2563 raise
2558 2564 # can't seek a pipe, so `ab` mode fails on py3
2559 2565 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2560 2566 elif opts['logiofile']:
2561 2567 logfh = open(opts['logiofile'], 'ab', 1)
2562 2568
2563 2569 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2564 2570 s.serve_forever()
2565 2571
2566 2572 @command('debugsetparents', [], _('REV1 [REV2]'))
2567 2573 def debugsetparents(ui, repo, rev1, rev2=None):
2568 2574 """manually set the parents of the current working directory
2569 2575
2570 2576 This is useful for writing repository conversion tools, but should
2571 2577 be used with care. For example, neither the working directory nor the
2572 2578 dirstate is updated, so file status may be incorrect after running this
2573 2579 command.
2574 2580
2575 2581 Returns 0 on success.
2576 2582 """
2577 2583
2578 2584 node1 = scmutil.revsingle(repo, rev1).node()
2579 2585 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2580 2586
2581 2587 with repo.wlock():
2582 2588 repo.setparents(node1, node2)
2583 2589
2584 2590 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2585 2591 def debugssl(ui, repo, source=None, **opts):
2586 2592 '''test a secure connection to a server
2587 2593
2588 2594 This builds the certificate chain for the server on Windows, installing the
2589 2595 missing intermediates and trusted root via Windows Update if necessary. It
2590 2596 does nothing on other platforms.
2591 2597
2592 2598 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2593 2599 that server is used. See :hg:`help urls` for more information.
2594 2600
2595 2601 If the update succeeds, retry the original operation. Otherwise, the cause
2596 2602 of the SSL error is likely another issue.
2597 2603 '''
2598 2604 if not pycompat.iswindows:
2599 2605 raise error.Abort(_('certificate chain building is only possible on '
2600 2606 'Windows'))
2601 2607
2602 2608 if not source:
2603 2609 if not repo:
2604 2610 raise error.Abort(_("there is no Mercurial repository here, and no "
2605 2611 "server specified"))
2606 2612 source = "default"
2607 2613
2608 2614 source, branches = hg.parseurl(ui.expandpath(source))
2609 2615 url = util.url(source)
2610 2616
2611 2617 defaultport = {'https': 443, 'ssh': 22}
2612 2618 if url.scheme in defaultport:
2613 2619 try:
2614 2620 addr = (url.host, int(url.port or defaultport[url.scheme]))
2615 2621 except ValueError:
2616 2622 raise error.Abort(_("malformed port number in URL"))
2617 2623 else:
2618 2624 raise error.Abort(_("only https and ssh connections are supported"))
2619 2625
2620 2626 from . import win32
2621 2627
2622 2628 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2623 2629 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2624 2630
2625 2631 try:
2626 2632 s.connect(addr)
2627 2633 cert = s.getpeercert(True)
2628 2634
2629 2635 ui.status(_('checking the certificate chain for %s\n') % url.host)
2630 2636
2631 2637 complete = win32.checkcertificatechain(cert, build=False)
2632 2638
2633 2639 if not complete:
2634 2640 ui.status(_('certificate chain is incomplete, updating... '))
2635 2641
2636 2642 if not win32.checkcertificatechain(cert):
2637 2643 ui.status(_('failed.\n'))
2638 2644 else:
2639 2645 ui.status(_('done.\n'))
2640 2646 else:
2641 2647 ui.status(_('full certificate chain is available\n'))
2642 2648 finally:
2643 2649 s.close()
2644 2650
2645 2651 @command('debugsub',
2646 2652 [('r', 'rev', '',
2647 2653 _('revision to check'), _('REV'))],
2648 2654 _('[-r REV] [REV]'))
2649 2655 def debugsub(ui, repo, rev=None):
2650 2656 ctx = scmutil.revsingle(repo, rev, None)
2651 2657 for k, v in sorted(ctx.substate.items()):
2652 2658 ui.write(('path %s\n') % k)
2653 2659 ui.write((' source %s\n') % v[0])
2654 2660 ui.write((' revision %s\n') % v[1])
2655 2661
2656 2662 @command('debugsuccessorssets',
2657 2663 [('', 'closest', False, _('return closest successors sets only'))],
2658 2664 _('[REV]'))
2659 2665 def debugsuccessorssets(ui, repo, *revs, **opts):
2660 2666 """show set of successors for revision
2661 2667
2662 2668 A successors set of changeset A is a consistent group of revisions that
2663 2669 succeed A. It contains non-obsolete changesets only unless the
2664 2670 closest successors sets are requested.
2665 2671
2666 2672 In most cases a changeset A has a single successors set containing a single
2667 2673 successor (changeset A replaced by A').
2668 2674
2669 2675 A changeset that is made obsolete with no successors is called "pruned".
2670 2676 Such changesets have no successors sets at all.
2671 2677
2672 2678 A changeset that has been "split" will have a successors set containing
2673 2679 more than one successor.
2674 2680
2675 2681 A changeset that has been rewritten in multiple different ways is called
2676 2682 "divergent". Such changesets have multiple successor sets (each of which
2677 2683 may also be split, i.e. have multiple successors).
2678 2684
2679 2685 Results are displayed as follows::
2680 2686
2681 2687 <rev1>
2682 2688 <successors-1A>
2683 2689 <rev2>
2684 2690 <successors-2A>
2685 2691 <successors-2B1> <successors-2B2> <successors-2B3>
2686 2692
2687 2693 Here rev2 has two possible (i.e. divergent) successors sets. The first
2688 2694 holds one element, whereas the second holds three (i.e. the changeset has
2689 2695 been split).
2690 2696 """
2691 2697 # passed to successorssets caching computation from one call to another
2692 2698 cache = {}
2693 2699 ctx2str = bytes
2694 2700 node2str = short
2695 2701 for rev in scmutil.revrange(repo, revs):
2696 2702 ctx = repo[rev]
2697 2703 ui.write('%s\n'% ctx2str(ctx))
2698 2704 for succsset in obsutil.successorssets(repo, ctx.node(),
2699 2705 closest=opts[r'closest'],
2700 2706 cache=cache):
2701 2707 if succsset:
2702 2708 ui.write(' ')
2703 2709 ui.write(node2str(succsset[0]))
2704 2710 for node in succsset[1:]:
2705 2711 ui.write(' ')
2706 2712 ui.write(node2str(node))
2707 2713 ui.write('\n')
2708 2714
2709 2715 @command('debugtemplate',
2710 2716 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2711 2717 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2712 2718 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2713 2719 optionalrepo=True)
2714 2720 def debugtemplate(ui, repo, tmpl, **opts):
2715 2721 """parse and apply a template
2716 2722
2717 2723 If -r/--rev is given, the template is processed as a log template and
2718 2724 applied to the given changesets. Otherwise, it is processed as a generic
2719 2725 template.
2720 2726
2721 2727 Use --verbose to print the parsed tree.
2722 2728 """
2723 2729 revs = None
2724 2730 if opts[r'rev']:
2725 2731 if repo is None:
2726 2732 raise error.RepoError(_('there is no Mercurial repository here '
2727 2733 '(.hg not found)'))
2728 2734 revs = scmutil.revrange(repo, opts[r'rev'])
2729 2735
2730 2736 props = {}
2731 2737 for d in opts[r'define']:
2732 2738 try:
2733 2739 k, v = (e.strip() for e in d.split('=', 1))
2734 2740 if not k or k == 'ui':
2735 2741 raise ValueError
2736 2742 props[k] = v
2737 2743 except ValueError:
2738 2744 raise error.Abort(_('malformed keyword definition: %s') % d)
2739 2745
2740 2746 if ui.verbose:
2741 2747 aliases = ui.configitems('templatealias')
2742 2748 tree = templater.parse(tmpl)
2743 2749 ui.note(templater.prettyformat(tree), '\n')
2744 2750 newtree = templater.expandaliases(tree, aliases)
2745 2751 if newtree != tree:
2746 2752 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2747 2753
2748 2754 if revs is None:
2749 2755 tres = formatter.templateresources(ui, repo)
2750 2756 t = formatter.maketemplater(ui, tmpl, resources=tres)
2751 2757 if ui.verbose:
2752 2758 kwds, funcs = t.symbolsuseddefault()
2753 2759 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2754 2760 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2755 2761 ui.write(t.renderdefault(props))
2756 2762 else:
2757 2763 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2758 2764 if ui.verbose:
2759 2765 kwds, funcs = displayer.t.symbolsuseddefault()
2760 2766 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2761 2767 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2762 2768 for r in revs:
2763 2769 displayer.show(repo[r], **pycompat.strkwargs(props))
2764 2770 displayer.close()
2765 2771
2766 2772 @command('debuguigetpass', [
2767 2773 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2768 2774 ], _('[-p TEXT]'), norepo=True)
2769 2775 def debuguigetpass(ui, prompt=''):
2770 2776 """show prompt to type password"""
2771 2777 r = ui.getpass(prompt)
2772 2778 ui.write(('response: %s\n') % r)
2773 2779
2774 2780 @command('debuguiprompt', [
2775 2781 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2776 2782 ], _('[-p TEXT]'), norepo=True)
2777 2783 def debuguiprompt(ui, prompt=''):
2778 2784 """show plain prompt"""
2779 2785 r = ui.prompt(prompt)
2780 2786 ui.write(('response: %s\n') % r)
2781 2787
2782 2788 @command('debugupdatecaches', [])
2783 2789 def debugupdatecaches(ui, repo, *pats, **opts):
2784 2790 """warm all known caches in the repository"""
2785 2791 with repo.wlock(), repo.lock():
2786 2792 repo.updatecaches(full=True)
2787 2793
2788 2794 @command('debugupgraderepo', [
2789 2795 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2790 2796 ('', 'run', False, _('performs an upgrade')),
2791 2797 ('', 'backup', True, _('keep the old repository content around')),
2792 2798 ])
2793 2799 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2794 2800 """upgrade a repository to use different features
2795 2801
2796 2802 If no arguments are specified, the repository is evaluated for upgrade
2797 2803 and a list of problems and potential optimizations is printed.
2798 2804
2799 2805 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2800 2806 can be influenced via additional arguments. More details will be provided
2801 2807 by the command output when run without ``--run``.
2802 2808
2803 2809 During the upgrade, the repository will be locked and no writes will be
2804 2810 allowed.
2805 2811
2806 2812 At the end of the upgrade, the repository may not be readable while new
2807 2813 repository data is swapped in. This window will be as long as it takes to
2808 2814 rename some directories inside the ``.hg`` directory. On most machines, this
2809 2815 should complete almost instantaneously and the chances of a consumer being
2810 2816 unable to access the repository should be low.
2811 2817 """
2812 2818 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2813 2819 backup=backup)
2814 2820
2815 2821 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2816 2822 inferrepo=True)
2817 2823 def debugwalk(ui, repo, *pats, **opts):
2818 2824 """show how files match on given patterns"""
2819 2825 opts = pycompat.byteskwargs(opts)
2820 2826 m = scmutil.match(repo[None], pats, opts)
2821 2827 if ui.verbose:
2822 2828 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2823 2829 items = list(repo[None].walk(m))
2824 2830 if not items:
2825 2831 return
2826 2832 f = lambda fn: fn
2827 2833 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2828 2834 f = lambda fn: util.normpath(fn)
2829 2835 fmt = 'f %%-%ds %%-%ds %%s' % (
2830 2836 max([len(abs) for abs in items]),
2831 2837 max([len(repo.pathto(abs)) for abs in items]))
2832 2838 for abs in items:
2833 2839 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2834 2840 ui.write("%s\n" % line.rstrip())
2835 2841
2836 2842 @command('debugwhyunstable', [], _('REV'))
2837 2843 def debugwhyunstable(ui, repo, rev):
2838 2844 """explain instabilities of a changeset"""
2839 2845 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2840 2846 dnodes = ''
2841 2847 if entry.get('divergentnodes'):
2842 2848 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2843 2849 for ctx in entry['divergentnodes']) + ' '
2844 2850 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2845 2851 entry['reason'], entry['node']))
2846 2852
2847 2853 @command('debugwireargs',
2848 2854 [('', 'three', '', 'three'),
2849 2855 ('', 'four', '', 'four'),
2850 2856 ('', 'five', '', 'five'),
2851 2857 ] + cmdutil.remoteopts,
2852 2858 _('REPO [OPTIONS]... [ONE [TWO]]'),
2853 2859 norepo=True)
2854 2860 def debugwireargs(ui, repopath, *vals, **opts):
2855 2861 opts = pycompat.byteskwargs(opts)
2856 2862 repo = hg.peer(ui, opts, repopath)
2857 2863 for opt in cmdutil.remoteopts:
2858 2864 del opts[opt[1]]
2859 2865 args = {}
2860 2866 for k, v in opts.iteritems():
2861 2867 if v:
2862 2868 args[k] = v
2863 2869 args = pycompat.strkwargs(args)
2864 2870 # run twice to check that we don't mess up the stream for the next command
2865 2871 res1 = repo.debugwireargs(*vals, **args)
2866 2872 res2 = repo.debugwireargs(*vals, **args)
2867 2873 ui.write("%s\n" % res1)
2868 2874 if res1 != res2:
2869 2875 ui.warn("%s\n" % res2)
2870 2876
2871 2877 def _parsewirelangblocks(fh):
2872 2878 activeaction = None
2873 2879 blocklines = []
2874 2880 lastindent = 0
2875 2881
2876 2882 for line in fh:
2877 2883 line = line.rstrip()
2878 2884 if not line:
2879 2885 continue
2880 2886
2881 2887 if line.startswith(b'#'):
2882 2888 continue
2883 2889
2884 2890 if not line.startswith(b' '):
2885 2891 # New block. Flush previous one.
2886 2892 if activeaction:
2887 2893 yield activeaction, blocklines
2888 2894
2889 2895 activeaction = line
2890 2896 blocklines = []
2891 2897 lastindent = 0
2892 2898 continue
2893 2899
2894 2900 # Else we start with an indent.
2895 2901
2896 2902 if not activeaction:
2897 2903 raise error.Abort(_('indented line outside of block'))
2898 2904
2899 2905 indent = len(line) - len(line.lstrip())
2900 2906
2901 2907 # If this line is indented more than the last line, concatenate it.
2902 2908 if indent > lastindent and blocklines:
2903 2909 blocklines[-1] += line.lstrip()
2904 2910 else:
2905 2911 blocklines.append(line)
2906 2912 lastindent = indent
2907 2913
2908 2914 # Flush last block.
2909 2915 if activeaction:
2910 2916 yield activeaction, blocklines
2911 2917
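# Editor's illustration (not upstream code): _parsewirelangblocks yields
# (action, blocklines) pairs.  For example, a two-line script such as
#
#     command listkeys
#         namespace bookmarks
#
# (payload indented with four spaces) would be parsed into
# (b'command listkeys', [b'    namespace bookmarks']), i.e. the indented
# payload lines keep their leading whitespace.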
2912 2918 @command('debugwireproto',
2913 2919 [
2914 2920 ('', 'localssh', False, _('start an SSH server for this repo')),
2915 2921 ('', 'peer', '', _('construct a specific version of the peer')),
2916 2922 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2917 2923 ('', 'nologhandshake', False,
2918 2924 _('do not log I/O related to the peer handshake')),
2919 2925 ] + cmdutil.remoteopts,
2920 2926 _('[PATH]'),
2921 2927 optionalrepo=True)
2922 2928 def debugwireproto(ui, repo, path=None, **opts):
2923 2929 """send wire protocol commands to a server
2924 2930
2925 2931 This command can be used to issue wire protocol commands to remote
2926 2932 peers and to debug the raw data being exchanged.
2927 2933
2928 2934 ``--localssh`` will start an SSH server against the current repository
2929 2935 and connect to that. By default, the connection will perform a handshake
2930 2936 and establish an appropriate peer instance.
2931 2937
2932 2938 ``--peer`` can be used to bypass the handshake protocol and construct a
2933 2939 peer instance using the specified class type. Valid values are ``raw``,
2934 2940 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2935 2941 raw data payloads and don't support higher-level command actions.
2936 2942
2937 2943 ``--noreadstderr`` can be used to disable automatic reading from stderr
2938 2944 of the peer (for SSH connections only). Disabling automatic reading of
2939 2945 stderr is useful for making output more deterministic.
2940 2946
2941 2947 Commands are issued via a mini language which is specified via stdin.
2942 2948 The language consists of individual actions to perform. An action is
2943 2949 defined by a block. A block is defined as a line with no leading
2944 2950 space followed by 0 or more lines with leading space. Blocks are
2945 2951 effectively a high-level command with additional metadata.
2946 2952
2947 2953 Lines beginning with ``#`` are ignored.
2948 2954
2949 2955 The following sections denote available actions.
2950 2956
2951 2957 raw
2952 2958 ---
2953 2959
2954 2960 Send raw data to the server.
2955 2961
2956 2962 The block payload contains the raw data to send as one atomic send
2957 2963 operation. The data may not actually be delivered in a single system
2958 2964 call: it depends on the abilities of the transport being used.
2959 2965
2960 2966 Each line in the block is de-indented and concatenated. Then, that
2961 2967 value is evaluated as a Python b'' literal. This allows the use of
2962 2968 backslash escaping, etc.
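
For instance, the following block sends ``hello\n`` followed by ``between\n``
as one payload; each line is de-indented, concatenated, and then unescaped
(the payload shown is illustrative)::

  raw
      hello\n
      between\n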
2963 2969
2964 2970 raw+
2965 2971 ----
2966 2972
2967 2973 Behaves like ``raw`` except flushes output afterwards.
2968 2974
2969 2975 command <X>
2970 2976 -----------
2971 2977
2972 2978 Send a request to run a named command, whose name follows the ``command``
2973 2979 string.
2974 2980
2975 2981 Arguments to the command are defined as lines in this block. The format of
2976 2982 each line is ``<key> <value>``. e.g.::
2977 2983
2978 2984 command listkeys
2979 2985 namespace bookmarks
2980 2986
2981 2987 If the value begins with ``eval:``, it will be interpreted as a Python
2982 2988 literal expression. Otherwise values are interpreted as Python b'' literals.
2983 2989 This allows sending complex types and encoding special byte sequences via
2984 2990 backslash escaping.
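
For instance, an empty list could be passed as an argument value like so
(the command shown is illustrative)::

  command known
      nodes eval:[]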
2985 2991
2986 2992 The following arguments have special meaning:
2987 2993
2988 2994 ``PUSHFILE``
2989 2995 When defined, the *push* mechanism of the peer will be used instead
2990 2996 of the static request-response mechanism and the content of the
2991 2997 file specified in the value of this argument will be sent as the
2992 2998 command payload.
2993 2999
2994 3000 This can be used to submit a local bundle file to the remote.
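
For instance, a previously created bundle file could be submitted via the
``unbundle`` command like so (``666f726365`` is ``force`` in hex; the file
name is hypothetical)::

  command unbundle
      heads 666f726365
      PUSHFILE ../initial.v1.hg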
2995 3001
2996 3002 batchbegin
2997 3003 ----------
2998 3004
2999 3005 Instruct the peer to begin a batched send.
3000 3006
3001 3007 All ``command`` blocks are queued for execution until the next
3002 3008 ``batchsubmit`` block.
3003 3009
3004 3010 batchsubmit
3005 3011 -----------
3006 3012
3007 3013 Submit previously queued ``command`` blocks as a batch request.
3008 3014
3009 3015 This action MUST be paired with a ``batchbegin`` action.
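
For instance, two commands could be combined into a single batch request
(the commands shown are illustrative)::

  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit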
3010 3016
3011 3017 httprequest <method> <path>
3012 3018 ---------------------------
3013 3019
3014 3020 (HTTP peer only)
3015 3021
3016 3022 Send an HTTP request to the peer.
3017 3023
3018 3024 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3019 3025
3020 3026 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3021 3027 headers to add to the request. e.g. ``Accept: foo``.
3022 3028
3023 3029 The following arguments are special:
3024 3030
3025 3031 ``BODYFILE``
3026 3032 The content of the file defined as the value to this argument will be
3027 3033 transferred verbatim as the HTTP request body.
3028 3034
3029 3035 ``frame <type> <flags> <payload>``
3030 3036 Send a unified protocol frame as part of the request body.
3031 3037
3032 3038 All frames will be collected and sent as the body to the HTTP
3033 3039 request.
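
For instance, a ``capabilities`` request against the version 1 HTTP protocol
could be issued as follows (header values are illustrative)::

  httprequest GET ?cmd=capabilities
      accept: application/mercurial-0.1
      user-agent: test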
3034 3040
3035 3041 close
3036 3042 -----
3037 3043
3038 3044 Close the connection to the server.
3039 3045
3040 3046 flush
3041 3047 -----
3042 3048
3043 3049 Flush data written to the server.
3044 3050
3045 3051 readavailable
3046 3052 -------------
3047 3053
3048 3054 Close the write end of the connection and read all available data from
3049 3055 the server.
3050 3056
3051 3057 If the connection to the server encompasses multiple pipes, we poll both
3052 3058 pipes and read available data.
3053 3059
3054 3060 readline
3055 3061 --------
3056 3062
3057 3063 Read a line of output from the server. If there are multiple output
3058 3064 pipes, reads only the main pipe.
3059 3065
3060 3066 ereadline
3061 3067 ---------
3062 3068
3063 3069 Like ``readline``, but read from the stderr pipe, if available.
3064 3070
3065 3071 read <X>
3066 3072 --------
3067 3073
3068 3074 ``read()`` ``<X>`` bytes from the server's main output pipe.
3069 3075
3070 3076 eread <X>
3071 3077 ---------
3072 3078
3073 3079 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
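
These low-level read actions are typically paired with ``raw`` blocks. For
example, a response could be consumed line by line like so (the payload is
illustrative)::

  raw
      hello\n
  readline
  readline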
3074 3080
3075 3081 Specifying Unified Frame-Based Protocol Frames
3076 3082 ----------------------------------------------
3077 3083
3078 3084 It is possible to emit *Unified Frame-Based Protocol* frames by using
3079 3085 special syntax.
3080 3086
3081 3087 A frame is composed as a type, flags, and payload. These can be parsed
3082 3088 from a string of the form::
3083 3089
3084 3090 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3085 3091
3086 3092 ``request-id`` and ``stream-id`` are integers defining the request and
3087 3093 stream identifiers.
3088 3094
3089 3095 ``type`` can be an integer value for the frame type or the string name
3090 3096 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3091 3097 ``command-name``.
3092 3098
3093 3099 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3094 3100 components. Each component (and there can be just one) can be an integer
3095 3101 or a flag name for stream flags or frame flags, respectively. Values are
3096 3102 resolved to integers and then bitwise OR'd together.
3097 3103
3098 3104 ``payload`` represents the raw frame payload. If it begins with
3099 3105 ``cbor:``, the following string is evaluated as Python code and the
3100 3106 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3101 3107 as a Python byte string literal.
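
For instance, a request to run the ``heads`` command over wire protocol
version 2 could be expressed as a single frame, typically supplied as a
``frame`` argument of an ``httprequest`` block (identifiers and payload are
illustrative)::

  frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}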
3102 3108 """
3103 3109 opts = pycompat.byteskwargs(opts)
3104 3110
3105 3111 if opts['localssh'] and not repo:
3106 3112 raise error.Abort(_('--localssh requires a repository'))
3107 3113
3108 3114 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3109 3115 raise error.Abort(_('invalid value for --peer'),
3110 3116 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3111 3117
3112 3118 if path and opts['localssh']:
3113 3119 raise error.Abort(_('cannot specify --localssh with an explicit '
3114 3120 'path'))
3115 3121
3116 3122 if ui.interactive():
3117 3123 ui.write(_('(waiting for commands on stdin)\n'))
3118 3124
3119 3125 blocks = list(_parsewirelangblocks(ui.fin))
3120 3126
3121 3127 proc = None
3122 3128 stdin = None
3123 3129 stdout = None
3124 3130 stderr = None
3125 3131 opener = None
3126 3132
3127 3133 if opts['localssh']:
3128 3134 # We start the SSH server in its own process so there is process
3129 3135 # separation. This prevents a whole class of potential bugs around
3130 3136 # shared state from interfering with server operation.
3131 3137 args = procutil.hgcmd() + [
3132 3138 '-R', repo.root,
3133 3139 'debugserve', '--sshstdio',
3134 3140 ]
3135 3141 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3136 3142 stdin=subprocess.PIPE,
3137 3143 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3138 3144 bufsize=0)
3139 3145
3140 3146 stdin = proc.stdin
3141 3147 stdout = proc.stdout
3142 3148 stderr = proc.stderr
3143 3149
3144 3150 # We turn the pipes into observers so we can log I/O.
3145 3151 if ui.verbose or opts['peer'] == 'raw':
3146 3152 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3147 3153 logdata=True)
3148 3154 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3149 3155 logdata=True)
3150 3156 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3151 3157 logdata=True)
3152 3158
3153 3159 # --localssh also implies the peer connection settings.
3154 3160
3155 3161 url = 'ssh://localserver'
3156 3162 autoreadstderr = not opts['noreadstderr']
3157 3163
3158 3164 if opts['peer'] == 'ssh1':
3159 3165 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3160 3166 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3161 3167 None, autoreadstderr=autoreadstderr)
3162 3168 elif opts['peer'] == 'ssh2':
3163 3169 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3164 3170 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3165 3171 None, autoreadstderr=autoreadstderr)
3166 3172 elif opts['peer'] == 'raw':
3167 3173 ui.write(_('using raw connection to peer\n'))
3168 3174 peer = None
3169 3175 else:
3170 3176 ui.write(_('creating ssh peer from handshake results\n'))
3171 3177 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3172 3178 autoreadstderr=autoreadstderr)
3173 3179
3174 3180 elif path:
3175 3181 # We bypass hg.peer() so we can proxy the sockets.
3176 3182 # TODO consider not doing this because we skip
3177 3183 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3178 3184 u = util.url(path)
3179 3185 if u.scheme != 'http':
3180 3186 raise error.Abort(_('only http:// paths are currently supported'))
3181 3187
3182 3188 url, authinfo = u.authinfo()
3183 3189 openerargs = {
3184 3190 r'useragent': b'Mercurial debugwireproto',
3185 3191 }
3186 3192
3187 3193 # Turn pipes/sockets into observers so we can log I/O.
3188 3194 if ui.verbose:
3189 3195 openerargs.update({
3190 3196 r'loggingfh': ui,
3191 3197 r'loggingname': b's',
3192 3198 r'loggingopts': {
3193 3199 r'logdata': True,
3194 3200 r'logdataapis': False,
3195 3201 },
3196 3202 })
3197 3203
3198 3204 if ui.debugflag:
3199 3205 openerargs[r'loggingopts'][r'logdataapis'] = True
3200 3206
3201 3207 # Don't send default headers when in raw mode. This allows us to
3202 3208 # bypass most of the behavior of our URL handling code so we can
3203 3209 # have near complete control over what's sent on the wire.
3204 3210 if opts['peer'] == 'raw':
3205 3211 openerargs[r'sendaccept'] = False
3206 3212
3207 3213 opener = urlmod.opener(ui, authinfo, **openerargs)
3208 3214
3209 3215 if opts['peer'] == 'http2':
3210 3216 ui.write(_('creating http peer for wire protocol version 2\n'))
3211 3217 # We go through makepeer() because we need an API descriptor for
3212 3218 # the peer instance to be useful.
3213 3219 with ui.configoverride({
3214 3220 ('experimental', 'httppeer.advertise-v2'): True}):
3215 3221 if opts['nologhandshake']:
3216 3222 ui.pushbuffer()
3217 3223
3218 3224 peer = httppeer.makepeer(ui, path, opener=opener)
3219 3225
3220 3226 if opts['nologhandshake']:
3221 3227 ui.popbuffer()
3222 3228
3223 3229 if not isinstance(peer, httppeer.httpv2peer):
3224 3230 raise error.Abort(_('could not instantiate HTTP peer for '
3225 3231 'wire protocol version 2'),
3226 3232 hint=_('the server may not have the feature '
3227 3233 'enabled or is not allowing this '
3228 3234 'client version'))
3229 3235
3230 3236 elif opts['peer'] == 'raw':
3231 3237 ui.write(_('using raw connection to peer\n'))
3232 3238 peer = None
3233 3239 elif opts['peer']:
3234 3240 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3235 3241 opts['peer'])
3236 3242 else:
3237 3243 peer = httppeer.makepeer(ui, path, opener=opener)
3238 3244
3239 3245 # We /could/ populate stdin/stdout with sock.makefile()...
3240 3246 else:
3241 3247 raise error.Abort(_('unsupported connection configuration'))
3242 3248
3243 3249 batchedcommands = None
3244 3250
3245 3251 # Now perform actions based on the parsed wire language instructions.
3246 3252 for action, lines in blocks:
3247 3253 if action in ('raw', 'raw+'):
3248 3254 if not stdin:
3249 3255 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3250 3256
3251 3257 # Concatenate the data together.
3252 3258 data = ''.join(l.lstrip() for l in lines)
3253 3259 data = stringutil.unescapestr(data)
3254 3260 stdin.write(data)
3255 3261
3256 3262 if action == 'raw+':
3257 3263 stdin.flush()
3258 3264 elif action == 'flush':
3259 3265 if not stdin:
3260 3266 raise error.Abort(_('cannot call flush on this peer'))
3261 3267 stdin.flush()
3262 3268 elif action.startswith('command'):
3263 3269 if not peer:
3264 3270 raise error.Abort(_('cannot send commands unless peer instance '
3265 3271 'is available'))
3266 3272
3267 3273 command = action.split(' ', 1)[1]
3268 3274
3269 3275 args = {}
3270 3276 for line in lines:
3271 3277 # We need to allow empty values.
3272 3278 fields = line.lstrip().split(' ', 1)
3273 3279 if len(fields) == 1:
3274 3280 key = fields[0]
3275 3281 value = ''
3276 3282 else:
3277 3283 key, value = fields
3278 3284
3279 3285 if value.startswith('eval:'):
3280 3286 value = stringutil.evalpythonliteral(value[5:])
3281 3287 else:
3282 3288 value = stringutil.unescapestr(value)
3283 3289
3284 3290 args[key] = value
3285 3291
3286 3292 if batchedcommands is not None:
3287 3293 batchedcommands.append((command, args))
3288 3294 continue
3289 3295
3290 3296 ui.status(_('sending %s command\n') % command)
3291 3297
3292 3298 if 'PUSHFILE' in args:
3293 3299 with open(args['PUSHFILE'], r'rb') as fh:
3294 3300 del args['PUSHFILE']
3295 3301 res, output = peer._callpush(command, fh,
3296 3302 **pycompat.strkwargs(args))
3297 3303 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3298 3304 ui.status(_('remote output: %s\n') %
3299 3305 stringutil.escapestr(output))
3300 3306 else:
3301 3307 with peer.commandexecutor() as e:
3302 3308 res = e.callcommand(command, args).result()
3303 3309
3304 3310 if isinstance(res, wireprotov2peer.commandresponse):
3305 3311 val = res.objects()
3306 3312 ui.status(_('response: %s\n') %
3307 3313 stringutil.pprint(val, bprefix=True, indent=2))
3308 3314 else:
3309 3315 ui.status(_('response: %s\n') %
3310 3316 stringutil.pprint(res, bprefix=True, indent=2))
3311 3317
3312 3318 elif action == 'batchbegin':
3313 3319 if batchedcommands is not None:
3314 3320 raise error.Abort(_('nested batchbegin not allowed'))
3315 3321
3316 3322 batchedcommands = []
3317 3323 elif action == 'batchsubmit':
3318 3324 # There is a batching API we could go through. But it would be
3319 3325 # difficult to normalize requests into function calls. It is easier
3320 3326 # to bypass this layer and normalize to commands + args.
3321 3327 ui.status(_('sending batch with %d sub-commands\n') %
3322 3328 len(batchedcommands))
3323 3329 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3324 3330 ui.status(_('response #%d: %s\n') %
3325 3331 (i, stringutil.escapestr(chunk)))
3326 3332
3327 3333 batchedcommands = None
3328 3334
3329 3335 elif action.startswith('httprequest '):
3330 3336 if not opener:
3331 3337 raise error.Abort(_('cannot use httprequest without an HTTP '
3332 3338 'peer'))
3333 3339
3334 3340 request = action.split(' ', 2)
3335 3341 if len(request) != 3:
3336 3342 raise error.Abort(_('invalid httprequest: expected format is '
3337 3343 '"httprequest <method> <path>'))
3338 3344
3339 3345 method, httppath = request[1:]
3340 3346 headers = {}
3341 3347 body = None
3342 3348 frames = []
3343 3349 for line in lines:
3344 3350 line = line.lstrip()
3345 3351 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3346 3352 if m:
3347 3353 # Headers need to use native strings.
3348 3354 key = pycompat.strurl(m.group(1))
3349 3355 value = pycompat.strurl(m.group(2))
3350 3356 headers[key] = value
3351 3357 continue
3352 3358
3353 3359 if line.startswith(b'BODYFILE '):
3354 3360 with open(line.split(b' ', 1)[1], r'rb') as fh:
3355 3361 body = fh.read()
3356 3362 elif line.startswith(b'frame '):
3357 3363 frame = wireprotoframing.makeframefromhumanstring(
3358 3364 line[len(b'frame '):])
3359 3365
3360 3366 frames.append(frame)
3361 3367 else:
3362 3368 raise error.Abort(_('unknown argument to httprequest: %s') %
3363 3369 line)
3364 3370
3365 3371 url = path + httppath
3366 3372
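# When frames are given, they replace any body previously loaded via
# BODYFILE.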
3367 3373 if frames:
3368 3374 body = b''.join(bytes(f) for f in frames)
3369 3375
3370 3376 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3371 3377
3372 3378 # urllib.Request insists on using has_data() as a proxy for
3373 3379 # determining the request method. Override that to use our
3374 3380 # explicitly requested method.
3375 3381 req.get_method = lambda: pycompat.sysstr(method)
3376 3382
3377 3383 try:
3378 3384 res = opener.open(req)
3379 3385 body = res.read()
3380 3386 except util.urlerr.urlerror as e:
3381 3387 # read() method must be called, but only exists in Python 2
3382 3388 getattr(e, 'read', lambda: None)()
3383 3389 continue
3384 3390
3385 3391 ct = res.headers.get(r'Content-Type')
3386 3392 if ct == r'application/mercurial-cbor':
3387 3393 ui.write(_('cbor> %s\n') %
3388 3394 stringutil.pprint(cborutil.decodeall(body),
3389 3395 bprefix=True,
3390 3396 indent=2))
3391 3397
3392 3398 elif action == 'close':
3393 3399 peer.close()
3394 3400 elif action == 'readavailable':
3395 3401 if not stdout or not stderr:
3396 3402 raise error.Abort(_('readavailable not available on this peer'))
3397 3403
3398 3404 stdin.close()
3399 3405 stdout.read()
3400 3406 stderr.read()
3401 3407
3402 3408 elif action == 'readline':
3403 3409 if not stdout:
3404 3410 raise error.Abort(_('readline not available on this peer'))
3405 3411 stdout.readline()
3406 3412 elif action == 'ereadline':
3407 3413 if not stderr:
3408 3414 raise error.Abort(_('ereadline not available on this peer'))
3409 3415 stderr.readline()
3410 3416 elif action.startswith('read '):
3411 3417 count = int(action.split(' ', 1)[1])
3412 3418 if not stdout:
3413 3419 raise error.Abort(_('read not available on this peer'))
3414 3420 stdout.read(count)
3415 3421 elif action.startswith('eread '):
3416 3422 count = int(action.split(' ', 1)[1])
3417 3423 if not stderr:
3418 3424 raise error.Abort(_('eread not available on this peer'))
3419 3425 stderr.read(count)
3420 3426 else:
3421 3427 raise error.Abort(_('unknown action: %s') % action)
3422 3428
3423 3429 if batchedcommands is not None:
3424 3430 raise error.Abort(_('unclosed "batchbegin" request'))
3425 3431
3426 3432 if peer:
3427 3433 peer.close()
3428 3434
3429 3435 if proc:
3430 3436 proc.kill()