debugcommands: work around logiofd being a pipe and unseekable...
Augie Fackler
r38333:275cc461 default
@@ -1,3138 +1,3144
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 fileset,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
92 92 release = lockmod.release
93 93
94 94 command = registrar.command()
95 95
96 96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 97 def debugancestor(ui, repo, *args):
98 98 """find the ancestor revision of two revisions in a given index"""
99 99 if len(args) == 3:
100 100 index, rev1, rev2 = args
101 101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 102 lookup = r.lookup
103 103 elif len(args) == 2:
104 104 if not repo:
105 105 raise error.Abort(_('there is no Mercurial repository here '
106 106 '(.hg not found)'))
107 107 rev1, rev2 = args
108 108 r = repo.changelog
109 109 lookup = repo.lookup
110 110 else:
111 111 raise error.Abort(_('either two or three arguments required'))
112 112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114 114
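# Illustrative usage of the command above: inside a repository, the two-argument
# form consults the changelog (e.g. `hg debugancestor 3 5`); the three-argument
# form reads a revlog index directly (e.g.
# `hg debugancestor .hg/store/00changelog.i 3 5`). Either way the output is
# "<rev>:<hex node>" of the computed ancestor.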
115 115 @command('debugapplystreamclonebundle', [], 'FILE')
116 116 def debugapplystreamclonebundle(ui, repo, fname):
117 117 """apply a stream clone bundle file"""
118 118 f = hg.openpath(ui, fname)
119 119 gen = exchange.readbundle(ui, f, fname)
120 120 gen.apply(repo)
121 121
122 122 @command('debugbuilddag',
123 123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 125 ('n', 'new-file', None, _('add new file at each rev'))],
126 126 _('[OPTION]... [TEXT]'))
127 127 def debugbuilddag(ui, repo, text=None,
128 128 mergeable_file=False,
129 129 overwritten_file=False,
130 130 new_file=False):
131 131 """builds a repo with a given DAG from scratch in the current empty repo
132 132
133 133 The description of the DAG is read from stdin if not given on the
134 134 command line.
135 135
136 136 Elements:
137 137
138 138 - "+n" is a linear run of n nodes based on the current default parent
139 139 - "." is a single node based on the current default parent
140 140 - "$" resets the default parent to null (implied at the start);
141 141 otherwise the default parent is always the last node created
142 142 - "<p" sets the default parent to the backref p
143 143 - "*p" is a fork at parent p, which is a backref
144 144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 145 - "/p2" is a merge of the preceding node and p2
146 146 - ":tag" defines a local tag for the preceding node
147 147 - "@branch" sets the named branch for subsequent nodes
148 148 - "#...\\n" is a comment up to the end of the line
149 149
150 150 Whitespace between the above elements is ignored.
151 151
152 152 A backref is either
153 153
154 154 - a number n, which references the node curr-n, where curr is the current
155 155 node, or
156 156 - the name of a local tag you placed earlier using ":tag", or
157 157 - empty to denote the default parent.
158 158
159 159 All string-valued elements are either strictly alphanumeric, or must
160 160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 161 """
162 162
163 163 if text is None:
164 164 ui.status(_("reading DAG from stdin\n"))
165 165 text = ui.fin.read()
166 166
167 167 cl = repo.changelog
168 168 if len(cl) > 0:
169 169 raise error.Abort(_('repository is not empty'))
170 170
171 171 # determine number of revs in DAG
172 172 total = 0
173 173 for type, data in dagparser.parsedag(text):
174 174 if type == 'n':
175 175 total += 1
176 176
177 177 if mergeable_file:
178 178 linesperrev = 2
179 179 # make a file with k lines per rev
180 180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
181 181 initialmergedlines.append("")
182 182
183 183 tags = []
184 184
185 185 wlock = lock = tr = None
186 186 try:
187 187 wlock = repo.wlock()
188 188 lock = repo.lock()
189 189 tr = repo.transaction("builddag")
190 190
191 191 at = -1
192 192 atbranch = 'default'
193 193 nodeids = []
194 194 id = 0
195 195 ui.progress(_('building'), id, unit=_('revisions'), total=total)
196 196 for type, data in dagparser.parsedag(text):
197 197 if type == 'n':
198 198 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 199 id, ps = data
200 200
201 201 files = []
202 202 filecontent = {}
203 203
204 204 p2 = None
205 205 if mergeable_file:
206 206 fn = "mf"
207 207 p1 = repo[ps[0]]
208 208 if len(ps) > 1:
209 209 p2 = repo[ps[1]]
210 210 pa = p1.ancestor(p2)
211 211 base, local, other = [x[fn].data() for x in (pa, p1,
212 212 p2)]
213 213 m3 = simplemerge.Merge3Text(base, local, other)
214 214 ml = [l.strip() for l in m3.merge_lines()]
215 215 ml.append("")
216 216 elif at > 0:
217 217 ml = p1[fn].data().split("\n")
218 218 else:
219 219 ml = initialmergedlines
220 220 ml[id * linesperrev] += " r%i" % id
221 221 mergedtext = "\n".join(ml)
222 222 files.append(fn)
223 223 filecontent[fn] = mergedtext
224 224
225 225 if overwritten_file:
226 226 fn = "of"
227 227 files.append(fn)
228 228 filecontent[fn] = "r%i\n" % id
229 229
230 230 if new_file:
231 231 fn = "nf%i" % id
232 232 files.append(fn)
233 233 filecontent[fn] = "r%i\n" % id
234 234 if len(ps) > 1:
235 235 if not p2:
236 236 p2 = repo[ps[1]]
237 237 for fn in p2:
238 238 if fn.startswith("nf"):
239 239 files.append(fn)
240 240 filecontent[fn] = p2[fn].data()
241 241
242 242 def fctxfn(repo, cx, path):
243 243 if path in filecontent:
244 244 return context.memfilectx(repo, cx, path,
245 245 filecontent[path])
246 246 return None
247 247
248 248 if len(ps) == 0 or ps[0] < 0:
249 249 pars = [None, None]
250 250 elif len(ps) == 1:
251 251 pars = [nodeids[ps[0]], None]
252 252 else:
253 253 pars = [nodeids[p] for p in ps]
254 254 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 255 date=(id, 0),
256 256 user="debugbuilddag",
257 257 extra={'branch': atbranch})
258 258 nodeid = repo.commitctx(cx)
259 259 nodeids.append(nodeid)
260 260 at = id
261 261 elif type == 'l':
262 262 id, name = data
263 263 ui.note(('tag %s\n' % name))
264 264 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 265 elif type == 'a':
266 266 ui.note(('branch %s\n' % data))
267 267 atbranch = data
268 268 ui.progress(_('building'), id, unit=_('revisions'), total=total)
269 269 tr.close()
270 270
271 271 if tags:
272 272 repo.vfs.write("localtags", "".join(tags))
273 273 finally:
274 274 ui.progress(_('building'), None)
275 275 release(tr, lock, wlock)
276 276
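# A minimal runnable sketch (illustrative only; the helper name and paths are
# hypothetical) of driving debugbuilddag through the hg CLI. It assumes an `hg`
# executable on PATH and uses the DAG text '+3:tip1 $ +2 /tip1': three linear
# nodes with the last tagged "tip1", a "$"-reset second root of two nodes, and
# finally a merge of the current node with "tip1".
def _builddag_demo(dagtext='+3:tip1 $ +2 /tip1'):
    import subprocess
    import tempfile
    path = tempfile.mkdtemp(prefix='builddag-demo-')
    subprocess.check_call(['hg', 'init', path])  # debugbuilddag requires an empty repo
    subprocess.check_call(['hg', '-R', path, 'debugbuilddag', dagtext])
    # return the resulting graph together with the local tags it created
    return subprocess.check_output(
        ['hg', '-R', path, 'log', '-G', '-T', '{rev} {tags}\n'])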
277 277 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
278 278 indent_string = ' ' * indent
279 279 if all:
280 280 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
281 281 % indent_string)
282 282
283 283 def showchunks(named):
284 284 ui.write("\n%s%s\n" % (indent_string, named))
285 285 for deltadata in gen.deltaiter():
286 286 node, p1, p2, cs, deltabase, delta, flags = deltadata
287 287 ui.write("%s%s %s %s %s %s %d\n" %
288 288 (indent_string, hex(node), hex(p1), hex(p2),
289 289 hex(cs), hex(deltabase), len(delta)))
290 290
291 291 chunkdata = gen.changelogheader()
292 292 showchunks("changelog")
293 293 chunkdata = gen.manifestheader()
294 294 showchunks("manifest")
295 295 for chunkdata in iter(gen.filelogheader, {}):
296 296 fname = chunkdata['filename']
297 297 showchunks(fname)
298 298 else:
299 299 if isinstance(gen, bundle2.unbundle20):
300 300 raise error.Abort(_('use debugbundle2 for this file'))
301 301 chunkdata = gen.changelogheader()
302 302 for deltadata in gen.deltaiter():
303 303 node, p1, p2, cs, deltabase, delta, flags = deltadata
304 304 ui.write("%s%s\n" % (indent_string, hex(node)))
305 305
306 306 def _debugobsmarkers(ui, part, indent=0, **opts):
307 307 """display version and markers contained in 'data'"""
308 308 opts = pycompat.byteskwargs(opts)
309 309 data = part.read()
310 310 indent_string = ' ' * indent
311 311 try:
312 312 version, markers = obsolete._readmarkers(data)
313 313 except error.UnknownVersion as exc:
314 314 msg = "%sunsupported version: %s (%d bytes)\n"
315 315 msg %= indent_string, exc.version, len(data)
316 316 ui.write(msg)
317 317 else:
318 318 msg = "%sversion: %d (%d bytes)\n"
319 319 msg %= indent_string, version, len(data)
320 320 ui.write(msg)
321 321 fm = ui.formatter('debugobsolete', opts)
322 322 for rawmarker in sorted(markers):
323 323 m = obsutil.marker(None, rawmarker)
324 324 fm.startitem()
325 325 fm.plain(indent_string)
326 326 cmdutil.showmarker(fm, m)
327 327 fm.end()
328 328
329 329 def _debugphaseheads(ui, data, indent=0):
330 330 """display version and markers contained in 'data'"""
331 331 indent_string = ' ' * indent
332 332 headsbyphase = phases.binarydecode(data)
333 333 for phase in phases.allphases:
334 334 for head in headsbyphase[phase]:
335 335 ui.write(indent_string)
336 336 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
337 337
338 338 def _quasirepr(thing):
339 339 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
340 340 return '{%s}' % (
341 341 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
342 342 return pycompat.bytestr(repr(thing))
343 343
344 344 def _debugbundle2(ui, gen, all=None, **opts):
345 345 """lists the contents of a bundle2"""
346 346 if not isinstance(gen, bundle2.unbundle20):
347 347 raise error.Abort(_('not a bundle2 file'))
348 348 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
349 349 parttypes = opts.get(r'part_type', [])
350 350 for part in gen.iterparts():
351 351 if parttypes and part.type not in parttypes:
352 352 continue
353 353 msg = '%s -- %s (mandatory: %r)\n'
354 354 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
355 355 if part.type == 'changegroup':
356 356 version = part.params.get('version', '01')
357 357 cg = changegroup.getunbundler(version, part, 'UN')
358 358 if not ui.quiet:
359 359 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
360 360 if part.type == 'obsmarkers':
361 361 if not ui.quiet:
362 362 _debugobsmarkers(ui, part, indent=4, **opts)
363 363 if part.type == 'phase-heads':
364 364 if not ui.quiet:
365 365 _debugphaseheads(ui, part, indent=4)
366 366
367 367 @command('debugbundle',
368 368 [('a', 'all', None, _('show all details')),
369 369 ('', 'part-type', [], _('show only the named part type')),
370 370 ('', 'spec', None, _('print the bundlespec of the bundle'))],
371 371 _('FILE'),
372 372 norepo=True)
373 373 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
374 374 """lists the contents of a bundle"""
375 375 with hg.openpath(ui, bundlepath) as f:
376 376 if spec:
377 377 spec = exchange.getbundlespec(ui, f)
378 378 ui.write('%s\n' % spec)
379 379 return
380 380
381 381 gen = exchange.readbundle(ui, f, bundlepath)
382 382 if isinstance(gen, bundle2.unbundle20):
383 383 return _debugbundle2(ui, gen, all=all, **opts)
384 384 _debugchangegroup(ui, gen, all=all, **opts)
385 385
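# Illustrative invocations: `hg debugbundle bundle.hg` lists the changesets in
# the bundle, `--all` adds the per-delta detail printed by _debugchangegroup
# above, `--part-type obsmarkers` restricts a bundle2 listing to one part type,
# and `--spec` prints only the bundlespec.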
386 386 @command('debugcapabilities',
387 387 [], _('PATH'),
388 388 norepo=True)
389 389 def debugcapabilities(ui, path, **opts):
390 390 """lists the capabilities of a remote peer"""
391 391 opts = pycompat.byteskwargs(opts)
392 392 peer = hg.peer(ui, opts, path)
393 393 caps = peer.capabilities()
394 394 ui.write(('Main capabilities:\n'))
395 395 for c in sorted(caps):
396 396 ui.write((' %s\n') % c)
397 397 b2caps = bundle2.bundle2caps(peer)
398 398 if b2caps:
399 399 ui.write(('Bundle2 capabilities:\n'))
400 400 for key, values in sorted(b2caps.iteritems()):
401 401 ui.write((' %s\n') % key)
402 402 for v in values:
403 403 ui.write((' %s\n') % v)
404 404
405 405 @command('debugcheckstate', [], '')
406 406 def debugcheckstate(ui, repo):
407 407 """validate the correctness of the current dirstate"""
408 408 parent1, parent2 = repo.dirstate.parents()
409 409 m1 = repo[parent1].manifest()
410 410 m2 = repo[parent2].manifest()
411 411 errors = 0
412 412 for f in repo.dirstate:
413 413 state = repo.dirstate[f]
414 414 if state in "nr" and f not in m1:
415 415 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
416 416 errors += 1
417 417 if state in "a" and f in m1:
418 418 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
419 419 errors += 1
420 420 if state in "m" and f not in m1 and f not in m2:
421 421 ui.warn(_("%s in state %s, but not in either manifest\n") %
422 422 (f, state))
423 423 errors += 1
424 424 for f in m1:
425 425 state = repo.dirstate[f]
426 426 if state not in "nrm":
427 427 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
428 428 errors += 1
429 429 if errors:
430 430 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
431 431 raise error.Abort(errstr)
432 432
433 433 @command('debugcolor',
434 434 [('', 'style', None, _('show all configured styles'))],
435 435 'hg debugcolor')
436 436 def debugcolor(ui, repo, **opts):
437 437 """show available color, effects or style"""
438 438 ui.write(('color mode: %s\n') % ui._colormode)
439 439 if opts.get(r'style'):
440 440 return _debugdisplaystyle(ui)
441 441 else:
442 442 return _debugdisplaycolor(ui)
443 443
444 444 def _debugdisplaycolor(ui):
445 445 ui = ui.copy()
446 446 ui._styles.clear()
447 447 for effect in color._activeeffects(ui).keys():
448 448 ui._styles[effect] = effect
449 449 if ui._terminfoparams:
450 450 for k, v in ui.configitems('color'):
451 451 if k.startswith('color.'):
452 452 ui._styles[k] = k[6:]
453 453 elif k.startswith('terminfo.'):
454 454 ui._styles[k] = k[9:]
455 455 ui.write(_('available colors:\n'))
456 456 # sort label with a '_' after the other to group '_background' entry.
457 457 items = sorted(ui._styles.items(),
458 458 key=lambda i: ('_' in i[0], i[0], i[1]))
459 459 for colorname, label in items:
460 460 ui.write(('%s\n') % colorname, label=label)
461 461
462 462 def _debugdisplaystyle(ui):
463 463 ui.write(_('available style:\n'))
464 464 if not ui._styles:
465 465 return
466 466 width = max(len(s) for s in ui._styles)
467 467 for label, effects in sorted(ui._styles.items()):
468 468 ui.write('%s' % label, label=label)
469 469 if effects:
470 470 # 50
471 471 ui.write(': ')
472 472 ui.write(' ' * (max(0, width - len(label))))
473 473 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
474 474 ui.write('\n')
475 475
476 476 @command('debugcreatestreamclonebundle', [], 'FILE')
477 477 def debugcreatestreamclonebundle(ui, repo, fname):
478 478 """create a stream clone bundle file
479 479
480 480 Stream bundles are special bundles that are essentially archives of
481 481 revlog files. They are commonly used for cloning very quickly.
482 482 """
483 483 # TODO we may want to turn this into an abort when this functionality
484 484 # is moved into `hg bundle`.
485 485 if phases.hassecret(repo):
486 486 ui.warn(_('(warning: stream clone bundle will contain secret '
487 487 'revisions)\n'))
488 488
489 489 requirements, gen = streamclone.generatebundlev1(repo)
490 490 changegroup.writechunks(ui, gen, fname)
491 491
492 492 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
493 493
494 494 @command('debugdag',
495 495 [('t', 'tags', None, _('use tags as labels')),
496 496 ('b', 'branches', None, _('annotate with branch names')),
497 497 ('', 'dots', None, _('use dots for runs')),
498 498 ('s', 'spaces', None, _('separate elements by spaces'))],
499 499 _('[OPTION]... [FILE [REV]...]'),
500 500 optionalrepo=True)
501 501 def debugdag(ui, repo, file_=None, *revs, **opts):
502 502 """format the changelog or an index DAG as a concise textual description
503 503
504 504 If you pass a revlog index, the revlog's DAG is emitted. If you list
505 505 revision numbers, they get labeled in the output as rN.
506 506
507 507 Otherwise, the changelog DAG of the current repo is emitted.
508 508 """
509 509 spaces = opts.get(r'spaces')
510 510 dots = opts.get(r'dots')
511 511 if file_:
512 512 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
513 513 file_)
514 514 revs = set((int(r) for r in revs))
515 515 def events():
516 516 for r in rlog:
517 517 yield 'n', (r, list(p for p in rlog.parentrevs(r)
518 518 if p != -1))
519 519 if r in revs:
520 520 yield 'l', (r, "r%i" % r)
521 521 elif repo:
522 522 cl = repo.changelog
523 523 tags = opts.get(r'tags')
524 524 branches = opts.get(r'branches')
525 525 if tags:
526 526 labels = {}
527 527 for l, n in repo.tags().items():
528 528 labels.setdefault(cl.rev(n), []).append(l)
529 529 def events():
530 530 b = "default"
531 531 for r in cl:
532 532 if branches:
533 533 newb = cl.read(cl.node(r))[5]['branch']
534 534 if newb != b:
535 535 yield 'a', newb
536 536 b = newb
537 537 yield 'n', (r, list(p for p in cl.parentrevs(r)
538 538 if p != -1))
539 539 if tags:
540 540 ls = labels.get(r)
541 541 if ls:
542 542 for l in ls:
543 543 yield 'l', (r, l)
544 544 else:
545 545 raise error.Abort(_('need repo for changelog dag'))
546 546
547 547 for line in dagparser.dagtextlines(events(),
548 548 addspaces=spaces,
549 549 wraplabels=True,
550 550 wrapannotations=True,
551 551 wrapnonlinear=dots,
552 552 usedots=dots,
553 553 maxlinewidth=70):
554 554 ui.write(line)
555 555 ui.write("\n")
556 556
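# For example (illustrative): `hg debugdag -t -b` emits the current changelog in
# the same DAG notation debugbuilddag consumes, annotated with tags and branch
# names, while `hg debugdag .hg/store/00manifest.i 0 2` dumps a revlog index
# and labels revisions 0 and 2 as r0/r2 in the output.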
557 557 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
558 558 def debugdata(ui, repo, file_, rev=None, **opts):
559 559 """dump the contents of a data file revision"""
560 560 opts = pycompat.byteskwargs(opts)
561 561 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
562 562 if rev is not None:
563 563 raise error.CommandError('debugdata', _('invalid arguments'))
564 564 file_, rev = None, file_
565 565 elif rev is None:
566 566 raise error.CommandError('debugdata', _('invalid arguments'))
567 567 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
568 568 try:
569 569 ui.write(r.revision(r.lookup(rev), raw=True))
570 570 except KeyError:
571 571 raise error.Abort(_('invalid revision identifier %s') % rev)
572 572
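# Illustrative: `hg debugdata -c 0` dumps the raw changelog entry of revision 0,
# `hg debugdata -m 0` the corresponding manifest text, and
# `hg debugdata some/file REV` (hypothetical path) the stored filelog data
# (raw=True, so without flag processing).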
573 573 @command('debugdate',
574 574 [('e', 'extended', None, _('try extended date formats'))],
575 575 _('[-e] DATE [RANGE]'),
576 576 norepo=True, optionalrepo=True)
577 577 def debugdate(ui, date, range=None, **opts):
578 578 """parse and display a date"""
579 579 if opts[r"extended"]:
580 580 d = dateutil.parsedate(date, util.extendeddateformats)
581 581 else:
582 582 d = dateutil.parsedate(date)
583 583 ui.write(("internal: %d %d\n") % d)
584 584 ui.write(("standard: %s\n") % dateutil.datestr(d))
585 585 if range:
586 586 m = dateutil.matchdate(range)
587 587 ui.write(("match: %s\n") % m(d[0]))
588 588
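# Illustrative: `hg debugdate '2018-06-20 14:00 -0500'` prints the parsed value
# as "internal: <unixtime> <tzoffset>" plus the normalized "standard:" form;
# with a RANGE argument it also reports whether the date falls in that range.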
589 589 @command('debugdeltachain',
590 590 cmdutil.debugrevlogopts + cmdutil.formatteropts,
591 591 _('-c|-m|FILE'),
592 592 optionalrepo=True)
593 593 def debugdeltachain(ui, repo, file_=None, **opts):
594 594 """dump information about delta chains in a revlog
595 595
596 596 Output can be templatized. Available template keywords are:
597 597
598 598 :``rev``: revision number
599 599 :``chainid``: delta chain identifier (numbered by unique base)
600 600 :``chainlen``: delta chain length to this revision
601 601 :``prevrev``: previous revision in delta chain
602 602 :``deltatype``: role of delta / how it was computed
603 603 :``compsize``: compressed size of revision
604 604 :``uncompsize``: uncompressed size of revision
605 605 :``chainsize``: total size of compressed revisions in chain
606 606 :``chainratio``: total chain size divided by uncompressed revision size
607 607 (new delta chains typically start at ratio 2.00)
608 608 :``lindist``: linear distance from base revision in delta chain to end
609 609 of this revision
610 610 :``extradist``: total size of revisions not part of this delta chain from
611 611 base of delta chain to end of this revision; a measurement
612 612 of how much extra data we need to read/seek across to read
613 613 the delta chain for this revision
614 614 :``extraratio``: extradist divided by chainsize; another representation of
615 615 how much unrelated data is needed to load this delta chain
616 616
617 617 If the repository is configured to use the sparse read, additional keywords
618 618 are available:
619 619
620 620 :``readsize``: total size of data read from the disk for a revision
621 621 (sum of the sizes of all the blocks)
622 622 :``largestblock``: size of the largest block of data read from the disk
623 623 :``readdensity``: density of useful bytes in the data read from the disk
624 624 :``srchunks``: in how many data hunks the whole revision would be read
625 625
626 626 The sparse read can be enabled with experimental.sparse-read = True
627 627 """
628 628 opts = pycompat.byteskwargs(opts)
629 629 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
630 630 index = r.index
631 631 start = r.start
632 632 length = r.length
633 633 generaldelta = r.version & revlog.FLAG_GENERALDELTA
634 634 withsparseread = getattr(r, '_withsparseread', False)
635 635
636 636 def revinfo(rev):
637 637 e = index[rev]
638 638 compsize = e[1]
639 639 uncompsize = e[2]
640 640 chainsize = 0
641 641
642 642 if generaldelta:
643 643 if e[3] == e[5]:
644 644 deltatype = 'p1'
645 645 elif e[3] == e[6]:
646 646 deltatype = 'p2'
647 647 elif e[3] == rev - 1:
648 648 deltatype = 'prev'
649 649 elif e[3] == rev:
650 650 deltatype = 'base'
651 651 else:
652 652 deltatype = 'other'
653 653 else:
654 654 if e[3] == rev:
655 655 deltatype = 'base'
656 656 else:
657 657 deltatype = 'prev'
658 658
659 659 chain = r._deltachain(rev)[0]
660 660 for iterrev in chain:
661 661 e = index[iterrev]
662 662 chainsize += e[1]
663 663
664 664 return compsize, uncompsize, deltatype, chain, chainsize
665 665
666 666 fm = ui.formatter('debugdeltachain', opts)
667 667
668 668 fm.plain(' rev chain# chainlen prev delta '
669 669 'size rawsize chainsize ratio lindist extradist '
670 670 'extraratio')
671 671 if withsparseread:
672 672 fm.plain(' readsize largestblk rddensity srchunks')
673 673 fm.plain('\n')
674 674
675 675 chainbases = {}
676 676 for rev in r:
677 677 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
678 678 chainbase = chain[0]
679 679 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
680 680 basestart = start(chainbase)
681 681 revstart = start(rev)
682 682 lineardist = revstart + comp - basestart
683 683 extradist = lineardist - chainsize
684 684 try:
685 685 prevrev = chain[-2]
686 686 except IndexError:
687 687 prevrev = -1
688 688
689 689 chainratio = float(chainsize) / float(uncomp)
690 690 extraratio = float(extradist) / float(chainsize)
691 691
692 692 fm.startitem()
693 693 fm.write('rev chainid chainlen prevrev deltatype compsize '
694 694 'uncompsize chainsize chainratio lindist extradist '
695 695 'extraratio',
696 696 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
697 697 rev, chainid, len(chain), prevrev, deltatype, comp,
698 698 uncomp, chainsize, chainratio, lineardist, extradist,
699 699 extraratio,
700 700 rev=rev, chainid=chainid, chainlen=len(chain),
701 701 prevrev=prevrev, deltatype=deltatype, compsize=comp,
702 702 uncompsize=uncomp, chainsize=chainsize,
703 703 chainratio=chainratio, lindist=lineardist,
704 704 extradist=extradist, extraratio=extraratio)
705 705 if withsparseread:
706 706 readsize = 0
707 707 largestblock = 0
708 708 srchunks = 0
709 709
710 710 for revschunk in revlog._slicechunk(r, chain):
711 711 srchunks += 1
712 712 blkend = start(revschunk[-1]) + length(revschunk[-1])
713 713 blksize = blkend - start(revschunk[0])
714 714
715 715 readsize += blksize
716 716 if largestblock < blksize:
717 717 largestblock = blksize
718 718
719 719 readdensity = float(chainsize) / float(readsize)
720 720
721 721 fm.write('readsize largestblock readdensity srchunks',
722 722 ' %10d %10d %9.5f %8d',
723 723 readsize, largestblock, readdensity, srchunks,
724 724 readsize=readsize, largestblock=largestblock,
725 725 readdensity=readdensity, srchunks=srchunks)
726 726
727 727 fm.plain('\n')
728 728
729 729 fm.end()
730 730
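# Because the command accepts formatter options, the keywords documented above
# can be used directly in a template, e.g. (illustrative):
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype} {chainratio}\n'
# which reports a handful of columns for the manifest revlog.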
731 731 @command('debugdirstate|debugstate',
732 732 [('', 'nodates', None, _('do not display the saved mtime')),
733 733 ('', 'datesort', None, _('sort by saved mtime'))],
734 734 _('[OPTION]...'))
735 735 def debugstate(ui, repo, **opts):
736 736 """show the contents of the current dirstate"""
737 737
738 738 nodates = opts.get(r'nodates')
739 739 datesort = opts.get(r'datesort')
740 740
741 741 timestr = ""
742 742 if datesort:
743 743 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
744 744 else:
745 745 keyfunc = None # sort by filename
746 746 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
747 747 if ent[3] == -1:
748 748 timestr = 'unset '
749 749 elif nodates:
750 750 timestr = 'set '
751 751 else:
752 752 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
753 753 time.localtime(ent[3]))
754 754 timestr = encoding.strtolocal(timestr)
755 755 if ent[1] & 0o20000:
756 756 mode = 'lnk'
757 757 else:
758 758 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
759 759 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
760 760 for f in repo.dirstate.copies():
761 761 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
762 762
763 763 @command('debugdiscovery',
764 764 [('', 'old', None, _('use old-style discovery')),
765 765 ('', 'nonheads', None,
766 766 _('use old-style discovery with non-heads included')),
767 767 ('', 'rev', [], 'restrict discovery to this set of revs'),
768 768 ] + cmdutil.remoteopts,
769 769 _('[--rev REV] [OTHER]'))
770 770 def debugdiscovery(ui, repo, remoteurl="default", **opts):
771 771 """runs the changeset discovery protocol in isolation"""
772 772 opts = pycompat.byteskwargs(opts)
773 773 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
774 774 remote = hg.peer(repo, opts, remoteurl)
775 775 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
776 776
777 777 # make sure tests are repeatable
778 778 random.seed(12323)
779 779
780 780 def doit(pushedrevs, remoteheads, remote=remote):
781 781 if opts.get('old'):
782 782 if not util.safehasattr(remote, 'branches'):
783 783 # enable in-client legacy support
784 784 remote = localrepo.locallegacypeer(remote.local())
785 785 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
786 786 force=True)
787 787 common = set(common)
788 788 if not opts.get('nonheads'):
789 789 ui.write(("unpruned common: %s\n") %
790 790 " ".join(sorted(short(n) for n in common)))
791 791 dag = dagutil.revlogdag(repo.changelog)
792 792 all = dag.ancestorset(dag.internalizeall(common))
793 793 common = dag.externalizeall(dag.headsetofconnecteds(all))
794 794 else:
795 795 nodes = None
796 796 if pushedrevs:
797 797 revs = scmutil.revrange(repo, pushedrevs)
798 798 nodes = [repo[r].node() for r in revs]
799 799 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
800 800 ancestorsof=nodes)
801 801 common = set(common)
802 802 rheads = set(hds)
803 803 lheads = set(repo.heads())
804 804 ui.write(("common heads: %s\n") %
805 805 " ".join(sorted(short(n) for n in common)))
806 806 if lheads <= common:
807 807 ui.write(("local is subset\n"))
808 808 elif rheads <= common:
809 809 ui.write(("remote is subset\n"))
810 810
811 811 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
812 812 localrevs = opts['rev']
813 813 doit(localrevs, remoterevs)
814 814
815 815 _chunksize = 4 << 10
816 816
817 817 @command('debugdownload',
818 818 [
819 819 ('o', 'output', '', _('path')),
820 820 ],
821 821 optionalrepo=True)
822 822 def debugdownload(ui, repo, url, output=None, **opts):
823 823 """download a resource using Mercurial logic and config
824 824 """
825 825 fh = urlmod.open(ui, url, output)
826 826
827 827 dest = ui
828 828 if output:
829 829 dest = open(output, "wb", _chunksize)
830 830 try:
831 831 data = fh.read(_chunksize)
832 832 while data:
833 833 dest.write(data)
834 834 data = fh.read(_chunksize)
835 835 finally:
836 836 if output:
837 837 dest.close()
838 838
839 839 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
840 840 def debugextensions(ui, repo, **opts):
841 841 '''show information about active extensions'''
842 842 opts = pycompat.byteskwargs(opts)
843 843 exts = extensions.extensions(ui)
844 844 hgver = util.version()
845 845 fm = ui.formatter('debugextensions', opts)
846 846 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
847 847 isinternal = extensions.ismoduleinternal(extmod)
848 848 extsource = pycompat.fsencode(extmod.__file__)
849 849 if isinternal:
850 850 exttestedwith = [] # never expose magic string to users
851 851 else:
852 852 exttestedwith = getattr(extmod, 'testedwith', '').split()
853 853 extbuglink = getattr(extmod, 'buglink', None)
854 854
855 855 fm.startitem()
856 856
857 857 if ui.quiet or ui.verbose:
858 858 fm.write('name', '%s\n', extname)
859 859 else:
860 860 fm.write('name', '%s', extname)
861 861 if isinternal or hgver in exttestedwith:
862 862 fm.plain('\n')
863 863 elif not exttestedwith:
864 864 fm.plain(_(' (untested!)\n'))
865 865 else:
866 866 lasttestedversion = exttestedwith[-1]
867 867 fm.plain(' (%s!)\n' % lasttestedversion)
868 868
869 869 fm.condwrite(ui.verbose and extsource, 'source',
870 870 _(' location: %s\n'), extsource or "")
871 871
872 872 if ui.verbose:
873 873 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
874 874 fm.data(bundled=isinternal)
875 875
876 876 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
877 877 _(' tested with: %s\n'),
878 878 fm.formatlist(exttestedwith, name='ver'))
879 879
880 880 fm.condwrite(ui.verbose and extbuglink, 'buglink',
881 881 _(' bug reporting: %s\n'), extbuglink or "")
882 882
883 883 fm.end()
884 884
885 885 @command('debugfileset',
886 886 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
887 887 _('[-r REV] FILESPEC'))
888 888 def debugfileset(ui, repo, expr, **opts):
889 889 '''parse and apply a fileset specification'''
890 890 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
891 891 if ui.verbose:
892 892 tree = fileset.parse(expr)
893 893 ui.note(fileset.prettyformat(tree), "\n")
894 894
895 895 for f in ctx.getfileset(expr):
896 896 ui.write("%s\n" % f)
897 897
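# Illustrative: `hg debugfileset 'added() or modified()'` lists working-copy
# files matching the fileset, `-r REV` evaluates the expression against another
# revision, and --verbose also prints the parsed expression tree first.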
898 898 @command('debugformat',
899 899 [] + cmdutil.formatteropts,
900 900 _(''))
901 901 def debugformat(ui, repo, **opts):
902 902 """display format information about the current repository
903 903
904 904 Use --verbose to get extra information about current config value and
905 905 Mercurial default."""
906 906 opts = pycompat.byteskwargs(opts)
907 907 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
908 908 maxvariantlength = max(len('format-variant'), maxvariantlength)
909 909
910 910 def makeformatname(name):
911 911 return '%s:' + (' ' * (maxvariantlength - len(name)))
912 912
913 913 fm = ui.formatter('debugformat', opts)
914 914 if fm.isplain():
915 915 def formatvalue(value):
916 916 if util.safehasattr(value, 'startswith'):
917 917 return value
918 918 if value:
919 919 return 'yes'
920 920 else:
921 921 return 'no'
922 922 else:
923 923 formatvalue = pycompat.identity
924 924
925 925 fm.plain('format-variant')
926 926 fm.plain(' ' * (maxvariantlength - len('format-variant')))
927 927 fm.plain(' repo')
928 928 if ui.verbose:
929 929 fm.plain(' config default')
930 930 fm.plain('\n')
931 931 for fv in upgrade.allformatvariant:
932 932 fm.startitem()
933 933 repovalue = fv.fromrepo(repo)
934 934 configvalue = fv.fromconfig(repo)
935 935
936 936 if repovalue != configvalue:
937 937 namelabel = 'formatvariant.name.mismatchconfig'
938 938 repolabel = 'formatvariant.repo.mismatchconfig'
939 939 elif repovalue != fv.default:
940 940 namelabel = 'formatvariant.name.mismatchdefault'
941 941 repolabel = 'formatvariant.repo.mismatchdefault'
942 942 else:
943 943 namelabel = 'formatvariant.name.uptodate'
944 944 repolabel = 'formatvariant.repo.uptodate'
945 945
946 946 fm.write('name', makeformatname(fv.name), fv.name,
947 947 label=namelabel)
948 948 fm.write('repo', ' %3s', formatvalue(repovalue),
949 949 label=repolabel)
950 950 if fv.default != configvalue:
951 951 configlabel = 'formatvariant.config.special'
952 952 else:
953 953 configlabel = 'formatvariant.config.default'
954 954 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
955 955 label=configlabel)
956 956 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
957 957 label='formatvariant.default')
958 958 fm.plain('\n')
959 959 fm.end()
960 960
961 961 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
962 962 def debugfsinfo(ui, path="."):
963 963 """show information detected about current filesystem"""
964 964 ui.write(('path: %s\n') % path)
965 965 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
966 966 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
967 967 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
968 968 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
969 969 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
970 970 casesensitive = '(unknown)'
971 971 try:
972 972 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
973 973 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
974 974 except OSError:
975 975 pass
976 976 ui.write(('case-sensitive: %s\n') % casesensitive)
977 977
978 978 @command('debuggetbundle',
979 979 [('H', 'head', [], _('id of head node'), _('ID')),
980 980 ('C', 'common', [], _('id of common node'), _('ID')),
981 981 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
982 982 _('REPO FILE [-H|-C ID]...'),
983 983 norepo=True)
984 984 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
985 985 """retrieves a bundle from a repo
986 986
987 987 Every ID must be a full-length hex node id string. Saves the bundle to the
988 988 given file.
989 989 """
990 990 opts = pycompat.byteskwargs(opts)
991 991 repo = hg.peer(ui, opts, repopath)
992 992 if not repo.capable('getbundle'):
993 993 raise error.Abort("getbundle() not supported by target repository")
994 994 args = {}
995 995 if common:
996 996 args[r'common'] = [bin(s) for s in common]
997 997 if head:
998 998 args[r'heads'] = [bin(s) for s in head]
999 999 # TODO: get desired bundlecaps from command line.
1000 1000 args[r'bundlecaps'] = None
1001 1001 bundle = repo.getbundle('debug', **args)
1002 1002
1003 1003 bundletype = opts.get('type', 'bzip2').lower()
1004 1004 btypes = {'none': 'HG10UN',
1005 1005 'bzip2': 'HG10BZ',
1006 1006 'gzip': 'HG10GZ',
1007 1007 'bundle2': 'HG20'}
1008 1008 bundletype = btypes.get(bundletype)
1009 1009 if bundletype not in bundle2.bundletypes:
1010 1010 raise error.Abort(_('unknown bundle type specified with --type'))
1011 1011 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1012 1012
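# Illustrative: `hg debuggetbundle http://example.com/repo out.hg -H <hex-node>`
# requests a bundle up to the given full-length head id and writes it to out.hg;
# --type picks the container (none/bzip2/gzip map to HG10UN/HG10BZ/HG10GZ,
# bundle2 to HG20).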
1013 1013 @command('debugignore', [], '[FILE]')
1014 1014 def debugignore(ui, repo, *files, **opts):
1015 1015 """display the combined ignore pattern and information about ignored files
1016 1016
1017 1017 With no argument display the combined ignore pattern.
1018 1018
1019 1019 Given space separated file names, shows if the given file is ignored and
1020 1020 if so, show the ignore rule (file and line number) that matched it.
1021 1021 """
1022 1022 ignore = repo.dirstate._ignore
1023 1023 if not files:
1024 1024 # Show all the patterns
1025 1025 ui.write("%s\n" % pycompat.byterepr(ignore))
1026 1026 else:
1027 1027 m = scmutil.match(repo[None], pats=files)
1028 1028 for f in m.files():
1029 1029 nf = util.normpath(f)
1030 1030 ignored = None
1031 1031 ignoredata = None
1032 1032 if nf != '.':
1033 1033 if ignore(nf):
1034 1034 ignored = nf
1035 1035 ignoredata = repo.dirstate._ignorefileandline(nf)
1036 1036 else:
1037 1037 for p in util.finddirs(nf):
1038 1038 if ignore(p):
1039 1039 ignored = p
1040 1040 ignoredata = repo.dirstate._ignorefileandline(p)
1041 1041 break
1042 1042 if ignored:
1043 1043 if ignored == nf:
1044 1044 ui.write(_("%s is ignored\n") % m.uipath(f))
1045 1045 else:
1046 1046 ui.write(_("%s is ignored because of "
1047 1047 "containing folder %s\n")
1048 1048 % (m.uipath(f), ignored))
1049 1049 ignorefile, lineno, line = ignoredata
1050 1050 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1051 1051 % (ignorefile, lineno, line))
1052 1052 else:
1053 1053 ui.write(_("%s is not ignored\n") % m.uipath(f))
1054 1054
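# Illustrative: a bare `hg debugignore` prints the combined ignore matcher,
# while `hg debugignore build/foo.o` (hypothetical path) reports whether the
# file is ignored and, if so, which .hgignore file and line (or containing
# directory rule) matched it.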
1055 1055 @command('debugindex', cmdutil.debugrevlogopts +
1056 1056 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1057 1057 _('[-f FORMAT] -c|-m|FILE'),
1058 1058 optionalrepo=True)
1059 1059 def debugindex(ui, repo, file_=None, **opts):
1060 1060 """dump the contents of an index file"""
1061 1061 opts = pycompat.byteskwargs(opts)
1062 1062 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1063 1063 format = opts.get('format', 0)
1064 1064 if format not in (0, 1):
1065 1065 raise error.Abort(_("unknown format %d") % format)
1066 1066
1067 1067 if ui.debugflag:
1068 1068 shortfn = hex
1069 1069 else:
1070 1070 shortfn = short
1071 1071
1072 1072 # There might not be anything in r, so have a sane default
1073 1073 idlen = 12
1074 1074 for i in r:
1075 1075 idlen = len(shortfn(r.node(i)))
1076 1076 break
1077 1077
1078 1078 if format == 0:
1079 1079 if ui.verbose:
1080 1080 ui.write((" rev offset length linkrev"
1081 1081 " %s %s p2\n") % ("nodeid".ljust(idlen),
1082 1082 "p1".ljust(idlen)))
1083 1083 else:
1084 1084 ui.write((" rev linkrev %s %s p2\n") % (
1085 1085 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1086 1086 elif format == 1:
1087 1087 if ui.verbose:
1088 1088 ui.write((" rev flag offset length size link p1"
1089 1089 " p2 %s\n") % "nodeid".rjust(idlen))
1090 1090 else:
1091 1091 ui.write((" rev flag size link p1 p2 %s\n") %
1092 1092 "nodeid".rjust(idlen))
1093 1093
1094 1094 for i in r:
1095 1095 node = r.node(i)
1096 1096 if format == 0:
1097 1097 try:
1098 1098 pp = r.parents(node)
1099 1099 except Exception:
1100 1100 pp = [nullid, nullid]
1101 1101 if ui.verbose:
1102 1102 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1103 1103 i, r.start(i), r.length(i), r.linkrev(i),
1104 1104 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1105 1105 else:
1106 1106 ui.write("% 6d % 7d %s %s %s\n" % (
1107 1107 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1108 1108 shortfn(pp[1])))
1109 1109 elif format == 1:
1110 1110 pr = r.parentrevs(i)
1111 1111 if ui.verbose:
1112 1112 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1113 1113 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1114 1114 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1115 1115 else:
1116 1116 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1117 1117 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1118 1118 shortfn(node)))
1119 1119
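# Illustrative: `hg debugindex -c` prints the format-0 table (rev, linkrev,
# nodeid, p1, p2, plus offset/length under --verbose), while
# `hg debugindex -f 1 -m` switches to the format-1 layout that also shows the
# per-revision flags and rawsize.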
1120 1120 @command('debugindexdot', cmdutil.debugrevlogopts,
1121 1121 _('-c|-m|FILE'), optionalrepo=True)
1122 1122 def debugindexdot(ui, repo, file_=None, **opts):
1123 1123 """dump an index DAG as a graphviz dot file"""
1124 1124 opts = pycompat.byteskwargs(opts)
1125 1125 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1126 1126 ui.write(("digraph G {\n"))
1127 1127 for i in r:
1128 1128 node = r.node(i)
1129 1129 pp = r.parents(node)
1130 1130 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1131 1131 if pp[1] != nullid:
1132 1132 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1133 1133 ui.write("}\n")
1134 1134
1135 1135 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1136 1136 def debuginstall(ui, **opts):
1137 1137 '''test Mercurial installation
1138 1138
1139 1139 Returns 0 on success.
1140 1140 '''
1141 1141 opts = pycompat.byteskwargs(opts)
1142 1142
1143 1143 def writetemp(contents):
1144 1144 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1145 1145 f = os.fdopen(fd, r"wb")
1146 1146 f.write(contents)
1147 1147 f.close()
1148 1148 return name
1149 1149
1150 1150 problems = 0
1151 1151
1152 1152 fm = ui.formatter('debuginstall', opts)
1153 1153 fm.startitem()
1154 1154
1155 1155 # encoding
1156 1156 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1157 1157 err = None
1158 1158 try:
1159 1159 codecs.lookup(pycompat.sysstr(encoding.encoding))
1160 1160 except LookupError as inst:
1161 1161 err = stringutil.forcebytestr(inst)
1162 1162 problems += 1
1163 1163 fm.condwrite(err, 'encodingerror', _(" %s\n"
1164 1164 " (check that your locale is properly set)\n"), err)
1165 1165
1166 1166 # Python
1167 1167 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1168 1168 pycompat.sysexecutable)
1169 1169 fm.write('pythonver', _("checking Python version (%s)\n"),
1170 1170 ("%d.%d.%d" % sys.version_info[:3]))
1171 1171 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1172 1172 os.path.dirname(pycompat.fsencode(os.__file__)))
1173 1173
1174 1174 security = set(sslutil.supportedprotocols)
1175 1175 if sslutil.hassni:
1176 1176 security.add('sni')
1177 1177
1178 1178 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1179 1179 fm.formatlist(sorted(security), name='protocol',
1180 1180 fmt='%s', sep=','))
1181 1181
1182 1182 # These are warnings, not errors. So don't increment problem count. This
1183 1183 # may change in the future.
1184 1184 if 'tls1.2' not in security:
1185 1185 fm.plain(_(' TLS 1.2 not supported by Python install; '
1186 1186 'network connections lack modern security\n'))
1187 1187 if 'sni' not in security:
1188 1188 fm.plain(_(' SNI not supported by Python install; may have '
1189 1189 'connectivity issues with some servers\n'))
1190 1190
1191 1191 # TODO print CA cert info
1192 1192
1193 1193 # hg version
1194 1194 hgver = util.version()
1195 1195 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1196 1196 hgver.split('+')[0])
1197 1197 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1198 1198 '+'.join(hgver.split('+')[1:]))
1199 1199
1200 1200 # compiled modules
1201 1201 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1202 1202 policy.policy)
1203 1203 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1204 1204 os.path.dirname(pycompat.fsencode(__file__)))
1205 1205
1206 1206 if policy.policy in ('c', 'allow'):
1207 1207 err = None
1208 1208 try:
1209 1209 from .cext import (
1210 1210 base85,
1211 1211 bdiff,
1212 1212 mpatch,
1213 1213 osutil,
1214 1214 )
1215 1215 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1216 1216 except Exception as inst:
1217 1217 err = stringutil.forcebytestr(inst)
1218 1218 problems += 1
1219 1219 fm.condwrite(err, 'extensionserror', " %s\n", err)
1220 1220
1221 1221 compengines = util.compengines._engines.values()
1222 1222 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1223 1223 fm.formatlist(sorted(e.name() for e in compengines),
1224 1224 name='compengine', fmt='%s', sep=', '))
1225 1225 fm.write('compenginesavail', _('checking available compression engines '
1226 1226 '(%s)\n'),
1227 1227 fm.formatlist(sorted(e.name() for e in compengines
1228 1228 if e.available()),
1229 1229 name='compengine', fmt='%s', sep=', '))
1230 1230 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1231 1231 fm.write('compenginesserver', _('checking available compression engines '
1232 1232 'for wire protocol (%s)\n'),
1233 1233 fm.formatlist([e.name() for e in wirecompengines
1234 1234 if e.wireprotosupport()],
1235 1235 name='compengine', fmt='%s', sep=', '))
1236 1236 re2 = 'missing'
1237 1237 if util._re2:
1238 1238 re2 = 'available'
1239 1239 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1240 1240 fm.data(re2=bool(util._re2))
1241 1241
1242 1242 # templates
1243 1243 p = templater.templatepaths()
1244 1244 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1245 1245 fm.condwrite(not p, '', _(" no template directories found\n"))
1246 1246 if p:
1247 1247 m = templater.templatepath("map-cmdline.default")
1248 1248 if m:
1249 1249 # template found, check if it is working
1250 1250 err = None
1251 1251 try:
1252 1252 templater.templater.frommapfile(m)
1253 1253 except Exception as inst:
1254 1254 err = stringutil.forcebytestr(inst)
1255 1255 p = None
1256 1256 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1257 1257 else:
1258 1258 p = None
1259 1259 fm.condwrite(p, 'defaulttemplate',
1260 1260 _("checking default template (%s)\n"), m)
1261 1261 fm.condwrite(not m, 'defaulttemplatenotfound',
1262 1262 _(" template '%s' not found\n"), "default")
1263 1263 if not p:
1264 1264 problems += 1
1265 1265 fm.condwrite(not p, '',
1266 1266 _(" (templates seem to have been installed incorrectly)\n"))
1267 1267
1268 1268 # editor
1269 1269 editor = ui.geteditor()
1270 1270 editor = util.expandpath(editor)
1271 1271 editorbin = procutil.shellsplit(editor)[0]
1272 1272 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1273 1273 cmdpath = procutil.findexe(editorbin)
1274 1274 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1275 1275 _(" No commit editor set and can't find %s in PATH\n"
1276 1276 " (specify a commit editor in your configuration"
1277 1277 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1278 1278 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1279 1279 _(" Can't find editor '%s' in PATH\n"
1280 1280 " (specify a commit editor in your configuration"
1281 1281 " file)\n"), not cmdpath and editorbin)
1282 1282 if not cmdpath and editor != 'vi':
1283 1283 problems += 1
1284 1284
1285 1285 # check username
1286 1286 username = None
1287 1287 err = None
1288 1288 try:
1289 1289 username = ui.username()
1290 1290 except error.Abort as e:
1291 1291 err = stringutil.forcebytestr(e)
1292 1292 problems += 1
1293 1293
1294 1294 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1295 1295 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1296 1296 " (specify a username in your configuration file)\n"), err)
1297 1297
1298 1298 fm.condwrite(not problems, '',
1299 1299 _("no problems detected\n"))
1300 1300 if not problems:
1301 1301 fm.data(problems=problems)
1302 1302 fm.condwrite(problems, 'problems',
1303 1303 _("%d problems detected,"
1304 1304 " please check your install!\n"), problems)
1305 1305 fm.end()
1306 1306
1307 1307 return problems
1308 1308
1309 1309 @command('debugknown', [], _('REPO ID...'), norepo=True)
1310 1310 def debugknown(ui, repopath, *ids, **opts):
1311 1311 """test whether node ids are known to a repo
1312 1312
1313 1313 Every ID must be a full-length hex node id string. Returns a list of 0s
1314 1314 and 1s indicating unknown/known.
1315 1315 """
1316 1316 opts = pycompat.byteskwargs(opts)
1317 1317 repo = hg.peer(ui, opts, repopath)
1318 1318 if not repo.capable('known'):
1319 1319 raise error.Abort("known() not supported by target repository")
1320 1320 flags = repo.known([bin(s) for s in ids])
1321 1321 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1322 1322
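# Illustrative: `hg debugknown http://example.com/repo <node1> <node2>` prints a
# digit string such as "10", one character per queried id: 1 if the remote
# knows the changeset, 0 if it does not.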
1323 1323 @command('debuglabelcomplete', [], _('LABEL...'))
1324 1324 def debuglabelcomplete(ui, repo, *args):
1325 1325 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1326 1326 debugnamecomplete(ui, repo, *args)
1327 1327
1328 1328 @command('debuglocks',
1329 1329 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1330 1330 ('W', 'force-wlock', None,
1331 1331 _('free the working state lock (DANGEROUS)')),
1332 1332 ('s', 'set-lock', None, _('set the store lock until stopped')),
1333 1333 ('S', 'set-wlock', None,
1334 1334 _('set the working state lock until stopped'))],
1335 1335 _('[OPTION]...'))
1336 1336 def debuglocks(ui, repo, **opts):
1337 1337 """show or modify state of locks
1338 1338
1339 1339 By default, this command will show which locks are held. This
1340 1340 includes the user and process holding the lock, the amount of time
1341 1341 the lock has been held, and the machine name where the process is
1342 1342 running if it's not local.
1343 1343
1344 1344 Locks protect the integrity of Mercurial's data, so should be
1345 1345 treated with care. System crashes or other interruptions may cause
1346 1346 locks to not be properly released, though Mercurial will usually
1347 1347 detect and remove such stale locks automatically.
1348 1348
1349 1349 However, detecting stale locks may not always be possible (for
1350 1350 instance, on a shared filesystem). Removing locks may also be
1351 1351 blocked by filesystem permissions.
1352 1352
1353 1353 Setting a lock will prevent other commands from changing the data.
1354 1354 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1355 1355 The set locks are removed when the command exits.
1356 1356
1357 1357 Returns 0 if no locks are held.
1358 1358
1359 1359 """
1360 1360
1361 1361 if opts.get(r'force_lock'):
1362 1362 repo.svfs.unlink('lock')
1363 1363 if opts.get(r'force_wlock'):
1364 1364 repo.vfs.unlink('wlock')
1365 1365 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1366 1366 return 0
1367 1367
1368 1368 locks = []
1369 1369 try:
1370 1370 if opts.get(r'set_wlock'):
1371 1371 try:
1372 1372 locks.append(repo.wlock(False))
1373 1373 except error.LockHeld:
1374 1374 raise error.Abort(_('wlock is already held'))
1375 1375 if opts.get(r'set_lock'):
1376 1376 try:
1377 1377 locks.append(repo.lock(False))
1378 1378 except error.LockHeld:
1379 1379 raise error.Abort(_('lock is already held'))
1380 1380 if len(locks):
1381 1381 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1382 1382 return 0
1383 1383 finally:
1384 1384 release(*locks)
1385 1385
1386 1386 now = time.time()
1387 1387 held = 0
1388 1388
1389 1389 def report(vfs, name, method):
1390 1390 # this causes stale locks to get reaped for more accurate reporting
1391 1391 try:
1392 1392 l = method(False)
1393 1393 except error.LockHeld:
1394 1394 l = None
1395 1395
1396 1396 if l:
1397 1397 l.release()
1398 1398 else:
1399 1399 try:
1400 1400 st = vfs.lstat(name)
1401 1401 age = now - st[stat.ST_MTIME]
1402 1402 user = util.username(st.st_uid)
1403 1403 locker = vfs.readlock(name)
1404 1404 if ":" in locker:
1405 1405 host, pid = locker.split(':')
1406 1406 if host == socket.gethostname():
1407 1407 locker = 'user %s, process %s' % (user, pid)
1408 1408 else:
1409 1409 locker = 'user %s, process %s, host %s' \
1410 1410 % (user, pid, host)
1411 1411 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1412 1412 return 1
1413 1413 except OSError as e:
1414 1414 if e.errno != errno.ENOENT:
1415 1415 raise
1416 1416
1417 1417 ui.write(("%-6s free\n") % (name + ":"))
1418 1418 return 0
1419 1419
1420 1420 held += report(repo.svfs, "lock", repo.lock)
1421 1421 held += report(repo.vfs, "wlock", repo.wlock)
1422 1422
1423 1423 return held
1424 1424
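# Illustrative: a plain `hg debuglocks` prints one line per lock, e.g.
#   lock:  user alice, process 1234, host buildbox (12s)
#   wlock: free
# (the names here are made up) and returns the number of locks held; -s/-S take
# the store or working-copy lock until interrupted, and -L/-W forcibly free one.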
1425 1425 @command('debugmergestate', [], '')
1426 1426 def debugmergestate(ui, repo, *args):
1427 1427 """print merge state
1428 1428
1429 1429 Use --verbose to print out information about whether v1 or v2 merge state
1430 1430 was chosen."""
1431 1431 def _hashornull(h):
1432 1432 if h == nullhex:
1433 1433 return 'null'
1434 1434 else:
1435 1435 return h
1436 1436
1437 1437 def printrecords(version):
1438 1438 ui.write(('* version %d records\n') % version)
1439 1439 if version == 1:
1440 1440 records = v1records
1441 1441 else:
1442 1442 records = v2records
1443 1443
1444 1444 for rtype, record in records:
1445 1445 # pretty print some record types
1446 1446 if rtype == 'L':
1447 1447 ui.write(('local: %s\n') % record)
1448 1448 elif rtype == 'O':
1449 1449 ui.write(('other: %s\n') % record)
1450 1450 elif rtype == 'm':
1451 1451 driver, mdstate = record.split('\0', 1)
1452 1452 ui.write(('merge driver: %s (state "%s")\n')
1453 1453 % (driver, mdstate))
1454 1454 elif rtype in 'FDC':
1455 1455 r = record.split('\0')
1456 1456 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1457 1457 if version == 1:
1458 1458 onode = 'not stored in v1 format'
1459 1459 flags = r[7]
1460 1460 else:
1461 1461 onode, flags = r[7:9]
1462 1462 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1463 1463 % (f, rtype, state, _hashornull(hash)))
1464 1464 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1465 1465 ui.write((' ancestor path: %s (node %s)\n')
1466 1466 % (afile, _hashornull(anode)))
1467 1467 ui.write((' other path: %s (node %s)\n')
1468 1468 % (ofile, _hashornull(onode)))
1469 1469 elif rtype == 'f':
1470 1470 filename, rawextras = record.split('\0', 1)
1471 1471 extras = rawextras.split('\0')
1472 1472 i = 0
1473 1473 extrastrings = []
1474 1474 while i < len(extras):
1475 1475 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1476 1476 i += 2
1477 1477
1478 1478 ui.write(('file extras: %s (%s)\n')
1479 1479 % (filename, ', '.join(extrastrings)))
1480 1480 elif rtype == 'l':
1481 1481 labels = record.split('\0', 2)
1482 1482 labels = [l for l in labels if len(l) > 0]
1483 1483 ui.write(('labels:\n'))
1484 1484 ui.write((' local: %s\n' % labels[0]))
1485 1485 ui.write((' other: %s\n' % labels[1]))
1486 1486 if len(labels) > 2:
1487 1487 ui.write((' base: %s\n' % labels[2]))
1488 1488 else:
1489 1489 ui.write(('unrecognized entry: %s\t%s\n')
1490 1490 % (rtype, record.replace('\0', '\t')))
1491 1491
1492 1492 # Avoid mergestate.read() since it may raise an exception for unsupported
1493 1493 # merge state records. We shouldn't be doing this, but this is OK since this
1494 1494 # command is pretty low-level.
1495 1495 ms = mergemod.mergestate(repo)
1496 1496
1497 1497 # sort so that reasonable information is on top
1498 1498 v1records = ms._readrecordsv1()
1499 1499 v2records = ms._readrecordsv2()
1500 1500 order = 'LOml'
1501 1501 def key(r):
1502 1502 idx = order.find(r[0])
1503 1503 if idx == -1:
1504 1504 return (1, r[1])
1505 1505 else:
1506 1506 return (0, idx)
1507 1507 v1records.sort(key=key)
1508 1508 v2records.sort(key=key)
1509 1509
1510 1510 if not v1records and not v2records:
1511 1511 ui.write(('no merge state found\n'))
1512 1512 elif not v2records:
1513 1513 ui.note(('no version 2 merge state\n'))
1514 1514 printrecords(1)
1515 1515 elif ms._v1v2match(v1records, v2records):
1516 1516 ui.note(('v1 and v2 states match: using v2\n'))
1517 1517 printrecords(2)
1518 1518 else:
1519 1519 ui.note(('v1 and v2 states mismatch: using v1\n'))
1520 1520 printrecords(1)
1521 1521 if ui.verbose:
1522 1522 printrecords(2)
1523 1523
1524 1524 @command('debugnamecomplete', [], _('NAME...'))
1525 1525 def debugnamecomplete(ui, repo, *args):
1526 1526 '''complete "names" - tags, open branch names, bookmark names'''
1527 1527
1528 1528 names = set()
1529 1529 # since we previously only listed open branches, we will handle that
1530 1530 # specially (after this for loop)
1531 1531 for name, ns in repo.names.iteritems():
1532 1532 if name != 'branches':
1533 1533 names.update(ns.listnames(repo))
1534 1534 names.update(tag for (tag, heads, tip, closed)
1535 1535 in repo.branchmap().iterbranches() if not closed)
1536 1536 completions = set()
1537 1537 if not args:
1538 1538 args = ['']
1539 1539 for a in args:
1540 1540 completions.update(n for n in names if n.startswith(a))
1541 1541 ui.write('\n'.join(sorted(completions)))
1542 1542 ui.write('\n')
1543 1543
1544 1544 @command('debugobsolete',
1545 1545 [('', 'flags', 0, _('markers flag')),
1546 1546 ('', 'record-parents', False,
1547 1547 _('record parent information for the precursor')),
1548 1548 ('r', 'rev', [], _('display markers relevant to REV')),
1549 1549 ('', 'exclusive', False, _('restrict display to markers only '
1550 1550 'relevant to REV')),
1551 1551 ('', 'index', False, _('display index of the marker')),
1552 1552 ('', 'delete', [], _('delete markers specified by indices')),
1553 1553 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1554 1554 _('[OBSOLETED [REPLACEMENT ...]]'))
1555 1555 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1556 1556 """create arbitrary obsolete marker
1557 1557
1558 1558 With no arguments, displays the list of obsolescence markers."""
1559 1559
1560 1560 opts = pycompat.byteskwargs(opts)
1561 1561
1562 1562 def parsenodeid(s):
1563 1563 try:
1564 1564 # We do not use revsingle/revrange functions here to accept
1565 1565 # arbitrary node identifiers, possibly not present in the
1566 1566 # local repository.
1567 1567 n = bin(s)
1568 1568 if len(n) != len(nullid):
1569 1569 raise TypeError()
1570 1570 return n
1571 1571 except TypeError:
1572 1572 raise error.Abort('changeset references must be full hexadecimal '
1573 1573 'node identifiers')
1574 1574
1575 1575 if opts.get('delete'):
1576 1576 indices = []
1577 1577 for v in opts.get('delete'):
1578 1578 try:
1579 1579 indices.append(int(v))
1580 1580 except ValueError:
1581 1581 raise error.Abort(_('invalid index value: %r') % v,
1582 1582 hint=_('use integers for indices'))
1583 1583
1584 1584 if repo.currenttransaction():
1585 1585 raise error.Abort(_('cannot delete obsmarkers in the middle '
1586 1586 'of transaction.'))
1587 1587
1588 1588 with repo.lock():
1589 1589 n = repair.deleteobsmarkers(repo.obsstore, indices)
1590 1590 ui.write(_('deleted %i obsolescence markers\n') % n)
1591 1591
1592 1592 return
1593 1593
1594 1594 if precursor is not None:
1595 1595 if opts['rev']:
1596 1596 raise error.Abort('cannot select revision when creating marker')
1597 1597 metadata = {}
1598 1598 metadata['user'] = opts['user'] or ui.username()
1599 1599 succs = tuple(parsenodeid(succ) for succ in successors)
1600 1600 l = repo.lock()
1601 1601 try:
1602 1602 tr = repo.transaction('debugobsolete')
1603 1603 try:
1604 1604 date = opts.get('date')
1605 1605 if date:
1606 1606 date = dateutil.parsedate(date)
1607 1607 else:
1608 1608 date = None
1609 1609 prec = parsenodeid(precursor)
1610 1610 parents = None
1611 1611 if opts['record_parents']:
1612 1612 if prec not in repo.unfiltered():
1613 1613 raise error.Abort('cannot use --record-parents on '
1614 1614 'unknown changesets')
1615 1615 parents = repo.unfiltered()[prec].parents()
1616 1616 parents = tuple(p.node() for p in parents)
1617 1617 repo.obsstore.create(tr, prec, succs, opts['flags'],
1618 1618 parents=parents, date=date,
1619 1619 metadata=metadata, ui=ui)
1620 1620 tr.close()
1621 1621 except ValueError as exc:
1622 1622 raise error.Abort(_('bad obsmarker input: %s') %
1623 1623 pycompat.bytestr(exc))
1624 1624 finally:
1625 1625 tr.release()
1626 1626 finally:
1627 1627 l.release()
1628 1628 else:
1629 1629 if opts['rev']:
1630 1630 revs = scmutil.revrange(repo, opts['rev'])
1631 1631 nodes = [repo[r].node() for r in revs]
1632 1632 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1633 1633 exclusive=opts['exclusive']))
1634 1634 markers.sort(key=lambda x: x._data)
1635 1635 else:
1636 1636 markers = obsutil.getmarkers(repo)
1637 1637
1638 1638 markerstoiter = markers
1639 1639 isrelevant = lambda m: True
1640 1640 if opts.get('rev') and opts.get('index'):
1641 1641 markerstoiter = obsutil.getmarkers(repo)
1642 1642 markerset = set(markers)
1643 1643 isrelevant = lambda m: m in markerset
1644 1644
1645 1645 fm = ui.formatter('debugobsolete', opts)
1646 1646 for i, m in enumerate(markerstoiter):
1647 1647 if not isrelevant(m):
1648 1648 # marker can be irrelevant when we're iterating over a set
1649 1649 # of markers (markerstoiter) which is bigger than the set
1650 1650 # of markers we want to display (markers)
1651 1651 # this can happen if both --index and --rev options are
1652 1652 # provided and thus we need to iterate over all of the markers
1653 1653 # to get the correct indices, but only display the ones that
1654 1654 # are relevant to --rev value
1655 1655 continue
1656 1656 fm.startitem()
1657 1657 ind = i if opts.get('index') else None
1658 1658 cmdutil.showmarker(fm, m, index=ind)
1659 1659 fm.end()
1660 1660
1661 1661 @command('debugpathcomplete',
1662 1662 [('f', 'full', None, _('complete an entire path')),
1663 1663 ('n', 'normal', None, _('show only normal files')),
1664 1664 ('a', 'added', None, _('show only added files')),
1665 1665 ('r', 'removed', None, _('show only removed files'))],
1666 1666 _('FILESPEC...'))
1667 1667 def debugpathcomplete(ui, repo, *specs, **opts):
1668 1668 '''complete part or all of a tracked path
1669 1669
1670 1670 This command supports shells that offer path name completion. It
1671 1671 currently completes only files already known to the dirstate.
1672 1672
1673 1673 Completion extends only to the next path segment unless
1674 1674 --full is specified, in which case entire paths are used.'''
1675 1675
1676 1676 def complete(path, acceptable):
1677 1677 dirstate = repo.dirstate
1678 1678 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1679 1679 rootdir = repo.root + pycompat.ossep
1680 1680 if spec != repo.root and not spec.startswith(rootdir):
1681 1681 return [], []
1682 1682 if os.path.isdir(spec):
1683 1683 spec += '/'
1684 1684 spec = spec[len(rootdir):]
1685 1685 fixpaths = pycompat.ossep != '/'
1686 1686 if fixpaths:
1687 1687 spec = spec.replace(pycompat.ossep, '/')
1688 1688 speclen = len(spec)
1689 1689 fullpaths = opts[r'full']
1690 1690 files, dirs = set(), set()
1691 1691 adddir, addfile = dirs.add, files.add
1692 1692 for f, st in dirstate.iteritems():
1693 1693 if f.startswith(spec) and st[0] in acceptable:
1694 1694 if fixpaths:
1695 1695 f = f.replace('/', pycompat.ossep)
1696 1696 if fullpaths:
1697 1697 addfile(f)
1698 1698 continue
1699 1699 s = f.find(pycompat.ossep, speclen)
1700 1700 if s >= 0:
1701 1701 adddir(f[:s])
1702 1702 else:
1703 1703 addfile(f)
1704 1704 return files, dirs
1705 1705
1706 1706 acceptable = ''
1707 1707 if opts[r'normal']:
1708 1708 acceptable += 'nm'
1709 1709 if opts[r'added']:
1710 1710 acceptable += 'a'
1711 1711 if opts[r'removed']:
1712 1712 acceptable += 'r'
1713 1713 cwd = repo.getcwd()
1714 1714 if not specs:
1715 1715 specs = ['.']
1716 1716
1717 1717 files, dirs = set(), set()
1718 1718 for spec in specs:
1719 1719 f, d = complete(spec, acceptable or 'nmar')
1720 1720 files.update(f)
1721 1721 dirs.update(d)
1722 1722 files.update(dirs)
1723 1723 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1724 1724 ui.write('\n')
1725 1725
1726 1726 @command('debugpeer', [], _('PATH'), norepo=True)
1727 1727 def debugpeer(ui, path):
1728 1728 """establish a connection to a peer repository"""
1729 1729 # Always enable peer request logging. Requires --debug to display
1730 1730 # though.
1731 1731 overrides = {
1732 1732 ('devel', 'debug.peer-request'): True,
1733 1733 }
1734 1734
1735 1735 with ui.configoverride(overrides):
1736 1736 peer = hg.peer(ui, {}, path)
1737 1737
1738 1738 local = peer.local() is not None
1739 1739 canpush = peer.canpush()
1740 1740
1741 1741 ui.write(_('url: %s\n') % peer.url())
1742 1742 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1743 1743 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1744 1744
1745 1745 @command('debugpickmergetool',
1746 1746 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1747 1747 ('', 'changedelete', None, _('emulate merging change and delete')),
1748 1748 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1749 1749 _('[PATTERN]...'),
1750 1750 inferrepo=True)
1751 1751 def debugpickmergetool(ui, repo, *pats, **opts):
1752 1752 """examine which merge tool is chosen for specified file
1753 1753
1754 1754 As described in :hg:`help merge-tools`, Mercurial examines the
1755 1755 configurations below in this order to decide which merge tool is
1756 1756 chosen for the specified file.
1757 1757
1758 1758 1. ``--tool`` option
1759 1759 2. ``HGMERGE`` environment variable
1760 1760 3. configurations in ``merge-patterns`` section
1761 1761 4. configuration of ``ui.merge``
1762 1762 5. configurations in ``merge-tools`` section
1763 1763 6. ``hgmerge`` tool (for historical reasons only)
1764 1764 7. default tool for fallback (``:merge`` or ``:prompt``)
1765 1765
1766 1766 This command writes out the examination result in the style below::
1767 1767
1768 1768 FILE = MERGETOOL
1769 1769
1770 1770 By default, all files known in the first parent context of the
1771 1771 working directory are examined. Use file patterns and/or -I/-X
1772 1772 options to limit the target files. -r/--rev is also useful for
1773 1773 examining files in another context without actually updating to it.
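
For example, the tool that would be picked for files in the working copy
parent can be checked with (the file pattern is illustrative)::

  hg debugpickmergetool -r . 'glob:**.txt'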
1774 1774
1775 1775 With --debug, this command also shows warning messages while matching
1776 1776 against ``merge-patterns`` and so on. It is recommended to
1777 1777 use this option with explicit file patterns and/or -I/-X options,
1778 1778 because this option increases the amount of output per file according
1779 1779 to the configuration in hgrc.
1780 1780
1781 1781 With -v/--verbose, this command first shows the configurations
1782 1782 below (only if they are specified).
1783 1783
1784 1784 - ``--tool`` option
1785 1785 - ``HGMERGE`` environment variable
1786 1786 - configuration of ``ui.merge``
1787 1787
1788 1788 If a merge tool is chosen before matching against
1789 1789 ``merge-patterns``, this command can't show any helpful
1790 1790 information, even with --debug. In such a case, the information above
1791 1791 is useful for understanding why a merge tool was chosen.
1792 1792 """
1793 1793 opts = pycompat.byteskwargs(opts)
1794 1794 overrides = {}
1795 1795 if opts['tool']:
1796 1796 overrides[('ui', 'forcemerge')] = opts['tool']
1797 1797 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1798 1798
1799 1799 with ui.configoverride(overrides, 'debugmergepatterns'):
1800 1800 hgmerge = encoding.environ.get("HGMERGE")
1801 1801 if hgmerge is not None:
1802 1802 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1803 1803 uimerge = ui.config("ui", "merge")
1804 1804 if uimerge:
1805 1805 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1806 1806
1807 1807 ctx = scmutil.revsingle(repo, opts.get('rev'))
1808 1808 m = scmutil.match(ctx, pats, opts)
1809 1809 changedelete = opts['changedelete']
1810 1810 for path in ctx.walk(m):
1811 1811 fctx = ctx[path]
1812 1812 try:
1813 1813 if not ui.debugflag:
1814 1814 ui.pushbuffer(error=True)
1815 1815 tool, toolpath = filemerge._picktool(repo, ui, path,
1816 1816 fctx.isbinary(),
1817 1817 'l' in fctx.flags(),
1818 1818 changedelete)
1819 1819 finally:
1820 1820 if not ui.debugflag:
1821 1821 ui.popbuffer()
1822 1822 ui.write(('%s = %s\n') % (path, tool))
1823 1823
1824 1824 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1825 1825 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1826 1826 '''access the pushkey key/value protocol
1827 1827
1828 1828 With two args, list the keys in the given namespace.
1829 1829
1830 1830 With five args, set a key to new if it currently is set to old.
1831 1831 Reports success or failure.
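
For example, to list the keys in the ``bookmarks`` namespace of a
repository (the path is illustrative)::

  hg debugpushkey /path/to/repo bookmarks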
1832 1832 '''
1833 1833
1834 1834 target = hg.peer(ui, {}, repopath)
1835 1835 if keyinfo:
1836 1836 key, old, new = keyinfo
1837 1837 with target.commandexecutor() as e:
1838 1838 r = e.callcommand('pushkey', {
1839 1839 'namespace': namespace,
1840 1840 'key': key,
1841 1841 'old': old,
1842 1842 'new': new,
1843 1843 }).result()
1844 1844
1845 1845 ui.status(pycompat.bytestr(r) + '\n')
1846 1846 return not r
1847 1847 else:
1848 1848 for k, v in sorted(target.listkeys(namespace).iteritems()):
1849 1849 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1850 1850 stringutil.escapestr(v)))
1851 1851
1852 1852 @command('debugpvec', [], _('A B'))
1853 1853 def debugpvec(ui, repo, a, b=None):
1854 1854 ca = scmutil.revsingle(repo, a)
1855 1855 cb = scmutil.revsingle(repo, b)
1856 1856 pa = pvec.ctxpvec(ca)
1857 1857 pb = pvec.ctxpvec(cb)
1858 1858 if pa == pb:
1859 1859 rel = "="
1860 1860 elif pa > pb:
1861 1861 rel = ">"
1862 1862 elif pa < pb:
1863 1863 rel = "<"
1864 1864 elif pa | pb:
1865 1865 rel = "|"
1866 1866 ui.write(_("a: %s\n") % pa)
1867 1867 ui.write(_("b: %s\n") % pb)
1868 1868 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1869 1869 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1870 1870 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1871 1871 pa.distance(pb), rel))
1872 1872
1873 1873 @command('debugrebuilddirstate|debugrebuildstate',
1874 1874 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1875 1875 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1876 1876 'the working copy parent')),
1877 1877 ],
1878 1878 _('[-r REV]'))
1879 1879 def debugrebuilddirstate(ui, repo, rev, **opts):
1880 1880 """rebuild the dirstate as it would look like for the given revision
1881 1881
1882 1882 If no revision is specified the first current parent will be used.
1883 1883
1884 1884 The dirstate will be set to the files of the given revision.
1885 1885 The actual working directory content or existing dirstate
1886 1886 information such as adds or removes is not considered.
1887 1887
1888 1888 ``minimal`` will only rebuild the dirstate status for files that claim to be
1889 1889 tracked but are not in the parent manifest, or that exist in the parent
1890 1890 manifest but are not in the dirstate. It will not change adds, removes, or
1891 1891 modified files that are in the working copy parent.
1892 1892
1893 1893 One use of this command is to make the next :hg:`status` invocation
1894 1894 check the actual file content.
1895 1895 """
1896 1896 ctx = scmutil.revsingle(repo, rev)
1897 1897 with repo.wlock():
1898 1898 dirstate = repo.dirstate
1899 1899 changedfiles = None
1900 1900 # See command doc for what minimal does.
1901 1901 if opts.get(r'minimal'):
1902 1902 manifestfiles = set(ctx.manifest().keys())
1903 1903 dirstatefiles = set(dirstate)
1904 1904 manifestonly = manifestfiles - dirstatefiles
1905 1905 dsonly = dirstatefiles - manifestfiles
1906 1906 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1907 1907 changedfiles = manifestonly | dsnotadded
1908 1908
1909 1909 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1910 1910
1911 1911 @command('debugrebuildfncache', [], '')
1912 1912 def debugrebuildfncache(ui, repo):
1913 1913 """rebuild the fncache file"""
1914 1914 repair.rebuildfncache(ui, repo)
1915 1915
1916 1916 @command('debugrename',
1917 1917 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1918 1918 _('[-r REV] FILE'))
1919 1919 def debugrename(ui, repo, file1, *pats, **opts):
1920 1920 """dump rename information"""
1921 1921
1922 1922 opts = pycompat.byteskwargs(opts)
1923 1923 ctx = scmutil.revsingle(repo, opts.get('rev'))
1924 1924 m = scmutil.match(ctx, (file1,) + pats, opts)
1925 1925 for abs in ctx.walk(m):
1926 1926 fctx = ctx[abs]
1927 1927 o = fctx.filelog().renamed(fctx.filenode())
1928 1928 rel = m.rel(abs)
1929 1929 if o:
1930 1930 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1931 1931 else:
1932 1932 ui.write(_("%s not renamed\n") % rel)
1933 1933
1934 1934 @command('debugrevlog', cmdutil.debugrevlogopts +
1935 1935 [('d', 'dump', False, _('dump index data'))],
1936 1936 _('-c|-m|FILE'),
1937 1937 optionalrepo=True)
1938 1938 def debugrevlog(ui, repo, file_=None, **opts):
1939 1939 """show data and statistics about a revlog"""
1940 1940 opts = pycompat.byteskwargs(opts)
1941 1941 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1942 1942
1943 1943 if opts.get("dump"):
1944 1944 numrevs = len(r)
1945 1945 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1946 1946 " rawsize totalsize compression heads chainlen\n"))
1947 1947 ts = 0
1948 1948 heads = set()
1949 1949
1950 1950 for rev in xrange(numrevs):
1951 1951 dbase = r.deltaparent(rev)
1952 1952 if dbase == -1:
1953 1953 dbase = rev
1954 1954 cbase = r.chainbase(rev)
1955 1955 clen = r.chainlen(rev)
1956 1956 p1, p2 = r.parentrevs(rev)
1957 1957 rs = r.rawsize(rev)
1958 1958 ts = ts + rs
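# maintain the set of current heads: once a revision shows up as a
# parent it stops being a head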
1959 1959 heads -= set(r.parentrevs(rev))
1960 1960 heads.add(rev)
1961 1961 try:
1962 1962 compression = ts / r.end(rev)
1963 1963 except ZeroDivisionError:
1964 1964 compression = 0
1965 1965 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1966 1966 "%11d %5d %8d\n" %
1967 1967 (rev, p1, p2, r.start(rev), r.end(rev),
1968 1968 r.start(dbase), r.start(cbase),
1969 1969 r.start(p1), r.start(p2),
1970 1970 rs, ts, compression, len(heads), clen))
1971 1971 return 0
1972 1972
1973 1973 v = r.version
1974 1974 format = v & 0xFFFF
1975 1975 flags = []
1976 1976 gdelta = False
1977 1977 if v & revlog.FLAG_INLINE_DATA:
1978 1978 flags.append('inline')
1979 1979 if v & revlog.FLAG_GENERALDELTA:
1980 1980 gdelta = True
1981 1981 flags.append('generaldelta')
1982 1982 if not flags:
1983 1983 flags = ['(none)']
1984 1984
1985 1985 nummerges = 0
1986 1986 numfull = 0
1987 1987 numprev = 0
1988 1988 nump1 = 0
1989 1989 nump2 = 0
1990 1990 numother = 0
1991 1991 nump1prev = 0
1992 1992 nump2prev = 0
1993 1993 chainlengths = []
1994 1994 chainbases = []
1995 1995 chainspans = []
1996 1996
1997 1997 datasize = [None, 0, 0]
1998 1998 fullsize = [None, 0, 0]
1999 1999 deltasize = [None, 0, 0]
2000 2000 chunktypecounts = {}
2001 2001 chunktypesizes = {}
2002 2002
2003 2003 def addsize(size, l):
2004 2004 if l[0] is None or size < l[0]:
2005 2005 l[0] = size
2006 2006 if size > l[1]:
2007 2007 l[1] = size
2008 2008 l[2] += size
2009 2009
2010 2010 numrevs = len(r)
2011 2011 for rev in xrange(numrevs):
2012 2012 p1, p2 = r.parentrevs(rev)
2013 2013 delta = r.deltaparent(rev)
2014 2014 if format > 0:
2015 2015 addsize(r.rawsize(rev), datasize)
2016 2016 if p2 != nullrev:
2017 2017 nummerges += 1
2018 2018 size = r.length(rev)
2019 2019 if delta == nullrev:
2020 2020 chainlengths.append(0)
2021 2021 chainbases.append(r.start(rev))
2022 2022 chainspans.append(size)
2023 2023 numfull += 1
2024 2024 addsize(size, fullsize)
2025 2025 else:
2026 2026 chainlengths.append(chainlengths[delta] + 1)
2027 2027 baseaddr = chainbases[delta]
2028 2028 revaddr = r.start(rev)
2029 2029 chainbases.append(baseaddr)
2030 2030 chainspans.append((revaddr - baseaddr) + size)
2031 2031 addsize(size, deltasize)
2032 2032 if delta == rev - 1:
2033 2033 numprev += 1
2034 2034 if delta == p1:
2035 2035 nump1prev += 1
2036 2036 elif delta == p2:
2037 2037 nump2prev += 1
2038 2038 elif delta == p1:
2039 2039 nump1 += 1
2040 2040 elif delta == p2:
2041 2041 nump2 += 1
2042 2042 elif delta != nullrev:
2043 2043 numother += 1
2044 2044
2045 2045 # Obtain data on the raw chunks in the revlog.
2046 2046 segment = r._getsegmentforrevs(rev, rev)[1]
2047 2047 if segment:
2048 2048 chunktype = bytes(segment[0:1])
2049 2049 else:
2050 2050 chunktype = 'empty'
2051 2051
2052 2052 if chunktype not in chunktypecounts:
2053 2053 chunktypecounts[chunktype] = 0
2054 2054 chunktypesizes[chunktype] = 0
2055 2055
2056 2056 chunktypecounts[chunktype] += 1
2057 2057 chunktypesizes[chunktype] += size
2058 2058
2059 2059 # Adjust size min value for empty cases
2060 2060 for size in (datasize, fullsize, deltasize):
2061 2061 if size[0] is None:
2062 2062 size[0] = 0
2063 2063
2064 2064 numdeltas = numrevs - numfull
2065 2065 numoprev = numprev - nump1prev - nump2prev
2066 2066 totalrawsize = datasize[2]
2067 2067 datasize[2] /= numrevs
2068 2068 fulltotal = fullsize[2]
2069 2069 fullsize[2] /= numfull
2070 2070 deltatotal = deltasize[2]
2071 2071 if numrevs - numfull > 0:
2072 2072 deltasize[2] /= numrevs - numfull
2073 2073 totalsize = fulltotal + deltatotal
2074 2074 avgchainlen = sum(chainlengths) / numrevs
2075 2075 maxchainlen = max(chainlengths)
2076 2076 maxchainspan = max(chainspans)
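# overall compression ratio: total uncompressed size over stored
# (full + delta) size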
2077 2077 compratio = 1
2078 2078 if totalsize:
2079 2079 compratio = totalrawsize / totalsize
2080 2080
2081 2081 basedfmtstr = '%%%dd\n'
2082 2082 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2083 2083
2084 2084 def dfmtstr(max):
2085 2085 return basedfmtstr % len(str(max))
2086 2086 def pcfmtstr(max, padding=0):
2087 2087 return basepcfmtstr % (len(str(max)), ' ' * padding)
2088 2088
2089 2089 def pcfmt(value, total):
2090 2090 if total:
2091 2091 return (value, 100 * float(value) / total)
2092 2092 else:
2093 2093 return value, 100.0
2094 2094
2095 2095 ui.write(('format : %d\n') % format)
2096 2096 ui.write(('flags : %s\n') % ', '.join(flags))
2097 2097
2098 2098 ui.write('\n')
2099 2099 fmt = pcfmtstr(totalsize)
2100 2100 fmt2 = dfmtstr(totalsize)
2101 2101 ui.write(('revisions : ') + fmt2 % numrevs)
2102 2102 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2103 2103 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2104 2104 ui.write(('revisions : ') + fmt2 % numrevs)
2105 2105 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2106 2106 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2107 2107 ui.write(('revision size : ') + fmt2 % totalsize)
2108 2108 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2109 2109 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2110 2110
2111 2111 def fmtchunktype(chunktype):
2112 2112 if chunktype == 'empty':
2113 2113 return ' %s : ' % chunktype
2114 2114 elif chunktype in pycompat.bytestr(string.ascii_letters):
2115 2115 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2116 2116 else:
2117 2117 return ' 0x%s : ' % hex(chunktype)
2118 2118
2119 2119 ui.write('\n')
2120 2120 ui.write(('chunks : ') + fmt2 % numrevs)
2121 2121 for chunktype in sorted(chunktypecounts):
2122 2122 ui.write(fmtchunktype(chunktype))
2123 2123 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2124 2124 ui.write(('chunks size : ') + fmt2 % totalsize)
2125 2125 for chunktype in sorted(chunktypecounts):
2126 2126 ui.write(fmtchunktype(chunktype))
2127 2127 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2128 2128
2129 2129 ui.write('\n')
2130 2130 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2131 2131 ui.write(('avg chain length : ') + fmt % avgchainlen)
2132 2132 ui.write(('max chain length : ') + fmt % maxchainlen)
2133 2133 ui.write(('max chain reach : ') + fmt % maxchainspan)
2134 2134 ui.write(('compression ratio : ') + fmt % compratio)
2135 2135
2136 2136 if format > 0:
2137 2137 ui.write('\n')
2138 2138 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2139 2139 % tuple(datasize))
2140 2140 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2141 2141 % tuple(fullsize))
2142 2142 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2143 2143 % tuple(deltasize))
2144 2144
2145 2145 if numdeltas > 0:
2146 2146 ui.write('\n')
2147 2147 fmt = pcfmtstr(numdeltas)
2148 2148 fmt2 = pcfmtstr(numdeltas, 4)
2149 2149 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2150 2150 if numprev > 0:
2151 2151 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2152 2152 numprev))
2153 2153 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2154 2154 numprev))
2155 2155 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2156 2156 numprev))
2157 2157 if gdelta:
2158 2158 ui.write(('deltas against p1 : ')
2159 2159 + fmt % pcfmt(nump1, numdeltas))
2160 2160 ui.write(('deltas against p2 : ')
2161 2161 + fmt % pcfmt(nump2, numdeltas))
2162 2162 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2163 2163 numdeltas))
2164 2164
2165 2165 @command('debugrevspec',
2166 2166 [('', 'optimize', None,
2167 2167 _('print parsed tree after optimizing (DEPRECATED)')),
2168 2168 ('', 'show-revs', True, _('print list of result revisions (default)')),
2169 2169 ('s', 'show-set', None, _('print internal representation of result set')),
2170 2170 ('p', 'show-stage', [],
2171 2171 _('print parsed tree at the given stage'), _('NAME')),
2172 2172 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2173 2173 ('', 'verify-optimized', False, _('verify optimized result')),
2174 2174 ],
2175 2175 ('REVSPEC'))
2176 2176 def debugrevspec(ui, repo, expr, **opts):
2177 2177 """parse and apply a revision specification
2178 2178
2179 2179 Use the -p/--show-stage option to print the parsed tree at the given stages.
2180 2180 Use -p all to print the tree at every stage.
2181 2181 
2182 2182 Use the --no-show-revs option with -s or -p to print only the set
2183 2183 representation or the parsed tree, respectively.
2184 2184
2185 2185 Use --verify-optimized to compare the optimized result with the unoptimized
2186 2186 one. Returns 1 if the optimized result differs.
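
For example, to print both the analyzed and the optimized tree for an
expression (the revset shown is illustrative)::

  hg debugrevspec -p analyzed -p optimized 'heads(all())'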
2187 2187 """
2188 2188 opts = pycompat.byteskwargs(opts)
2189 2189 aliases = ui.configitems('revsetalias')
2190 2190 stages = [
2191 2191 ('parsed', lambda tree: tree),
2192 2192 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2193 2193 ui.warn)),
2194 2194 ('concatenated', revsetlang.foldconcat),
2195 2195 ('analyzed', revsetlang.analyze),
2196 2196 ('optimized', revsetlang.optimize),
2197 2197 ]
2198 2198 if opts['no_optimized']:
2199 2199 stages = stages[:-1]
2200 2200 if opts['verify_optimized'] and opts['no_optimized']:
2201 2201 raise error.Abort(_('cannot use --verify-optimized with '
2202 2202 '--no-optimized'))
2203 2203 stagenames = set(n for n, f in stages)
2204 2204
2205 2205 showalways = set()
2206 2206 showchanged = set()
2207 2207 if ui.verbose and not opts['show_stage']:
2208 2208 # show parsed tree by --verbose (deprecated)
2209 2209 showalways.add('parsed')
2210 2210 showchanged.update(['expanded', 'concatenated'])
2211 2211 if opts['optimize']:
2212 2212 showalways.add('optimized')
2213 2213 if opts['show_stage'] and opts['optimize']:
2214 2214 raise error.Abort(_('cannot use --optimize with --show-stage'))
2215 2215 if opts['show_stage'] == ['all']:
2216 2216 showalways.update(stagenames)
2217 2217 else:
2218 2218 for n in opts['show_stage']:
2219 2219 if n not in stagenames:
2220 2220 raise error.Abort(_('invalid stage name: %s') % n)
2221 2221 showalways.update(opts['show_stage'])
2222 2222
2223 2223 treebystage = {}
2224 2224 printedtree = None
2225 2225 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2226 2226 for n, f in stages:
2227 2227 treebystage[n] = tree = f(tree)
2228 2228 if n in showalways or (n in showchanged and tree != printedtree):
2229 2229 if opts['show_stage'] or n != 'parsed':
2230 2230 ui.write(("* %s:\n") % n)
2231 2231 ui.write(revsetlang.prettyformat(tree), "\n")
2232 2232 printedtree = tree
2233 2233
2234 2234 if opts['verify_optimized']:
2235 2235 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2236 2236 brevs = revset.makematcher(treebystage['optimized'])(repo)
2237 2237 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2238 2238 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2239 2239 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2240 2240 arevs = list(arevs)
2241 2241 brevs = list(brevs)
2242 2242 if arevs == brevs:
2243 2243 return 0
2244 2244 ui.write(('--- analyzed\n'), label='diff.file_a')
2245 2245 ui.write(('+++ optimized\n'), label='diff.file_b')
2246 2246 sm = difflib.SequenceMatcher(None, arevs, brevs)
2247 2247 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2248 2248 if tag in ('delete', 'replace'):
2249 2249 for c in arevs[alo:ahi]:
2250 2250 ui.write('-%s\n' % c, label='diff.deleted')
2251 2251 if tag in ('insert', 'replace'):
2252 2252 for c in brevs[blo:bhi]:
2253 2253 ui.write('+%s\n' % c, label='diff.inserted')
2254 2254 if tag == 'equal':
2255 2255 for c in arevs[alo:ahi]:
2256 2256 ui.write(' %s\n' % c)
2257 2257 return 1
2258 2258
2259 2259 func = revset.makematcher(tree)
2260 2260 revs = func(repo)
2261 2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2262 2262 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2263 2263 if not opts['show_revs']:
2264 2264 return
2265 2265 for c in revs:
2266 2266 ui.write("%d\n" % c)
2267 2267
2268 2268 @command('debugserve', [
2269 2269 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2270 2270 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2271 2271 ('', 'logiofile', '', _('file to log server I/O to')),
2272 2272 ], '')
2273 2273 def debugserve(ui, repo, **opts):
2274 2274 """run a server with advanced settings
2275 2275
2276 2276 This command is similar to :hg:`serve`. It exists partially as a
2277 2277 workaround to the fact that ``hg serve --stdio`` must have specific
2278 2278 arguments for security reasons.
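
For example, to serve over the process handles while logging server I/O
to a file (the log path is illustrative)::

  hg debugserve --sshstdio --logiofile /tmp/hg-server-io.log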
2279 2279 """
2280 2280 opts = pycompat.byteskwargs(opts)
2281 2281
2282 2282 if not opts['sshstdio']:
2283 2283 raise error.Abort(_('only --sshstdio is currently supported'))
2284 2284
2285 2285 logfh = None
2286 2286
2287 2287 if opts['logiofd'] and opts['logiofile']:
2288 2288 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2289 2289
2290 2290 if opts['logiofd']:
2291 2291 # Line buffered because output is line based.
2292 try:
2292 2293 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2294 except OSError as e:
2295 if e.errno != errno.ESPIPE:
2296 raise
2297 # can't seek a pipe, so `ab` mode fails on py3
2298 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2293 2299 elif opts['logiofile']:
2294 2300 logfh = open(opts['logiofile'], 'ab', 1)
2295 2301
2296 2302 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2297 2303 s.serve_forever()
2298 2304
2299 2305 @command('debugsetparents', [], _('REV1 [REV2]'))
2300 2306 def debugsetparents(ui, repo, rev1, rev2=None):
2301 2307 """manually set the parents of the current working directory
2302 2308
2303 2309 This is useful for writing repository conversion tools, but should
2304 2310 be used with care. For example, neither the working directory nor the
2305 2311 dirstate is updated, so file status may be incorrect after running this
2306 2312 command.
2307 2313
2308 2314 Returns 0 on success.
2309 2315 """
2310 2316
2311 2317 node1 = scmutil.revsingle(repo, rev1).node()
2312 2318 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2313 2319
2314 2320 with repo.wlock():
2315 2321 repo.setparents(node1, node2)
2316 2322
2317 2323 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2318 2324 def debugssl(ui, repo, source=None, **opts):
2319 2325 '''test a secure connection to a server
2320 2326
2321 2327 This builds the certificate chain for the server on Windows, installing the
2322 2328 missing intermediates and trusted root via Windows Update if necessary. It
2323 2329 does nothing on other platforms.
2324 2330
2325 2331 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2326 2332 that server is used. See :hg:`help urls` for more information.
2327 2333
2328 2334 If the update succeeds, retry the original operation. Otherwise, the cause
2329 2335 of the SSL error is likely another issue.
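
For example, to check the chain for an explicit HTTPS server (the URL is
illustrative)::

  hg debugssl https://example.com/repo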
2330 2336 '''
2331 2337 if not pycompat.iswindows:
2332 2338 raise error.Abort(_('certificate chain building is only possible on '
2333 2339 'Windows'))
2334 2340
2335 2341 if not source:
2336 2342 if not repo:
2337 2343 raise error.Abort(_("there is no Mercurial repository here, and no "
2338 2344 "server specified"))
2339 2345 source = "default"
2340 2346
2341 2347 source, branches = hg.parseurl(ui.expandpath(source))
2342 2348 url = util.url(source)
2343 2349 addr = None
2344 2350
2345 2351 defaultport = {'https': 443, 'ssh': 22}
2346 2352 if url.scheme in defaultport:
2347 2353 try:
2348 2354 addr = (url.host, int(url.port or defaultport[url.scheme]))
2349 2355 except ValueError:
2350 2356 raise error.Abort(_("malformed port number in URL"))
2351 2357 else:
2352 2358 raise error.Abort(_("only https and ssh connections are supported"))
2353 2359
2354 2360 from . import win32
2355 2361
2356 2362 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2357 2363 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2358 2364
2359 2365 try:
2360 2366 s.connect(addr)
2361 2367 cert = s.getpeercert(True)
2362 2368
2363 2369 ui.status(_('checking the certificate chain for %s\n') % url.host)
2364 2370
2365 2371 complete = win32.checkcertificatechain(cert, build=False)
2366 2372
2367 2373 if not complete:
2368 2374 ui.status(_('certificate chain is incomplete, updating... '))
2369 2375
2370 2376 if not win32.checkcertificatechain(cert):
2371 2377 ui.status(_('failed.\n'))
2372 2378 else:
2373 2379 ui.status(_('done.\n'))
2374 2380 else:
2375 2381 ui.status(_('full certificate chain is available\n'))
2376 2382 finally:
2377 2383 s.close()
2378 2384
2379 2385 @command('debugsub',
2380 2386 [('r', 'rev', '',
2381 2387 _('revision to check'), _('REV'))],
2382 2388 _('[-r REV] [REV]'))
2383 2389 def debugsub(ui, repo, rev=None):
2384 2390 ctx = scmutil.revsingle(repo, rev, None)
2385 2391 for k, v in sorted(ctx.substate.items()):
2386 2392 ui.write(('path %s\n') % k)
2387 2393 ui.write((' source %s\n') % v[0])
2388 2394 ui.write((' revision %s\n') % v[1])
2389 2395
2390 2396 @command('debugsuccessorssets',
2391 2397 [('', 'closest', False, _('return closest successors sets only'))],
2392 2398 _('[REV]'))
2393 2399 def debugsuccessorssets(ui, repo, *revs, **opts):
2394 2400 """show set of successors for revision
2395 2401
2396 2402 A successors set of changeset A is a consistent group of revisions that
2397 2403 succeed A. It contains non-obsolete changesets only unless the
2398 2404 --closest option is used.
2399 2405
2400 2406 In most cases a changeset A has a single successors set containing a single
2401 2407 successor (changeset A replaced by A').
2402 2408
2403 2409 A changeset that is made obsolete with no successors is called "pruned".
2404 2410 Such changesets have no successors sets at all.
2405 2411
2406 2412 A changeset that has been "split" will have a successors set containing
2407 2413 more than one successor.
2408 2414
2409 2415 A changeset that has been rewritten in multiple different ways is called
2410 2416 "divergent". Such changesets have multiple successor sets (each of which
2411 2417 may also be split, i.e. have multiple successors).
2412 2418
2413 2419 Results are displayed as follows::
2414 2420
2415 2421 <rev1>
2416 2422 <successors-1A>
2417 2423 <rev2>
2418 2424 <successors-2A>
2419 2425 <successors-2B1> <successors-2B2> <successors-2B3>
2420 2426
2421 2427 Here rev2 has two possible (i.e. divergent) successors sets. The first
2422 2428 holds one element, whereas the second holds three (i.e. the changeset has
2423 2429 been split).
2424 2430 """
2425 2431 # passed to successorssets caching computation from one call to another
2426 2432 cache = {}
2427 2433 ctx2str = bytes
2428 2434 node2str = short
2429 2435 for rev in scmutil.revrange(repo, revs):
2430 2436 ctx = repo[rev]
2431 2437 ui.write('%s\n' % ctx2str(ctx))
2432 2438 for succsset in obsutil.successorssets(repo, ctx.node(),
2433 2439 closest=opts[r'closest'],
2434 2440 cache=cache):
2435 2441 if succsset:
2436 2442 ui.write(' ')
2437 2443 ui.write(node2str(succsset[0]))
2438 2444 for node in succsset[1:]:
2439 2445 ui.write(' ')
2440 2446 ui.write(node2str(node))
2441 2447 ui.write('\n')
2442 2448
2443 2449 @command('debugtemplate',
2444 2450 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2445 2451 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2446 2452 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2447 2453 optionalrepo=True)
2448 2454 def debugtemplate(ui, repo, tmpl, **opts):
2449 2455 """parse and apply a template
2450 2456
2451 2457 If -r/--rev is given, the template is processed as a log template and
2452 2458 applied to the given changesets. Otherwise, it is processed as a generic
2453 2459 template.
2454 2460
2455 2461 Use --verbose to print the parsed tree.
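
For example, to render a log template for the working directory parent
(``node`` and ``desc`` are standard template keywords)::

  hg debugtemplate -r . '{node|short} {desc|firstline}\n'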
2456 2462 """
2457 2463 revs = None
2458 2464 if opts[r'rev']:
2459 2465 if repo is None:
2460 2466 raise error.RepoError(_('there is no Mercurial repository here '
2461 2467 '(.hg not found)'))
2462 2468 revs = scmutil.revrange(repo, opts[r'rev'])
2463 2469
2464 2470 props = {}
2465 2471 for d in opts[r'define']:
2466 2472 try:
2467 2473 k, v = (e.strip() for e in d.split('=', 1))
2468 2474 if not k or k == 'ui':
2469 2475 raise ValueError
2470 2476 props[k] = v
2471 2477 except ValueError:
2472 2478 raise error.Abort(_('malformed keyword definition: %s') % d)
2473 2479
2474 2480 if ui.verbose:
2475 2481 aliases = ui.configitems('templatealias')
2476 2482 tree = templater.parse(tmpl)
2477 2483 ui.note(templater.prettyformat(tree), '\n')
2478 2484 newtree = templater.expandaliases(tree, aliases)
2479 2485 if newtree != tree:
2480 2486 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2481 2487
2482 2488 if revs is None:
2483 2489 tres = formatter.templateresources(ui, repo)
2484 2490 t = formatter.maketemplater(ui, tmpl, resources=tres)
2485 2491 ui.write(t.renderdefault(props))
2486 2492 else:
2487 2493 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2488 2494 for r in revs:
2489 2495 displayer.show(repo[r], **pycompat.strkwargs(props))
2490 2496 displayer.close()
2491 2497
2492 2498 @command('debuguigetpass', [
2493 2499 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2494 2500 ], _('[-p TEXT]'), norepo=True)
2495 2501 def debuguigetpass(ui, prompt=''):
2496 2502 """show prompt to type password"""
2497 2503 r = ui.getpass(prompt)
2498 2504 ui.write(('response: %s\n') % r)
2499 2505
2500 2506 @command('debuguiprompt', [
2501 2507 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2502 2508 ], _('[-p TEXT]'), norepo=True)
2503 2509 def debuguiprompt(ui, prompt=''):
2504 2510 """show plain prompt"""
2505 2511 r = ui.prompt(prompt)
2506 2512 ui.write(('response: %s\n') % r)
2507 2513
2508 2514 @command('debugupdatecaches', [])
2509 2515 def debugupdatecaches(ui, repo, *pats, **opts):
2510 2516 """warm all known caches in the repository"""
2511 2517 with repo.wlock(), repo.lock():
2512 2518 repo.updatecaches(full=True)
2513 2519
2514 2520 @command('debugupgraderepo', [
2515 2521 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2516 2522 ('', 'run', False, _('performs an upgrade')),
2517 2523 ])
2518 2524 def debugupgraderepo(ui, repo, run=False, optimize=None):
2519 2525 """upgrade a repository to use different features
2520 2526
2521 2527 If no arguments are specified, the repository is evaluated for upgrade
2522 2528 and a list of problems and potential optimizations is printed.
2523 2529
2524 2530 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2525 2531 can be influenced via additional arguments. More details will be provided
2526 2532 by the command output when run without ``--run``.
2527 2533
2528 2534 During the upgrade, the repository will be locked and no writes will be
2529 2535 allowed.
2530 2536
2531 2537 At the end of the upgrade, the repository may not be readable while new
2532 2538 repository data is swapped in. This window will be as long as it takes to
2533 2539 rename some directories inside the ``.hg`` directory. On most machines, this
2534 2540 should complete almost instantaneously and the chances of a consumer being
2535 2541 unable to access the repository should be low.
2536 2542 """
2537 2543 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2538 2544
2539 2545 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2540 2546 inferrepo=True)
2541 2547 def debugwalk(ui, repo, *pats, **opts):
2542 2548 """show how files match on given patterns"""
2543 2549 opts = pycompat.byteskwargs(opts)
2544 2550 m = scmutil.match(repo[None], pats, opts)
2545 2551 if ui.verbose:
2546 2552 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2547 2553 items = list(repo[None].walk(m))
2548 2554 if not items:
2549 2555 return
2550 2556 f = lambda fn: fn
2551 2557 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2552 2558 f = lambda fn: util.normpath(fn)
2553 2559 fmt = 'f %%-%ds %%-%ds %%s' % (
2554 2560 max([len(abs) for abs in items]),
2555 2561 max([len(m.rel(abs)) for abs in items]))
2556 2562 for abs in items:
2557 2563 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2558 2564 ui.write("%s\n" % line.rstrip())
2559 2565
2560 2566 @command('debugwhyunstable', [], _('REV'))
2561 2567 def debugwhyunstable(ui, repo, rev):
2562 2568 """explain instabilities of a changeset"""
2563 2569 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2564 2570 dnodes = ''
2565 2571 if entry.get('divergentnodes'):
2566 2572 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2567 2573 for ctx in entry['divergentnodes']) + ' '
2568 2574 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2569 2575 entry['reason'], entry['node']))
2570 2576
2571 2577 @command('debugwireargs',
2572 2578 [('', 'three', '', 'three'),
2573 2579 ('', 'four', '', 'four'),
2574 2580 ('', 'five', '', 'five'),
2575 2581 ] + cmdutil.remoteopts,
2576 2582 _('REPO [OPTIONS]... [ONE [TWO]]'),
2577 2583 norepo=True)
2578 2584 def debugwireargs(ui, repopath, *vals, **opts):
2579 2585 opts = pycompat.byteskwargs(opts)
2580 2586 repo = hg.peer(ui, opts, repopath)
2581 2587 for opt in cmdutil.remoteopts:
2582 2588 del opts[opt[1]]
2583 2589 args = {}
2584 2590 for k, v in opts.iteritems():
2585 2591 if v:
2586 2592 args[k] = v
2587 2593 args = pycompat.strkwargs(args)
2588 2594 # run twice to check that we don't mess up the stream for the next command
2589 2595 res1 = repo.debugwireargs(*vals, **args)
2590 2596 res2 = repo.debugwireargs(*vals, **args)
2591 2597 ui.write("%s\n" % res1)
2592 2598 if res1 != res2:
2593 2599 ui.warn("%s\n" % res2)
2594 2600
2595 2601 def _parsewirelangblocks(fh):
2596 2602 activeaction = None
2597 2603 blocklines = []
2598 2604
2599 2605 for line in fh:
2600 2606 line = line.rstrip()
2601 2607 if not line:
2602 2608 continue
2603 2609
2604 2610 if line.startswith(b'#'):
2605 2611 continue
2606 2612
2607 2613 if not line.startswith(' '):
2608 2614 # New block. Flush previous one.
2609 2615 if activeaction:
2610 2616 yield activeaction, blocklines
2611 2617
2612 2618 activeaction = line
2613 2619 blocklines = []
2614 2620 continue
2615 2621
2616 2622 # Else we start with an indent.
2617 2623
2618 2624 if not activeaction:
2619 2625 raise error.Abort(_('indented line outside of block'))
2620 2626
2621 2627 blocklines.append(line)
2622 2628
2623 2629 # Flush last block.
2624 2630 if activeaction:
2625 2631 yield activeaction, blocklines
2626 2632
2627 2633 @command('debugwireproto',
2628 2634 [
2629 2635 ('', 'localssh', False, _('start an SSH server for this repo')),
2630 2636 ('', 'peer', '', _('construct a specific version of the peer')),
2631 2637 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2632 2638 ('', 'nologhandshake', False,
2633 2639 _('do not log I/O related to the peer handshake')),
2634 2640 ] + cmdutil.remoteopts,
2635 2641 _('[PATH]'),
2636 2642 optionalrepo=True)
2637 2643 def debugwireproto(ui, repo, path=None, **opts):
2638 2644 """send wire protocol commands to a server
2639 2645
2640 2646 This command can be used to issue wire protocol commands to remote
2641 2647 peers and to debug the raw data being exchanged.
2642 2648
2643 2649 ``--localssh`` will start an SSH server against the current repository
2644 2650 and connect to that. By default, the connection will perform a handshake
2645 2651 and establish an appropriate peer instance.
2646 2652
2647 2653 ``--peer`` can be used to bypass the handshake protocol and construct a
2648 2654 peer instance using the specified class type. Valid values are ``raw``,
2649 2655 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2650 2656 raw data payloads and don't support higher-level command actions.
2651 2657
2652 2658 ``--noreadstderr`` can be used to disable automatic reading from stderr
2653 2659 of the peer (for SSH connections only). Disabling automatic reading of
2654 2660 stderr is useful for making output more deterministic.
2655 2661
2656 2662 Commands are issued via a mini language which is specified via stdin.
2657 2663 The language consists of individual actions to perform. An action is
2658 2664 defined by a block. A block is defined as a line with no leading
2659 2665 space followed by 0 or more lines with leading space. Blocks are
2660 2666 effectively a high-level command with additional metadata.
2661 2667
2662 2668 Lines beginning with ``#`` are ignored.
2663 2669
2664 2670 The following sections denote available actions.
2665 2671
2666 2672 raw
2667 2673 ---
2668 2674
2669 2675 Send raw data to the server.
2670 2676
2671 2677 The block payload contains the raw data to send as one atomic send
2672 2678 operation. The data may not actually be delivered in a single system
2673 2679 call: it depends on the abilities of the transport being used.
2674 2680
2675 2681 Each line in the block is de-indented and concatenated. Then, that
2676 2682 value is evaluated as a Python b'' literal. This allows the use of
2677 2683 backslash escaping, etc.
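
For example, a block sending the SSH protocol ``hello`` command as raw
data could look like this (the payload is illustrative)::

  raw
      hello\n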
2678 2684
2679 2685 raw+
2680 2686 ----
2681 2687
2682 2688 Behaves like ``raw`` except flushes output afterwards.
2683 2689
2684 2690 command <X>
2685 2691 -----------
2686 2692
2687 2693 Send a request to run a named command, whose name follows the ``command``
2688 2694 string.
2689 2695
2690 2696 Arguments to the command are defined as lines in this block. The format of
2691 2697 each line is ``<key> <value>``. e.g.::
2692 2698
2693 2699 command listkeys
2694 2700 namespace bookmarks
2695 2701
2696 2702 If the value begins with ``eval:``, it will be interpreted as a Python
2697 2703 literal expression. Otherwise values are interpreted as Python b'' literals.
2698 2704 This allows sending complex types and encoding special byte sequences via
2699 2705 backslash escaping.
2700 2706
2701 2707 The following arguments have special meaning:
2702 2708
2703 2709 ``PUSHFILE``
2704 2710 When defined, the *push* mechanism of the peer will be used instead
2705 2711 of the static request-response mechanism and the content of the
2706 2712 file specified in the value of this argument will be sent as the
2707 2713 command payload.
2708 2714
2709 2715 This can be used to submit a local bundle file to the remote.
2710 2716
2711 2717 batchbegin
2712 2718 ----------
2713 2719
2714 2720 Instruct the peer to begin a batched send.
2715 2721
2716 2722 All ``command`` blocks are queued for execution until the next
2717 2723 ``batchsubmit`` block.
2718 2724
2719 2725 batchsubmit
2720 2726 -----------
2721 2727
2722 2728 Submit previously queued ``command`` blocks as a batch request.
2723 2729
2724 2730 This action MUST be paired with a ``batchbegin`` action.
2725 2731
2726 2732 httprequest <method> <path>
2727 2733 ---------------------------
2728 2734
2729 2735 (HTTP peer only)
2730 2736
2731 2737 Send an HTTP request to the peer.
2732 2738
2733 2739 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2734 2740
2735 2741 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2736 2742 headers to add to the request. e.g. ``Accept: foo``.
2737 2743
2738 2744 The following arguments are special:
2739 2745
2740 2746 ``BODYFILE``
2741 2747 The content of the file defined as the value to this argument will be
2742 2748 transferred verbatim as the HTTP request body.
2743 2749
2744 2750 ``frame <type> <flags> <payload>``
2745 2751 Send a unified protocol frame as part of the request body.
2746 2752
2747 2753 All frames will be collected and sent as the body to the HTTP
2748 2754 request.
2749 2755
2750 2756 close
2751 2757 -----
2752 2758
2753 2759 Close the connection to the server.
2754 2760
2755 2761 flush
2756 2762 -----
2757 2763
2758 2764 Flush data written to the server.
2759 2765
2760 2766 readavailable
2761 2767 -------------
2762 2768
2763 2769 Close the write end of the connection and read all available data from
2764 2770 the server.
2765 2771
2766 2772 If the connection to the server encompasses multiple pipes, we poll both
2767 2773 pipes and read available data.
2768 2774
2769 2775 readline
2770 2776 --------
2771 2777
2772 2778 Read a line of output from the server. If there are multiple output
2773 2779 pipes, reads only the main pipe.
2774 2780
2775 2781 ereadline
2776 2782 ---------
2777 2783
2778 2784 Like ``readline``, but read from the stderr pipe, if available.
2779 2785
2780 2786 read <X>
2781 2787 --------
2782 2788
2783 2789 ``read()`` ``<X>`` bytes from the server's main output pipe.
2784 2790
2785 2791 eread <X>
2786 2792 ---------
2787 2793
2788 2794 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
2789 2795
2790 2796 Specifying Unified Frame-Based Protocol Frames
2791 2797 ----------------------------------------------
2792 2798
2793 2799 It is possible to emit a *Unified Frame-Based Protocol* by using special
2794 2800 syntax.
2795 2801
2796 2802 A frame is composed as a type, flags, and payload. These can be parsed
2797 2803 from a string of the form:
2798 2804
2799 2805 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2800 2806
2801 2807 ``request-id`` and ``stream-id`` are integers defining the request and
2802 2808 stream identifiers.
2803 2809
2804 2810 ``type`` can be an integer value for the frame type or the string name
2805 2811 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2806 2812 ``command-name``.
2807 2813
2808 2814 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2809 2815 components. Each component (and there can be just one) can be an integer
2810 2816 or a flag name for stream flags or frame flags, respectively. Values are
2811 2817 resolved to integers and then bitwise OR'd together.
2812 2818
2813 2819 ``payload`` represents the raw frame payload. If it begins with
2814 2820 ``cbor:``, the following string is evaluated as Python code and the
2815 2821 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2816 2822 as a Python byte string literal.
2817 2823 """
2818 2824 opts = pycompat.byteskwargs(opts)
2819 2825
2820 2826 if opts['localssh'] and not repo:
2821 2827 raise error.Abort(_('--localssh requires a repository'))
2822 2828
2823 2829 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2824 2830 raise error.Abort(_('invalid value for --peer'),
2825 2831 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2826 2832
2827 2833 if path and opts['localssh']:
2828 2834 raise error.Abort(_('cannot specify --localssh with an explicit '
2829 2835 'path'))
2830 2836
2831 2837 if ui.interactive():
2832 2838 ui.write(_('(waiting for commands on stdin)\n'))
2833 2839
2834 2840 blocks = list(_parsewirelangblocks(ui.fin))
2835 2841
2836 2842 proc = None
2837 2843 stdin = None
2838 2844 stdout = None
2839 2845 stderr = None
2840 2846 opener = None
2841 2847
2842 2848 if opts['localssh']:
2843 2849 # We start the SSH server in its own process so there is process
2844 2850 # separation. This prevents a whole class of potential bugs around
2845 2851 # shared state from interfering with server operation.
2846 2852 args = procutil.hgcmd() + [
2847 2853 '-R', repo.root,
2848 2854 'debugserve', '--sshstdio',
2849 2855 ]
2850 2856 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2851 2857 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2852 2858 bufsize=0)
2853 2859
2854 2860 stdin = proc.stdin
2855 2861 stdout = proc.stdout
2856 2862 stderr = proc.stderr
2857 2863
2858 2864 # We turn the pipes into observers so we can log I/O.
2859 2865 if ui.verbose or opts['peer'] == 'raw':
2860 2866 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2861 2867 logdata=True)
2862 2868 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2863 2869 logdata=True)
2864 2870 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2865 2871 logdata=True)
2866 2872
2867 2873 # --localssh also implies the peer connection settings.
2868 2874
2869 2875 url = 'ssh://localserver'
2870 2876 autoreadstderr = not opts['noreadstderr']
2871 2877
2872 2878 if opts['peer'] == 'ssh1':
2873 2879 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2874 2880 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2875 2881 None, autoreadstderr=autoreadstderr)
2876 2882 elif opts['peer'] == 'ssh2':
2877 2883 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2878 2884 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2879 2885 None, autoreadstderr=autoreadstderr)
2880 2886 elif opts['peer'] == 'raw':
2881 2887 ui.write(_('using raw connection to peer\n'))
2882 2888 peer = None
2883 2889 else:
2884 2890 ui.write(_('creating ssh peer from handshake results\n'))
2885 2891 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2886 2892 autoreadstderr=autoreadstderr)
2887 2893
2888 2894 elif path:
2889 2895 # We bypass hg.peer() so we can proxy the sockets.
2890 2896 # TODO consider not doing this because we skip
2891 2897 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2892 2898 u = util.url(path)
2893 2899 if u.scheme != 'http':
2894 2900 raise error.Abort(_('only http:// paths are currently supported'))
2895 2901
2896 2902 url, authinfo = u.authinfo()
2897 2903 openerargs = {
2898 2904 r'useragent': b'Mercurial debugwireproto',
2899 2905 }
2900 2906
2901 2907 # Turn pipes/sockets into observers so we can log I/O.
2902 2908 if ui.verbose:
2903 2909 openerargs.update({
2904 2910 r'loggingfh': ui,
2905 2911 r'loggingname': b's',
2906 2912 r'loggingopts': {
2907 2913 r'logdata': True,
2908 2914 r'logdataapis': False,
2909 2915 },
2910 2916 })
2911 2917
2912 2918 if ui.debugflag:
2913 2919 openerargs[r'loggingopts'][r'logdataapis'] = True
2914 2920
2915 2921 # Don't send default headers when in raw mode. This allows us to
2916 2922 # bypass most of the behavior of our URL handling code so we can
2917 2923 # have near complete control over what's sent on the wire.
2918 2924 if opts['peer'] == 'raw':
2919 2925 openerargs[r'sendaccept'] = False
2920 2926
2921 2927 opener = urlmod.opener(ui, authinfo, **openerargs)
2922 2928
2923 2929 if opts['peer'] == 'http2':
2924 2930 ui.write(_('creating http peer for wire protocol version 2\n'))
2925 2931 # We go through makepeer() because we need an API descriptor for
2926 2932 # the peer instance to be useful.
2927 2933 with ui.configoverride({
2928 2934 ('experimental', 'httppeer.advertise-v2'): True}):
2929 2935 if opts['nologhandshake']:
2930 2936 ui.pushbuffer()
2931 2937
2932 2938 peer = httppeer.makepeer(ui, path, opener=opener)
2933 2939
2934 2940 if opts['nologhandshake']:
2935 2941 ui.popbuffer()
2936 2942
2937 2943 if not isinstance(peer, httppeer.httpv2peer):
2938 2944 raise error.Abort(_('could not instantiate HTTP peer for '
2939 2945 'wire protocol version 2'),
2940 2946 hint=_('the server may not have the feature '
2941 2947 'enabled or is not allowing this '
2942 2948 'client version'))
2943 2949
2944 2950 elif opts['peer'] == 'raw':
2945 2951 ui.write(_('using raw connection to peer\n'))
2946 2952 peer = None
2947 2953 elif opts['peer']:
2948 2954 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2949 2955 opts['peer'])
2950 2956 else:
2951 2957 peer = httppeer.makepeer(ui, path, opener=opener)
2952 2958
2953 2959 # We /could/ populate stdin/stdout with sock.makefile()...
2954 2960 else:
2955 2961 raise error.Abort(_('unsupported connection configuration'))
2956 2962
2957 2963 batchedcommands = None
2958 2964
2959 2965 # Now perform actions based on the parsed wire language instructions.
2960 2966 for action, lines in blocks:
2961 2967 if action in ('raw', 'raw+'):
2962 2968 if not stdin:
2963 2969 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2964 2970
2965 2971 # Concatenate the data together.
2966 2972 data = ''.join(l.lstrip() for l in lines)
2967 2973 data = stringutil.unescapestr(data)
2968 2974 stdin.write(data)
2969 2975
2970 2976 if action == 'raw+':
2971 2977 stdin.flush()
2972 2978 elif action == 'flush':
2973 2979 if not stdin:
2974 2980 raise error.Abort(_('cannot call flush on this peer'))
2975 2981 stdin.flush()
2976 2982 elif action.startswith('command'):
2977 2983 if not peer:
2978 2984 raise error.Abort(_('cannot send commands unless peer instance '
2979 2985 'is available'))
2980 2986
2981 2987 command = action.split(' ', 1)[1]
2982 2988
2983 2989 args = {}
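# Each indented line of the block is "<key> <value>". A value prefixed
# with "eval:" is parsed as a Python literal; anything else is treated
# as an escaped string, e.g. (illustrative):
#
#     command known
#         nodes eval:[]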
2984 2990 for line in lines:
2985 2991 # We need to allow empty values.
2986 2992 fields = line.lstrip().split(' ', 1)
2987 2993 if len(fields) == 1:
2988 2994 key = fields[0]
2989 2995 value = ''
2990 2996 else:
2991 2997 key, value = fields
2992 2998
2993 2999 if value.startswith('eval:'):
2994 3000 value = stringutil.evalpythonliteral(value[5:])
2995 3001 else:
2996 3002 value = stringutil.unescapestr(value)
2997 3003
2998 3004 args[key] = value
2999 3005
3000 3006 if batchedcommands is not None:
3001 3007 batchedcommands.append((command, args))
3002 3008 continue
3003 3009
3004 3010 ui.status(_('sending %s command\n') % command)
3005 3011
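# PUSHFILE is a special argument: the named local file is opened and its
# contents are streamed to the server as the command's payload (as used
# by push-style commands such as unbundle).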
3006 3012 if 'PUSHFILE' in args:
3007 3013 with open(args['PUSHFILE'], r'rb') as fh:
3008 3014 del args['PUSHFILE']
3009 3015 res, output = peer._callpush(command, fh,
3010 3016 **pycompat.strkwargs(args))
3011 3017 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3012 3018 ui.status(_('remote output: %s\n') %
3013 3019 stringutil.escapestr(output))
3014 3020 else:
3015 3021 with peer.commandexecutor() as e:
3016 3022 res = e.callcommand(command, args).result()
3017 3023
3018 3024 if isinstance(res, wireprotov2peer.commandresponse):
3019 3025 val = list(res.cborobjects())
3020 3026 ui.status(_('response: %s\n') %
3021 3027 stringutil.pprint(val, bprefix=True))
3022 3028
3023 3029 else:
3024 3030 ui.status(_('response: %s\n') %
3025 3031 stringutil.pprint(res, bprefix=True))
3026 3032
3027 3033 elif action == 'batchbegin':
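# Commands issued after "batchbegin" are queued locally (see the
# 'command' handler above) and sent as one batch request when
# "batchsubmit" is reached, e.g. (illustrative):
#
#     batchbegin
#     command heads
#     command listkeys
#         namespace bookmarks
#     batchsubmit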
3028 3034 if batchedcommands is not None:
3029 3035 raise error.Abort(_('nested batchbegin not allowed'))
3030 3036
3031 3037 batchedcommands = []
3032 3038 elif action == 'batchsubmit':
3033 3039 # There is a batching API we could go through. But it would be
3034 3040 # difficult to normalize requests into function calls. It is easier
3035 3041 # to bypass this layer and normalize to commands + args.
3036 3042 ui.status(_('sending batch with %d sub-commands\n') %
3037 3043 len(batchedcommands))
3038 3044 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3039 3045 ui.status(_('response #%d: %s\n') %
3040 3046 (i, stringutil.escapestr(chunk)))
3041 3047
3042 3048 batchedcommands = None
3043 3049
3044 3050 elif action.startswith('httprequest '):
3045 3051 if not opener:
3046 3052 raise error.Abort(_('cannot use httprequest without an HTTP '
3047 3053 'peer'))
3048 3054
3049 3055 request = action.split(' ', 2)
3050 3056 if len(request) != 3:
3051 3057 raise error.Abort(_('invalid httprequest: expected format is '
3052 3058 '"httprequest <method> <path>'))
3053 3059
3054 3060 method, httppath = request[1:]
3055 3061 headers = {}
3056 3062 body = None
3057 3063 frames = []
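# Each indented line is either an HTTP header ("Name: value"),
# "BODYFILE <path>" naming a local file whose contents become the
# request body, or "frame <spec>" describing a wire protocol frame to
# append to the body.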
3058 3064 for line in lines:
3059 3065 line = line.lstrip()
3060 3066 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3061 3067 if m:
3062 3068 headers[m.group(1)] = m.group(2)
3063 3069 continue
3064 3070
3065 3071 if line.startswith(b'BODYFILE '):
3066 3072 with open(line.split(b' ', 1)[1], 'rb') as fh:
3067 3073 body = fh.read()
3068 3074 elif line.startswith(b'frame '):
3069 3075 frame = wireprotoframing.makeframefromhumanstring(
3070 3076 line[len(b'frame '):])
3071 3077
3072 3078 frames.append(frame)
3073 3079 else:
3074 3080 raise error.Abort(_('unknown argument to httprequest: %s') %
3075 3081 line)
3076 3082
3077 3083 url = path + httppath
3078 3084
3079 3085 if frames:
3080 3086 body = b''.join(bytes(f) for f in frames)
3081 3087
3082 3088 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3083 3089
3084 3090 # urllib.Request insists on using has_data() as a proxy for
3085 3091 # determining the request method. Override that to use our
3086 3092 # explicitly requested method.
3087 3093 req.get_method = lambda: method
3088 3094
3089 3095 try:
3090 3096 res = opener.open(req)
3091 3097 body = res.read()
3092 3098 except util.urlerr.urlerror as e:
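# Read and discard the error response body, then move on to the
# next action.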
3093 3099 e.read()
3094 3100 continue
3095 3101
3096 3102 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3097 3103 ui.write(_('cbor> %s\n') %
3098 3104 stringutil.pprint(cbor.loads(body), bprefix=True))
3099 3105
3100 3106 elif action == 'close':
3101 3107 peer.close()
3102 3108 elif action == 'readavailable':
3103 3109 if not stdout or not stderr:
3104 3110 raise error.Abort(_('readavailable not available on this peer'))
3105 3111
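# Close stdin to signal EOF to the peer, then drain stdout and stderr so
# any remaining output is consumed (and logged when I/O observers are
# enabled).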
3106 3112 stdin.close()
3107 3113 stdout.read()
3108 3114 stderr.read()
3109 3115
3110 3116 elif action == 'readline':
3111 3117 if not stdout:
3112 3118 raise error.Abort(_('readline not available on this peer'))
3113 3119 stdout.readline()
3114 3120 elif action == 'ereadline':
3115 3121 if not stderr:
3116 3122 raise error.Abort(_('ereadline not available on this peer'))
3117 3123 stderr.readline()
3118 3124 elif action.startswith('read '):
3119 3125 count = int(action.split(' ', 1)[1])
3120 3126 if not stdout:
3121 3127 raise error.Abort(_('read not available on this peer'))
3122 3128 stdout.read(count)
3123 3129 elif action.startswith('eread '):
3124 3130 count = int(action.split(' ', 1)[1])
3125 3131 if not stderr:
3126 3132 raise error.Abort(_('eread not available on this peer'))
3127 3133 stderr.read(count)
3128 3134 else:
3129 3135 raise error.Abort(_('unknown action: %s') % action)
3130 3136
3131 3137 if batchedcommands is not None:
3132 3138 raise error.Abort(_('unclosed "batchbegin" request'))
3133 3139
3134 3140 if peer:
3135 3141 peer.close()
3136 3142
3137 3143 if proc:
3138 3144 proc.kill()