debugbuilddag: use progress helper...
Martin von Zweigbergk
r38394:fce1c174 default
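
The diff below replaces debugbuilddag's direct ui.progress() calls with a progress helper object. A minimal sketch of the new pattern, using only the calls that appear in the diff (the surrounding command body is elided):

    progress = ui.makeprogress(_('building'), unit=_('revisions'), total=total)
    try:
        # ... create each revision, then report the position ...
        progress.update(id)        # was: ui.progress(_('building'), id, unit=_('revisions'), total=total)
    finally:
        progress.complete()        # was: ui.progress(_('building'), None)
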
@@ -1,3152 +1,3154 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 fileset,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
92 92 release = lockmod.release
93 93
94 94 command = registrar.command()
95 95
96 96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 97 def debugancestor(ui, repo, *args):
98 98 """find the ancestor revision of two revisions in a given index"""
99 99 if len(args) == 3:
100 100 index, rev1, rev2 = args
101 101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 102 lookup = r.lookup
103 103 elif len(args) == 2:
104 104 if not repo:
105 105 raise error.Abort(_('there is no Mercurial repository here '
106 106 '(.hg not found)'))
107 107 rev1, rev2 = args
108 108 r = repo.changelog
109 109 lookup = repo.lookup
110 110 else:
111 111 raise error.Abort(_('either two or three arguments required'))
112 112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114 114
115 115 @command('debugapplystreamclonebundle', [], 'FILE')
116 116 def debugapplystreamclonebundle(ui, repo, fname):
117 117 """apply a stream clone bundle file"""
118 118 f = hg.openpath(ui, fname)
119 119 gen = exchange.readbundle(ui, f, fname)
120 120 gen.apply(repo)
121 121
122 122 @command('debugbuilddag',
123 123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 125 ('n', 'new-file', None, _('add new file at each rev'))],
126 126 _('[OPTION]... [TEXT]'))
127 127 def debugbuilddag(ui, repo, text=None,
128 128 mergeable_file=False,
129 129 overwritten_file=False,
130 130 new_file=False):
131 131 """builds a repo with a given DAG from scratch in the current empty repo
132 132
133 133 The description of the DAG is read from stdin if not given on the
134 134 command line.
135 135
136 136 Elements:
137 137
138 138 - "+n" is a linear run of n nodes based on the current default parent
139 139 - "." is a single node based on the current default parent
140 140 - "$" resets the default parent to null (implied at the start);
141 141 otherwise the default parent is always the last node created
142 142 - "<p" sets the default parent to the backref p
143 143 - "*p" is a fork at parent p, which is a backref
144 144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 145 - "/p2" is a merge of the preceding node and p2
146 146 - ":tag" defines a local tag for the preceding node
147 147 - "@branch" sets the named branch for subsequent nodes
148 148 - "#...\\n" is a comment up to the end of the line
149 149
150 150 Whitespace between the above elements is ignored.
151 151
152 152 A backref is either
153 153
154 154 - a number n, which references the node curr-n, where curr is the current
155 155 node, or
156 156 - the name of a local tag you placed earlier using ":tag", or
157 157 - empty to denote the default parent.
158 158
159 159 All string valued-elements are either strictly alphanumeric, or must
160 160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 161 """
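    # Illustrative example (a hypothetical DAG text, built from the grammar
    # described above; not taken from the original source): the text
    #
    #     +3 :mid $ +2 /mid
    #
    # creates three linear nodes, tags the third one "mid", resets the default
    # parent to null, creates two more nodes on a new root, and finally merges
    # the last node created with the node tagged "mid".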
162 162
163 163 if text is None:
164 164 ui.status(_("reading DAG from stdin\n"))
165 165 text = ui.fin.read()
166 166
167 167 cl = repo.changelog
168 168 if len(cl) > 0:
169 169 raise error.Abort(_('repository is not empty'))
170 170
171 171 # determine number of revs in DAG
172 172 total = 0
173 173 for type, data in dagparser.parsedag(text):
174 174 if type == 'n':
175 175 total += 1
176 176
177 177 if mergeable_file:
178 178 linesperrev = 2
179 179 # make a file with k lines per rev
180 180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
181 181 initialmergedlines.append("")
182 182
183 183 tags = []
184 184
185 185 wlock = lock = tr = None
186 progress = ui.makeprogress(_('building'), unit=_('revisions'),
187 total=total)
186 188 try:
187 189 wlock = repo.wlock()
188 190 lock = repo.lock()
189 191 tr = repo.transaction("builddag")
190 192
191 193 at = -1
192 194 atbranch = 'default'
193 195 nodeids = []
194 196 id = 0
195 ui.progress(_('building'), id, unit=_('revisions'), total=total)
197 progress.update(id)
196 198 for type, data in dagparser.parsedag(text):
197 199 if type == 'n':
198 200 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 201 id, ps = data
200 202
201 203 files = []
202 204 filecontent = {}
203 205
204 206 p2 = None
205 207 if mergeable_file:
206 208 fn = "mf"
207 209 p1 = repo[ps[0]]
208 210 if len(ps) > 1:
209 211 p2 = repo[ps[1]]
210 212 pa = p1.ancestor(p2)
211 213 base, local, other = [x[fn].data() for x in (pa, p1,
212 214 p2)]
213 215 m3 = simplemerge.Merge3Text(base, local, other)
214 216 ml = [l.strip() for l in m3.merge_lines()]
215 217 ml.append("")
216 218 elif at > 0:
217 219 ml = p1[fn].data().split("\n")
218 220 else:
219 221 ml = initialmergedlines
220 222 ml[id * linesperrev] += " r%i" % id
221 223 mergedtext = "\n".join(ml)
222 224 files.append(fn)
223 225 filecontent[fn] = mergedtext
224 226
225 227 if overwritten_file:
226 228 fn = "of"
227 229 files.append(fn)
228 230 filecontent[fn] = "r%i\n" % id
229 231
230 232 if new_file:
231 233 fn = "nf%i" % id
232 234 files.append(fn)
233 235 filecontent[fn] = "r%i\n" % id
234 236 if len(ps) > 1:
235 237 if not p2:
236 238 p2 = repo[ps[1]]
237 239 for fn in p2:
238 240 if fn.startswith("nf"):
239 241 files.append(fn)
240 242 filecontent[fn] = p2[fn].data()
241 243
242 244 def fctxfn(repo, cx, path):
243 245 if path in filecontent:
244 246 return context.memfilectx(repo, cx, path,
245 247 filecontent[path])
246 248 return None
247 249
248 250 if len(ps) == 0 or ps[0] < 0:
249 251 pars = [None, None]
250 252 elif len(ps) == 1:
251 253 pars = [nodeids[ps[0]], None]
252 254 else:
253 255 pars = [nodeids[p] for p in ps]
254 256 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 257 date=(id, 0),
256 258 user="debugbuilddag",
257 259 extra={'branch': atbranch})
258 260 nodeid = repo.commitctx(cx)
259 261 nodeids.append(nodeid)
260 262 at = id
261 263 elif type == 'l':
262 264 id, name = data
263 265 ui.note(('tag %s\n' % name))
264 266 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 267 elif type == 'a':
266 268 ui.note(('branch %s\n' % data))
267 269 atbranch = data
268 ui.progress(_('building'), id, unit=_('revisions'), total=total)
270 progress.update(id)
269 271 tr.close()
270 272
271 273 if tags:
272 274 repo.vfs.write("localtags", "".join(tags))
273 275 finally:
274 ui.progress(_('building'), None)
276 progress.complete()
275 277 release(tr, lock, wlock)
276 278
277 279 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
278 280 indent_string = ' ' * indent
279 281 if all:
280 282 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
281 283 % indent_string)
282 284
283 285 def showchunks(named):
284 286 ui.write("\n%s%s\n" % (indent_string, named))
285 287 for deltadata in gen.deltaiter():
286 288 node, p1, p2, cs, deltabase, delta, flags = deltadata
287 289 ui.write("%s%s %s %s %s %s %d\n" %
288 290 (indent_string, hex(node), hex(p1), hex(p2),
289 291 hex(cs), hex(deltabase), len(delta)))
290 292
291 293 chunkdata = gen.changelogheader()
292 294 showchunks("changelog")
293 295 chunkdata = gen.manifestheader()
294 296 showchunks("manifest")
295 297 for chunkdata in iter(gen.filelogheader, {}):
296 298 fname = chunkdata['filename']
297 299 showchunks(fname)
298 300 else:
299 301 if isinstance(gen, bundle2.unbundle20):
300 302 raise error.Abort(_('use debugbundle2 for this file'))
301 303 chunkdata = gen.changelogheader()
302 304 for deltadata in gen.deltaiter():
303 305 node, p1, p2, cs, deltabase, delta, flags = deltadata
304 306 ui.write("%s%s\n" % (indent_string, hex(node)))
305 307
306 308 def _debugobsmarkers(ui, part, indent=0, **opts):
307 309 """display version and markers contained in 'data'"""
308 310 opts = pycompat.byteskwargs(opts)
309 311 data = part.read()
310 312 indent_string = ' ' * indent
311 313 try:
312 314 version, markers = obsolete._readmarkers(data)
313 315 except error.UnknownVersion as exc:
314 316 msg = "%sunsupported version: %s (%d bytes)\n"
315 317 msg %= indent_string, exc.version, len(data)
316 318 ui.write(msg)
317 319 else:
318 320 msg = "%sversion: %d (%d bytes)\n"
319 321 msg %= indent_string, version, len(data)
320 322 ui.write(msg)
321 323 fm = ui.formatter('debugobsolete', opts)
322 324 for rawmarker in sorted(markers):
323 325 m = obsutil.marker(None, rawmarker)
324 326 fm.startitem()
325 327 fm.plain(indent_string)
326 328 cmdutil.showmarker(fm, m)
327 329 fm.end()
328 330
329 331 def _debugphaseheads(ui, data, indent=0):
330 332 """display version and markers contained in 'data'"""
331 333 indent_string = ' ' * indent
332 334 headsbyphase = phases.binarydecode(data)
333 335 for phase in phases.allphases:
334 336 for head in headsbyphase[phase]:
335 337 ui.write(indent_string)
336 338 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
337 339
338 340 def _quasirepr(thing):
339 341 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
340 342 return '{%s}' % (
341 343 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
342 344 return pycompat.bytestr(repr(thing))
343 345
344 346 def _debugbundle2(ui, gen, all=None, **opts):
345 347 """lists the contents of a bundle2"""
346 348 if not isinstance(gen, bundle2.unbundle20):
347 349 raise error.Abort(_('not a bundle2 file'))
348 350 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
349 351 parttypes = opts.get(r'part_type', [])
350 352 for part in gen.iterparts():
351 353 if parttypes and part.type not in parttypes:
352 354 continue
353 355 msg = '%s -- %s (mandatory: %r)\n'
354 356 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
355 357 if part.type == 'changegroup':
356 358 version = part.params.get('version', '01')
357 359 cg = changegroup.getunbundler(version, part, 'UN')
358 360 if not ui.quiet:
359 361 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
360 362 if part.type == 'obsmarkers':
361 363 if not ui.quiet:
362 364 _debugobsmarkers(ui, part, indent=4, **opts)
363 365 if part.type == 'phase-heads':
364 366 if not ui.quiet:
365 367 _debugphaseheads(ui, part, indent=4)
366 368
367 369 @command('debugbundle',
368 370 [('a', 'all', None, _('show all details')),
369 371 ('', 'part-type', [], _('show only the named part type')),
370 372 ('', 'spec', None, _('print the bundlespec of the bundle'))],
371 373 _('FILE'),
372 374 norepo=True)
373 375 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
374 376 """lists the contents of a bundle"""
375 377 with hg.openpath(ui, bundlepath) as f:
376 378 if spec:
377 379 spec = exchange.getbundlespec(ui, f)
378 380 ui.write('%s\n' % spec)
379 381 return
380 382
381 383 gen = exchange.readbundle(ui, f, bundlepath)
382 384 if isinstance(gen, bundle2.unbundle20):
383 385 return _debugbundle2(ui, gen, all=all, **opts)
384 386 _debugchangegroup(ui, gen, all=all, **opts)
385 387
386 388 @command('debugcapabilities',
387 389 [], _('PATH'),
388 390 norepo=True)
389 391 def debugcapabilities(ui, path, **opts):
390 392 """lists the capabilities of a remote peer"""
391 393 opts = pycompat.byteskwargs(opts)
392 394 peer = hg.peer(ui, opts, path)
393 395 caps = peer.capabilities()
394 396 ui.write(('Main capabilities:\n'))
395 397 for c in sorted(caps):
396 398 ui.write((' %s\n') % c)
397 399 b2caps = bundle2.bundle2caps(peer)
398 400 if b2caps:
399 401 ui.write(('Bundle2 capabilities:\n'))
400 402 for key, values in sorted(b2caps.iteritems()):
401 403 ui.write((' %s\n') % key)
402 404 for v in values:
403 405 ui.write((' %s\n') % v)
404 406
405 407 @command('debugcheckstate', [], '')
406 408 def debugcheckstate(ui, repo):
407 409 """validate the correctness of the current dirstate"""
408 410 parent1, parent2 = repo.dirstate.parents()
409 411 m1 = repo[parent1].manifest()
410 412 m2 = repo[parent2].manifest()
411 413 errors = 0
412 414 for f in repo.dirstate:
413 415 state = repo.dirstate[f]
414 416 if state in "nr" and f not in m1:
415 417 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
416 418 errors += 1
417 419 if state in "a" and f in m1:
418 420 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
419 421 errors += 1
420 422 if state in "m" and f not in m1 and f not in m2:
421 423 ui.warn(_("%s in state %s, but not in either manifest\n") %
422 424 (f, state))
423 425 errors += 1
424 426 for f in m1:
425 427 state = repo.dirstate[f]
426 428 if state not in "nrm":
427 429 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
428 430 errors += 1
429 431 if errors:
430 432 error = _(".hg/dirstate inconsistent with current parent's manifest")
431 433 raise error.Abort(error)
432 434
433 435 @command('debugcolor',
434 436 [('', 'style', None, _('show all configured styles'))],
435 437 'hg debugcolor')
436 438 def debugcolor(ui, repo, **opts):
437 439 """show available color, effects or style"""
438 440 ui.write(('color mode: %s\n') % ui._colormode)
439 441 if opts.get(r'style'):
440 442 return _debugdisplaystyle(ui)
441 443 else:
442 444 return _debugdisplaycolor(ui)
443 445
444 446 def _debugdisplaycolor(ui):
445 447 ui = ui.copy()
446 448 ui._styles.clear()
447 449 for effect in color._activeeffects(ui).keys():
448 450 ui._styles[effect] = effect
449 451 if ui._terminfoparams:
450 452 for k, v in ui.configitems('color'):
451 453 if k.startswith('color.'):
452 454 ui._styles[k] = k[6:]
453 455 elif k.startswith('terminfo.'):
454 456 ui._styles[k] = k[9:]
455 457 ui.write(_('available colors:\n'))
456 458 # sort label with a '_' after the other to group '_background' entry.
457 459 items = sorted(ui._styles.items(),
458 460 key=lambda i: ('_' in i[0], i[0], i[1]))
459 461 for colorname, label in items:
460 462 ui.write(('%s\n') % colorname, label=label)
461 463
462 464 def _debugdisplaystyle(ui):
463 465 ui.write(_('available style:\n'))
464 466 if not ui._styles:
465 467 return
466 468 width = max(len(s) for s in ui._styles)
467 469 for label, effects in sorted(ui._styles.items()):
468 470 ui.write('%s' % label, label=label)
469 471 if effects:
470 472 # 50
471 473 ui.write(': ')
472 474 ui.write(' ' * (max(0, width - len(label))))
473 475 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
474 476 ui.write('\n')
475 477
476 478 @command('debugcreatestreamclonebundle', [], 'FILE')
477 479 def debugcreatestreamclonebundle(ui, repo, fname):
478 480 """create a stream clone bundle file
479 481
480 482 Stream bundles are special bundles that are essentially archives of
481 483 revlog files. They are commonly used for cloning very quickly.
482 484 """
483 485 # TODO we may want to turn this into an abort when this functionality
484 486 # is moved into `hg bundle`.
485 487 if phases.hassecret(repo):
486 488 ui.warn(_('(warning: stream clone bundle will contain secret '
487 489 'revisions)\n'))
488 490
489 491 requirements, gen = streamclone.generatebundlev1(repo)
490 492 changegroup.writechunks(ui, gen, fname)
491 493
492 494 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
493 495
494 496 @command('debugdag',
495 497 [('t', 'tags', None, _('use tags as labels')),
496 498 ('b', 'branches', None, _('annotate with branch names')),
497 499 ('', 'dots', None, _('use dots for runs')),
498 500 ('s', 'spaces', None, _('separate elements by spaces'))],
499 501 _('[OPTION]... [FILE [REV]...]'),
500 502 optionalrepo=True)
501 503 def debugdag(ui, repo, file_=None, *revs, **opts):
502 504 """format the changelog or an index DAG as a concise textual description
503 505
504 506 If you pass a revlog index, the revlog's DAG is emitted. If you list
505 507 revision numbers, they get labeled in the output as rN.
506 508
507 509 Otherwise, the changelog DAG of the current repo is emitted.
508 510 """
509 511 spaces = opts.get(r'spaces')
510 512 dots = opts.get(r'dots')
511 513 if file_:
512 514 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
513 515 file_)
514 516 revs = set((int(r) for r in revs))
515 517 def events():
516 518 for r in rlog:
517 519 yield 'n', (r, list(p for p in rlog.parentrevs(r)
518 520 if p != -1))
519 521 if r in revs:
520 522 yield 'l', (r, "r%i" % r)
521 523 elif repo:
522 524 cl = repo.changelog
523 525 tags = opts.get(r'tags')
524 526 branches = opts.get(r'branches')
525 527 if tags:
526 528 labels = {}
527 529 for l, n in repo.tags().items():
528 530 labels.setdefault(cl.rev(n), []).append(l)
529 531 def events():
530 532 b = "default"
531 533 for r in cl:
532 534 if branches:
533 535 newb = cl.read(cl.node(r))[5]['branch']
534 536 if newb != b:
535 537 yield 'a', newb
536 538 b = newb
537 539 yield 'n', (r, list(p for p in cl.parentrevs(r)
538 540 if p != -1))
539 541 if tags:
540 542 ls = labels.get(r)
541 543 if ls:
542 544 for l in ls:
543 545 yield 'l', (r, l)
544 546 else:
545 547 raise error.Abort(_('need repo for changelog dag'))
546 548
547 549 for line in dagparser.dagtextlines(events(),
548 550 addspaces=spaces,
549 551 wraplabels=True,
550 552 wrapannotations=True,
551 553 wrapnonlinear=dots,
552 554 usedots=dots,
553 555 maxlinewidth=70):
554 556 ui.write(line)
555 557 ui.write("\n")
556 558
557 559 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
558 560 def debugdata(ui, repo, file_, rev=None, **opts):
559 561 """dump the contents of a data file revision"""
560 562 opts = pycompat.byteskwargs(opts)
561 563 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
562 564 if rev is not None:
563 565 raise error.CommandError('debugdata', _('invalid arguments'))
564 566 file_, rev = None, file_
565 567 elif rev is None:
566 568 raise error.CommandError('debugdata', _('invalid arguments'))
567 569 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
568 570 try:
569 571 ui.write(r.revision(r.lookup(rev), raw=True))
570 572 except KeyError:
571 573 raise error.Abort(_('invalid revision identifier %s') % rev)
572 574
573 575 @command('debugdate',
574 576 [('e', 'extended', None, _('try extended date formats'))],
575 577 _('[-e] DATE [RANGE]'),
576 578 norepo=True, optionalrepo=True)
577 579 def debugdate(ui, date, range=None, **opts):
578 580 """parse and display a date"""
579 581 if opts[r"extended"]:
580 582 d = dateutil.parsedate(date, util.extendeddateformats)
581 583 else:
582 584 d = dateutil.parsedate(date)
583 585 ui.write(("internal: %d %d\n") % d)
584 586 ui.write(("standard: %s\n") % dateutil.datestr(d))
585 587 if range:
586 588 m = dateutil.matchdate(range)
587 589 ui.write(("match: %s\n") % m(d[0]))
588 590
589 591 @command('debugdeltachain',
590 592 cmdutil.debugrevlogopts + cmdutil.formatteropts,
591 593 _('-c|-m|FILE'),
592 594 optionalrepo=True)
593 595 def debugdeltachain(ui, repo, file_=None, **opts):
594 596 """dump information about delta chains in a revlog
595 597
596 598 Output can be templatized. Available template keywords are:
597 599
598 600 :``rev``: revision number
599 601 :``chainid``: delta chain identifier (numbered by unique base)
600 602 :``chainlen``: delta chain length to this revision
601 603 :``prevrev``: previous revision in delta chain
602 604 :``deltatype``: role of delta / how it was computed
603 605 :``compsize``: compressed size of revision
604 606 :``uncompsize``: uncompressed size of revision
605 607 :``chainsize``: total size of compressed revisions in chain
606 608 :``chainratio``: total chain size divided by uncompressed revision size
607 609 (new delta chains typically start at ratio 2.00)
608 610 :``lindist``: linear distance from base revision in delta chain to end
609 611 of this revision
610 612 :``extradist``: total size of revisions not part of this delta chain from
611 613 base of delta chain to end of this revision; a measurement
612 614 of how much extra data we need to read/seek across to read
613 615 the delta chain for this revision
614 616 :``extraratio``: extradist divided by chainsize; another representation of
615 617 how much unrelated data is needed to load this delta chain
616 618
617 619 If the repository is configured to use the sparse read, additional keywords
618 620 are available:
619 621
620 622 :``readsize``: total size of data read from the disk for a revision
621 623 (sum of the sizes of all the blocks)
622 624 :``largestblock``: size of the largest block of data read from the disk
623 625 :``readdensity``: density of useful bytes in the data read from the disk
624 626 :``srchunks``: in how many data hunks the whole revision would be read
625 627
626 628 The sparse read can be enabled with experimental.sparse-read = True
627 629 """
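    # Illustrative usage, assuming the template keywords documented above
    # (the actual values depend on the repository):
    #
    #     hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
    #
    # would print, for each manifest revision, its delta chain id, chain
    # length, and chain-size-to-revision-size ratio.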
628 630 opts = pycompat.byteskwargs(opts)
629 631 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
630 632 index = r.index
631 633 start = r.start
632 634 length = r.length
633 635 generaldelta = r.version & revlog.FLAG_GENERALDELTA
634 636 withsparseread = getattr(r, '_withsparseread', False)
635 637
636 638 def revinfo(rev):
637 639 e = index[rev]
638 640 compsize = e[1]
639 641 uncompsize = e[2]
640 642 chainsize = 0
641 643
642 644 if generaldelta:
643 645 if e[3] == e[5]:
644 646 deltatype = 'p1'
645 647 elif e[3] == e[6]:
646 648 deltatype = 'p2'
647 649 elif e[3] == rev - 1:
648 650 deltatype = 'prev'
649 651 elif e[3] == rev:
650 652 deltatype = 'base'
651 653 else:
652 654 deltatype = 'other'
653 655 else:
654 656 if e[3] == rev:
655 657 deltatype = 'base'
656 658 else:
657 659 deltatype = 'prev'
658 660
659 661 chain = r._deltachain(rev)[0]
660 662 for iterrev in chain:
661 663 e = index[iterrev]
662 664 chainsize += e[1]
663 665
664 666 return compsize, uncompsize, deltatype, chain, chainsize
665 667
666 668 fm = ui.formatter('debugdeltachain', opts)
667 669
668 670 fm.plain(' rev chain# chainlen prev delta '
669 671 'size rawsize chainsize ratio lindist extradist '
670 672 'extraratio')
671 673 if withsparseread:
672 674 fm.plain(' readsize largestblk rddensity srchunks')
673 675 fm.plain('\n')
674 676
675 677 chainbases = {}
676 678 for rev in r:
677 679 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
678 680 chainbase = chain[0]
679 681 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
680 682 basestart = start(chainbase)
681 683 revstart = start(rev)
682 684 lineardist = revstart + comp - basestart
683 685 extradist = lineardist - chainsize
684 686 try:
685 687 prevrev = chain[-2]
686 688 except IndexError:
687 689 prevrev = -1
688 690
689 691 chainratio = float(chainsize) / float(uncomp)
690 692 extraratio = float(extradist) / float(chainsize)
691 693
692 694 fm.startitem()
693 695 fm.write('rev chainid chainlen prevrev deltatype compsize '
694 696 'uncompsize chainsize chainratio lindist extradist '
695 697 'extraratio',
696 698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
697 699 rev, chainid, len(chain), prevrev, deltatype, comp,
698 700 uncomp, chainsize, chainratio, lineardist, extradist,
699 701 extraratio,
700 702 rev=rev, chainid=chainid, chainlen=len(chain),
701 703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
702 704 uncompsize=uncomp, chainsize=chainsize,
703 705 chainratio=chainratio, lindist=lineardist,
704 706 extradist=extradist, extraratio=extraratio)
705 707 if withsparseread:
706 708 readsize = 0
707 709 largestblock = 0
708 710 srchunks = 0
709 711
710 712 for revschunk in revlog._slicechunk(r, chain):
711 713 srchunks += 1
712 714 blkend = start(revschunk[-1]) + length(revschunk[-1])
713 715 blksize = blkend - start(revschunk[0])
714 716
715 717 readsize += blksize
716 718 if largestblock < blksize:
717 719 largestblock = blksize
718 720
719 721 readdensity = float(chainsize) / float(readsize)
720 722
721 723 fm.write('readsize largestblock readdensity srchunks',
722 724 ' %10d %10d %9.5f %8d',
723 725 readsize, largestblock, readdensity, srchunks,
724 726 readsize=readsize, largestblock=largestblock,
725 727 readdensity=readdensity, srchunks=srchunks)
726 728
727 729 fm.plain('\n')
728 730
729 731 fm.end()
730 732
731 733 @command('debugdirstate|debugstate',
732 734 [('', 'nodates', None, _('do not display the saved mtime')),
733 735 ('', 'datesort', None, _('sort by saved mtime'))],
734 736 _('[OPTION]...'))
735 737 def debugstate(ui, repo, **opts):
736 738 """show the contents of the current dirstate"""
737 739
738 740 nodates = opts.get(r'nodates')
739 741 datesort = opts.get(r'datesort')
740 742
741 743 timestr = ""
742 744 if datesort:
743 745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
744 746 else:
745 747 keyfunc = None # sort by filename
746 748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
747 749 if ent[3] == -1:
748 750 timestr = 'unset '
749 751 elif nodates:
750 752 timestr = 'set '
751 753 else:
752 754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
753 755 time.localtime(ent[3]))
754 756 timestr = encoding.strtolocal(timestr)
755 757 if ent[1] & 0o20000:
756 758 mode = 'lnk'
757 759 else:
758 760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
759 761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
760 762 for f in repo.dirstate.copies():
761 763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
762 764
763 765 @command('debugdiscovery',
764 766 [('', 'old', None, _('use old-style discovery')),
765 767 ('', 'nonheads', None,
766 768 _('use old-style discovery with non-heads included')),
767 769 ('', 'rev', [], 'restrict discovery to this set of revs'),
768 770 ] + cmdutil.remoteopts,
769 771 _('[--rev REV] [OTHER]'))
770 772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
771 773 """runs the changeset discovery protocol in isolation"""
772 774 opts = pycompat.byteskwargs(opts)
773 775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
774 776 remote = hg.peer(repo, opts, remoteurl)
775 777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
776 778
777 779 # make sure tests are repeatable
778 780 random.seed(12323)
779 781
780 782 def doit(pushedrevs, remoteheads, remote=remote):
781 783 if opts.get('old'):
782 784 if not util.safehasattr(remote, 'branches'):
783 785 # enable in-client legacy support
784 786 remote = localrepo.locallegacypeer(remote.local())
785 787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
786 788 force=True)
787 789 common = set(common)
788 790 if not opts.get('nonheads'):
789 791 ui.write(("unpruned common: %s\n") %
790 792 " ".join(sorted(short(n) for n in common)))
791 793 dag = dagutil.revlogdag(repo.changelog)
792 794 all = dag.ancestorset(dag.internalizeall(common))
793 795 common = dag.externalizeall(dag.headsetofconnecteds(all))
794 796 else:
795 797 nodes = None
796 798 if pushedrevs:
797 799 revs = scmutil.revrange(repo, pushedrevs)
798 800 nodes = [repo[r].node() for r in revs]
799 801 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
800 802 ancestorsof=nodes)
801 803 common = set(common)
802 804 rheads = set(hds)
803 805 lheads = set(repo.heads())
804 806 ui.write(("common heads: %s\n") %
805 807 " ".join(sorted(short(n) for n in common)))
806 808 if lheads <= common:
807 809 ui.write(("local is subset\n"))
808 810 elif rheads <= common:
809 811 ui.write(("remote is subset\n"))
810 812
811 813 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
812 814 localrevs = opts['rev']
813 815 doit(localrevs, remoterevs)
814 816
815 817 _chunksize = 4 << 10
816 818
817 819 @command('debugdownload',
818 820 [
819 821 ('o', 'output', '', _('path')),
820 822 ],
821 823 optionalrepo=True)
822 824 def debugdownload(ui, repo, url, output=None, **opts):
823 825 """download a resource using Mercurial logic and config
824 826 """
825 827 fh = urlmod.open(ui, url, output)
826 828
827 829 dest = ui
828 830 if output:
829 831 dest = open(output, "wb", _chunksize)
830 832 try:
831 833 data = fh.read(_chunksize)
832 834 while data:
833 835 dest.write(data)
834 836 data = fh.read(_chunksize)
835 837 finally:
836 838 if output:
837 839 dest.close()
838 840
839 841 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
840 842 def debugextensions(ui, repo, **opts):
841 843 '''show information about active extensions'''
842 844 opts = pycompat.byteskwargs(opts)
843 845 exts = extensions.extensions(ui)
844 846 hgver = util.version()
845 847 fm = ui.formatter('debugextensions', opts)
846 848 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
847 849 isinternal = extensions.ismoduleinternal(extmod)
848 850 extsource = pycompat.fsencode(extmod.__file__)
849 851 if isinternal:
850 852 exttestedwith = [] # never expose magic string to users
851 853 else:
852 854 exttestedwith = getattr(extmod, 'testedwith', '').split()
853 855 extbuglink = getattr(extmod, 'buglink', None)
854 856
855 857 fm.startitem()
856 858
857 859 if ui.quiet or ui.verbose:
858 860 fm.write('name', '%s\n', extname)
859 861 else:
860 862 fm.write('name', '%s', extname)
861 863 if isinternal or hgver in exttestedwith:
862 864 fm.plain('\n')
863 865 elif not exttestedwith:
864 866 fm.plain(_(' (untested!)\n'))
865 867 else:
866 868 lasttestedversion = exttestedwith[-1]
867 869 fm.plain(' (%s!)\n' % lasttestedversion)
868 870
869 871 fm.condwrite(ui.verbose and extsource, 'source',
870 872 _(' location: %s\n'), extsource or "")
871 873
872 874 if ui.verbose:
873 875 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
874 876 fm.data(bundled=isinternal)
875 877
876 878 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
877 879 _(' tested with: %s\n'),
878 880 fm.formatlist(exttestedwith, name='ver'))
879 881
880 882 fm.condwrite(ui.verbose and extbuglink, 'buglink',
881 883 _(' bug reporting: %s\n'), extbuglink or "")
882 884
883 885 fm.end()
884 886
885 887 @command('debugfileset',
886 888 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
887 889 _('[-r REV] FILESPEC'))
888 890 def debugfileset(ui, repo, expr, **opts):
889 891 '''parse and apply a fileset specification'''
890 892 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
891 893 if ui.verbose:
892 894 tree = fileset.parse(expr)
893 895 ui.note(fileset.prettyformat(tree), "\n")
894 896
895 897 for f in ctx.getfileset(expr):
896 898 ui.write("%s\n" % f)
897 899
898 900 @command('debugformat',
899 901 [] + cmdutil.formatteropts,
900 902 _(''))
901 903 def debugformat(ui, repo, **opts):
902 904 """display format information about the current repository
903 905
904 906 Use --verbose to get extra information about current config value and
905 907 Mercurial default."""
906 908 opts = pycompat.byteskwargs(opts)
907 909 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
908 910 maxvariantlength = max(len('format-variant'), maxvariantlength)
909 911
910 912 def makeformatname(name):
911 913 return '%s:' + (' ' * (maxvariantlength - len(name)))
912 914
913 915 fm = ui.formatter('debugformat', opts)
914 916 if fm.isplain():
915 917 def formatvalue(value):
916 918 if util.safehasattr(value, 'startswith'):
917 919 return value
918 920 if value:
919 921 return 'yes'
920 922 else:
921 923 return 'no'
922 924 else:
923 925 formatvalue = pycompat.identity
924 926
925 927 fm.plain('format-variant')
926 928 fm.plain(' ' * (maxvariantlength - len('format-variant')))
927 929 fm.plain(' repo')
928 930 if ui.verbose:
929 931 fm.plain(' config default')
930 932 fm.plain('\n')
931 933 for fv in upgrade.allformatvariant:
932 934 fm.startitem()
933 935 repovalue = fv.fromrepo(repo)
934 936 configvalue = fv.fromconfig(repo)
935 937
936 938 if repovalue != configvalue:
937 939 namelabel = 'formatvariant.name.mismatchconfig'
938 940 repolabel = 'formatvariant.repo.mismatchconfig'
939 941 elif repovalue != fv.default:
940 942 namelabel = 'formatvariant.name.mismatchdefault'
941 943 repolabel = 'formatvariant.repo.mismatchdefault'
942 944 else:
943 945 namelabel = 'formatvariant.name.uptodate'
944 946 repolabel = 'formatvariant.repo.uptodate'
945 947
946 948 fm.write('name', makeformatname(fv.name), fv.name,
947 949 label=namelabel)
948 950 fm.write('repo', ' %3s', formatvalue(repovalue),
949 951 label=repolabel)
950 952 if fv.default != configvalue:
951 953 configlabel = 'formatvariant.config.special'
952 954 else:
953 955 configlabel = 'formatvariant.config.default'
954 956 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
955 957 label=configlabel)
956 958 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
957 959 label='formatvariant.default')
958 960 fm.plain('\n')
959 961 fm.end()
960 962
961 963 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
962 964 def debugfsinfo(ui, path="."):
963 965 """show information detected about current filesystem"""
964 966 ui.write(('path: %s\n') % path)
965 967 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
966 968 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
967 969 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
968 970 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
969 971 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
970 972 casesensitive = '(unknown)'
971 973 try:
972 974 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
973 975 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
974 976 except OSError:
975 977 pass
976 978 ui.write(('case-sensitive: %s\n') % casesensitive)
977 979
978 980 @command('debuggetbundle',
979 981 [('H', 'head', [], _('id of head node'), _('ID')),
980 982 ('C', 'common', [], _('id of common node'), _('ID')),
981 983 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
982 984 _('REPO FILE [-H|-C ID]...'),
983 985 norepo=True)
984 986 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
985 987 """retrieves a bundle from a repo
986 988
987 989 Every ID must be a full-length hex node id string. Saves the bundle to the
988 990 given file.
989 991 """
990 992 opts = pycompat.byteskwargs(opts)
991 993 repo = hg.peer(ui, opts, repopath)
992 994 if not repo.capable('getbundle'):
993 995 raise error.Abort("getbundle() not supported by target repository")
994 996 args = {}
995 997 if common:
996 998 args[r'common'] = [bin(s) for s in common]
997 999 if head:
998 1000 args[r'heads'] = [bin(s) for s in head]
999 1001 # TODO: get desired bundlecaps from command line.
1000 1002 args[r'bundlecaps'] = None
1001 1003 bundle = repo.getbundle('debug', **args)
1002 1004
1003 1005 bundletype = opts.get('type', 'bzip2').lower()
1004 1006 btypes = {'none': 'HG10UN',
1005 1007 'bzip2': 'HG10BZ',
1006 1008 'gzip': 'HG10GZ',
1007 1009 'bundle2': 'HG20'}
1008 1010 bundletype = btypes.get(bundletype)
1009 1011 if bundletype not in bundle2.bundletypes:
1010 1012 raise error.Abort(_('unknown bundle type specified with --type'))
1011 1013 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1012 1014
1013 1015 @command('debugignore', [], '[FILE]')
1014 1016 def debugignore(ui, repo, *files, **opts):
1015 1017 """display the combined ignore pattern and information about ignored files
1016 1018
1017 1019 With no argument display the combined ignore pattern.
1018 1020
1019 1021 Given space separated file names, shows if the given file is ignored and
1020 1022 if so, show the ignore rule (file and line number) that matched it.
1021 1023 """
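    # Illustrative session (the file name, rule file and line number are
    # hypothetical; the message wording comes from the code below):
    #
    #     $ hg debugignore                 # prints the combined ignore pattern
    #     $ hg debugignore build/out.o
    #     build/out.o is ignored
    #     (ignore rule in .hgignore, line 3: 'build')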
1022 1024 ignore = repo.dirstate._ignore
1023 1025 if not files:
1024 1026 # Show all the patterns
1025 1027 ui.write("%s\n" % pycompat.byterepr(ignore))
1026 1028 else:
1027 1029 m = scmutil.match(repo[None], pats=files)
1028 1030 for f in m.files():
1029 1031 nf = util.normpath(f)
1030 1032 ignored = None
1031 1033 ignoredata = None
1032 1034 if nf != '.':
1033 1035 if ignore(nf):
1034 1036 ignored = nf
1035 1037 ignoredata = repo.dirstate._ignorefileandline(nf)
1036 1038 else:
1037 1039 for p in util.finddirs(nf):
1038 1040 if ignore(p):
1039 1041 ignored = p
1040 1042 ignoredata = repo.dirstate._ignorefileandline(p)
1041 1043 break
1042 1044 if ignored:
1043 1045 if ignored == nf:
1044 1046 ui.write(_("%s is ignored\n") % m.uipath(f))
1045 1047 else:
1046 1048 ui.write(_("%s is ignored because of "
1047 1049 "containing folder %s\n")
1048 1050 % (m.uipath(f), ignored))
1049 1051 ignorefile, lineno, line = ignoredata
1050 1052 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1051 1053 % (ignorefile, lineno, line))
1052 1054 else:
1053 1055 ui.write(_("%s is not ignored\n") % m.uipath(f))
1054 1056
1055 1057 @command('debugindex', cmdutil.debugrevlogopts +
1056 1058 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1057 1059 _('[-f FORMAT] -c|-m|FILE'),
1058 1060 optionalrepo=True)
1059 1061 def debugindex(ui, repo, file_=None, **opts):
1060 1062 """dump the contents of an index file"""
1061 1063 opts = pycompat.byteskwargs(opts)
1062 1064 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1063 1065 format = opts.get('format', 0)
1064 1066 if format not in (0, 1):
1065 1067 raise error.Abort(_("unknown format %d") % format)
1066 1068
1067 1069 if ui.debugflag:
1068 1070 shortfn = hex
1069 1071 else:
1070 1072 shortfn = short
1071 1073
1072 1074 # There might not be anything in r, so have a sane default
1073 1075 idlen = 12
1074 1076 for i in r:
1075 1077 idlen = len(shortfn(r.node(i)))
1076 1078 break
1077 1079
1078 1080 if format == 0:
1079 1081 if ui.verbose:
1080 1082 ui.write((" rev offset length linkrev"
1081 1083 " %s %s p2\n") % ("nodeid".ljust(idlen),
1082 1084 "p1".ljust(idlen)))
1083 1085 else:
1084 1086 ui.write((" rev linkrev %s %s p2\n") % (
1085 1087 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1086 1088 elif format == 1:
1087 1089 if ui.verbose:
1088 1090 ui.write((" rev flag offset length size link p1"
1089 1091 " p2 %s\n") % "nodeid".rjust(idlen))
1090 1092 else:
1091 1093 ui.write((" rev flag size link p1 p2 %s\n") %
1092 1094 "nodeid".rjust(idlen))
1093 1095
1094 1096 for i in r:
1095 1097 node = r.node(i)
1096 1098 if format == 0:
1097 1099 try:
1098 1100 pp = r.parents(node)
1099 1101 except Exception:
1100 1102 pp = [nullid, nullid]
1101 1103 if ui.verbose:
1102 1104 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1103 1105 i, r.start(i), r.length(i), r.linkrev(i),
1104 1106 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1105 1107 else:
1106 1108 ui.write("% 6d % 7d %s %s %s\n" % (
1107 1109 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1108 1110 shortfn(pp[1])))
1109 1111 elif format == 1:
1110 1112 pr = r.parentrevs(i)
1111 1113 if ui.verbose:
1112 1114 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1113 1115 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1114 1116 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1115 1117 else:
1116 1118 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1117 1119 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1118 1120 shortfn(node)))
1119 1121
1120 1122 @command('debugindexdot', cmdutil.debugrevlogopts,
1121 1123 _('-c|-m|FILE'), optionalrepo=True)
1122 1124 def debugindexdot(ui, repo, file_=None, **opts):
1123 1125 """dump an index DAG as a graphviz dot file"""
1124 1126 opts = pycompat.byteskwargs(opts)
1125 1127 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1126 1128 ui.write(("digraph G {\n"))
1127 1129 for i in r:
1128 1130 node = r.node(i)
1129 1131 pp = r.parents(node)
1130 1132 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1131 1133 if pp[1] != nullid:
1132 1134 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1133 1135 ui.write("}\n")
1134 1136
1135 1137 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1136 1138 def debuginstall(ui, **opts):
1137 1139 '''test Mercurial installation
1138 1140
1139 1141 Returns 0 on success.
1140 1142 '''
1141 1143 opts = pycompat.byteskwargs(opts)
1142 1144
1143 1145 def writetemp(contents):
1144 1146 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1145 1147 f = os.fdopen(fd, r"wb")
1146 1148 f.write(contents)
1147 1149 f.close()
1148 1150 return name
1149 1151
1150 1152 problems = 0
1151 1153
1152 1154 fm = ui.formatter('debuginstall', opts)
1153 1155 fm.startitem()
1154 1156
1155 1157 # encoding
1156 1158 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1157 1159 err = None
1158 1160 try:
1159 1161 codecs.lookup(pycompat.sysstr(encoding.encoding))
1160 1162 except LookupError as inst:
1161 1163 err = stringutil.forcebytestr(inst)
1162 1164 problems += 1
1163 1165 fm.condwrite(err, 'encodingerror', _(" %s\n"
1164 1166 " (check that your locale is properly set)\n"), err)
1165 1167
1166 1168 # Python
1167 1169 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1168 1170 pycompat.sysexecutable)
1169 1171 fm.write('pythonver', _("checking Python version (%s)\n"),
1170 1172 ("%d.%d.%d" % sys.version_info[:3]))
1171 1173 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1172 1174 os.path.dirname(pycompat.fsencode(os.__file__)))
1173 1175
1174 1176 security = set(sslutil.supportedprotocols)
1175 1177 if sslutil.hassni:
1176 1178 security.add('sni')
1177 1179
1178 1180 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1179 1181 fm.formatlist(sorted(security), name='protocol',
1180 1182 fmt='%s', sep=','))
1181 1183
1182 1184 # These are warnings, not errors. So don't increment problem count. This
1183 1185 # may change in the future.
1184 1186 if 'tls1.2' not in security:
1185 1187 fm.plain(_(' TLS 1.2 not supported by Python install; '
1186 1188 'network connections lack modern security\n'))
1187 1189 if 'sni' not in security:
1188 1190 fm.plain(_(' SNI not supported by Python install; may have '
1189 1191 'connectivity issues with some servers\n'))
1190 1192
1191 1193 # TODO print CA cert info
1192 1194
1193 1195 # hg version
1194 1196 hgver = util.version()
1195 1197 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1196 1198 hgver.split('+')[0])
1197 1199 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1198 1200 '+'.join(hgver.split('+')[1:]))
1199 1201
1200 1202 # compiled modules
1201 1203 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1202 1204 policy.policy)
1203 1205 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1204 1206 os.path.dirname(pycompat.fsencode(__file__)))
1205 1207
1206 1208 if policy.policy in ('c', 'allow'):
1207 1209 err = None
1208 1210 try:
1209 1211 from .cext import (
1210 1212 base85,
1211 1213 bdiff,
1212 1214 mpatch,
1213 1215 osutil,
1214 1216 )
1215 1217 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1216 1218 except Exception as inst:
1217 1219 err = stringutil.forcebytestr(inst)
1218 1220 problems += 1
1219 1221 fm.condwrite(err, 'extensionserror', " %s\n", err)
1220 1222
1221 1223 compengines = util.compengines._engines.values()
1222 1224 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1223 1225 fm.formatlist(sorted(e.name() for e in compengines),
1224 1226 name='compengine', fmt='%s', sep=', '))
1225 1227 fm.write('compenginesavail', _('checking available compression engines '
1226 1228 '(%s)\n'),
1227 1229 fm.formatlist(sorted(e.name() for e in compengines
1228 1230 if e.available()),
1229 1231 name='compengine', fmt='%s', sep=', '))
1230 1232 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1231 1233 fm.write('compenginesserver', _('checking available compression engines '
1232 1234 'for wire protocol (%s)\n'),
1233 1235 fm.formatlist([e.name() for e in wirecompengines
1234 1236 if e.wireprotosupport()],
1235 1237 name='compengine', fmt='%s', sep=', '))
1236 1238 re2 = 'missing'
1237 1239 if util._re2:
1238 1240 re2 = 'available'
1239 1241 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1240 1242 fm.data(re2=bool(util._re2))
1241 1243
1242 1244 # templates
1243 1245 p = templater.templatepaths()
1244 1246 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1245 1247 fm.condwrite(not p, '', _(" no template directories found\n"))
1246 1248 if p:
1247 1249 m = templater.templatepath("map-cmdline.default")
1248 1250 if m:
1249 1251 # template found, check if it is working
1250 1252 err = None
1251 1253 try:
1252 1254 templater.templater.frommapfile(m)
1253 1255 except Exception as inst:
1254 1256 err = stringutil.forcebytestr(inst)
1255 1257 p = None
1256 1258 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1257 1259 else:
1258 1260 p = None
1259 1261 fm.condwrite(p, 'defaulttemplate',
1260 1262 _("checking default template (%s)\n"), m)
1261 1263 fm.condwrite(not m, 'defaulttemplatenotfound',
1262 1264 _(" template '%s' not found\n"), "default")
1263 1265 if not p:
1264 1266 problems += 1
1265 1267 fm.condwrite(not p, '',
1266 1268 _(" (templates seem to have been installed incorrectly)\n"))
1267 1269
1268 1270 # editor
1269 1271 editor = ui.geteditor()
1270 1272 editor = util.expandpath(editor)
1271 1273 editorbin = procutil.shellsplit(editor)[0]
1272 1274 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1273 1275 cmdpath = procutil.findexe(editorbin)
1274 1276 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1275 1277 _(" No commit editor set and can't find %s in PATH\n"
1276 1278 " (specify a commit editor in your configuration"
1277 1279 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1278 1280 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1279 1281 _(" Can't find editor '%s' in PATH\n"
1280 1282 " (specify a commit editor in your configuration"
1281 1283 " file)\n"), not cmdpath and editorbin)
1282 1284 if not cmdpath and editor != 'vi':
1283 1285 problems += 1
1284 1286
1285 1287 # check username
1286 1288 username = None
1287 1289 err = None
1288 1290 try:
1289 1291 username = ui.username()
1290 1292 except error.Abort as e:
1291 1293 err = stringutil.forcebytestr(e)
1292 1294 problems += 1
1293 1295
1294 1296 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1295 1297 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1296 1298 " (specify a username in your configuration file)\n"), err)
1297 1299
1298 1300 fm.condwrite(not problems, '',
1299 1301 _("no problems detected\n"))
1300 1302 if not problems:
1301 1303 fm.data(problems=problems)
1302 1304 fm.condwrite(problems, 'problems',
1303 1305 _("%d problems detected,"
1304 1306 " please check your install!\n"), problems)
1305 1307 fm.end()
1306 1308
1307 1309 return problems
1308 1310
1309 1311 @command('debugknown', [], _('REPO ID...'), norepo=True)
1310 1312 def debugknown(ui, repopath, *ids, **opts):
1311 1313 """test whether node ids are known to a repo
1312 1314
1313 1315 Every ID must be a full-length hex node id string. Returns a list of 0s
1314 1316 and 1s indicating unknown/known.
1315 1317 """
1316 1318 opts = pycompat.byteskwargs(opts)
1317 1319 repo = hg.peer(ui, opts, repopath)
1318 1320 if not repo.capable('known'):
1319 1321 raise error.Abort("known() not supported by target repository")
1320 1322 flags = repo.known([bin(s) for s in ids])
1321 1323 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1322 1324
1323 1325 @command('debuglabelcomplete', [], _('LABEL...'))
1324 1326 def debuglabelcomplete(ui, repo, *args):
1325 1327 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1326 1328 debugnamecomplete(ui, repo, *args)
1327 1329
1328 1330 @command('debuglocks',
1329 1331 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1330 1332 ('W', 'force-wlock', None,
1331 1333 _('free the working state lock (DANGEROUS)')),
1332 1334 ('s', 'set-lock', None, _('set the store lock until stopped')),
1333 1335 ('S', 'set-wlock', None,
1334 1336 _('set the working state lock until stopped'))],
1335 1337 _('[OPTION]...'))
1336 1338 def debuglocks(ui, repo, **opts):
1337 1339 """show or modify state of locks
1338 1340
1339 1341 By default, this command will show which locks are held. This
1340 1342 includes the user and process holding the lock, the amount of time
1341 1343 the lock has been held, and the machine name where the process is
1342 1344 running if it's not local.
1343 1345
1344 1346 Locks protect the integrity of Mercurial's data, so should be
1345 1347 treated with care. System crashes or other interruptions may cause
1346 1348 locks to not be properly released, though Mercurial will usually
1347 1349 detect and remove such stale locks automatically.
1348 1350
1349 1351 However, detecting stale locks may not always be possible (for
1350 1352 instance, on a shared filesystem). Removing locks may also be
1351 1353 blocked by filesystem permissions.
1352 1354
1353 1355 Setting a lock will prevent other commands from changing the data.
1354 1356 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1355 1357 The set locks are removed when the command exits.
1356 1358
1357 1359 Returns 0 if no locks are held.
1358 1360
1359 1361 """
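    # Illustrative output (format strings taken from report() below; the user,
    # process id, host and age shown are hypothetical):
    #
    #     lock:  user alice, process 12345, host buildbox (21s)
    #     wlock: free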
1360 1362
1361 1363 if opts.get(r'force_lock'):
1362 1364 repo.svfs.unlink('lock')
1363 1365 if opts.get(r'force_wlock'):
1364 1366 repo.vfs.unlink('wlock')
1365 1367 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1366 1368 return 0
1367 1369
1368 1370 locks = []
1369 1371 try:
1370 1372 if opts.get(r'set_wlock'):
1371 1373 try:
1372 1374 locks.append(repo.wlock(False))
1373 1375 except error.LockHeld:
1374 1376 raise error.Abort(_('wlock is already held'))
1375 1377 if opts.get(r'set_lock'):
1376 1378 try:
1377 1379 locks.append(repo.lock(False))
1378 1380 except error.LockHeld:
1379 1381 raise error.Abort(_('lock is already held'))
1380 1382 if len(locks):
1381 1383 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1382 1384 return 0
1383 1385 finally:
1384 1386 release(*locks)
1385 1387
1386 1388 now = time.time()
1387 1389 held = 0
1388 1390
1389 1391 def report(vfs, name, method):
1390 1392 # this causes stale locks to get reaped for more accurate reporting
1391 1393 try:
1392 1394 l = method(False)
1393 1395 except error.LockHeld:
1394 1396 l = None
1395 1397
1396 1398 if l:
1397 1399 l.release()
1398 1400 else:
1399 1401 try:
1400 1402 st = vfs.lstat(name)
1401 1403 age = now - st[stat.ST_MTIME]
1402 1404 user = util.username(st.st_uid)
1403 1405 locker = vfs.readlock(name)
1404 1406 if ":" in locker:
1405 1407 host, pid = locker.split(':')
1406 1408 if host == socket.gethostname():
1407 1409 locker = 'user %s, process %s' % (user, pid)
1408 1410 else:
1409 1411 locker = 'user %s, process %s, host %s' \
1410 1412 % (user, pid, host)
1411 1413 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1412 1414 return 1
1413 1415 except OSError as e:
1414 1416 if e.errno != errno.ENOENT:
1415 1417 raise
1416 1418
1417 1419 ui.write(("%-6s free\n") % (name + ":"))
1418 1420 return 0
1419 1421
1420 1422 held += report(repo.svfs, "lock", repo.lock)
1421 1423 held += report(repo.vfs, "wlock", repo.wlock)
1422 1424
1423 1425 return held
1424 1426
1425 1427 @command('debugmergestate', [], '')
1426 1428 def debugmergestate(ui, repo, *args):
1427 1429 """print merge state
1428 1430
1429 1431 Use --verbose to print out information about whether v1 or v2 merge state
1430 1432 was chosen."""
1431 1433 def _hashornull(h):
1432 1434 if h == nullhex:
1433 1435 return 'null'
1434 1436 else:
1435 1437 return h
1436 1438
1437 1439 def printrecords(version):
1438 1440 ui.write(('* version %d records\n') % version)
1439 1441 if version == 1:
1440 1442 records = v1records
1441 1443 else:
1442 1444 records = v2records
1443 1445
1444 1446 for rtype, record in records:
1445 1447 # pretty print some record types
1446 1448 if rtype == 'L':
1447 1449 ui.write(('local: %s\n') % record)
1448 1450 elif rtype == 'O':
1449 1451 ui.write(('other: %s\n') % record)
1450 1452 elif rtype == 'm':
1451 1453 driver, mdstate = record.split('\0', 1)
1452 1454 ui.write(('merge driver: %s (state "%s")\n')
1453 1455 % (driver, mdstate))
1454 1456 elif rtype in 'FDC':
1455 1457 r = record.split('\0')
1456 1458 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1457 1459 if version == 1:
1458 1460 onode = 'not stored in v1 format'
1459 1461 flags = r[7]
1460 1462 else:
1461 1463 onode, flags = r[7:9]
1462 1464 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1463 1465 % (f, rtype, state, _hashornull(hash)))
1464 1466 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1465 1467 ui.write((' ancestor path: %s (node %s)\n')
1466 1468 % (afile, _hashornull(anode)))
1467 1469 ui.write((' other path: %s (node %s)\n')
1468 1470 % (ofile, _hashornull(onode)))
1469 1471 elif rtype == 'f':
1470 1472 filename, rawextras = record.split('\0', 1)
1471 1473 extras = rawextras.split('\0')
1472 1474 i = 0
1473 1475 extrastrings = []
1474 1476 while i < len(extras):
1475 1477 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1476 1478 i += 2
1477 1479
1478 1480 ui.write(('file extras: %s (%s)\n')
1479 1481 % (filename, ', '.join(extrastrings)))
1480 1482 elif rtype == 'l':
1481 1483 labels = record.split('\0', 2)
1482 1484 labels = [l for l in labels if len(l) > 0]
1483 1485 ui.write(('labels:\n'))
1484 1486 ui.write((' local: %s\n' % labels[0]))
1485 1487 ui.write((' other: %s\n' % labels[1]))
1486 1488 if len(labels) > 2:
1487 1489 ui.write((' base: %s\n' % labels[2]))
1488 1490 else:
1489 1491 ui.write(('unrecognized entry: %s\t%s\n')
1490 1492 % (rtype, record.replace('\0', '\t')))
1491 1493
1492 1494 # Avoid mergestate.read() since it may raise an exception for unsupported
1493 1495 # merge state records. We shouldn't be doing this, but this is OK since this
1494 1496 # command is pretty low-level.
1495 1497 ms = mergemod.mergestate(repo)
1496 1498
1497 1499 # sort so that reasonable information is on top
1498 1500 v1records = ms._readrecordsv1()
1499 1501 v2records = ms._readrecordsv2()
1500 1502 order = 'LOml'
1501 1503 def key(r):
1502 1504 idx = order.find(r[0])
1503 1505 if idx == -1:
1504 1506 return (1, r[1])
1505 1507 else:
1506 1508 return (0, idx)
1507 1509 v1records.sort(key=key)
1508 1510 v2records.sort(key=key)
1509 1511
1510 1512 if not v1records and not v2records:
1511 1513 ui.write(('no merge state found\n'))
1512 1514 elif not v2records:
1513 1515 ui.note(('no version 2 merge state\n'))
1514 1516 printrecords(1)
1515 1517 elif ms._v1v2match(v1records, v2records):
1516 1518 ui.note(('v1 and v2 states match: using v2\n'))
1517 1519 printrecords(2)
1518 1520 else:
1519 1521 ui.note(('v1 and v2 states mismatch: using v1\n'))
1520 1522 printrecords(1)
1521 1523 if ui.verbose:
1522 1524 printrecords(2)
1523 1525
1524 1526 @command('debugnamecomplete', [], _('NAME...'))
1525 1527 def debugnamecomplete(ui, repo, *args):
1526 1528 '''complete "names" - tags, open branch names, bookmark names'''
1527 1529
1528 1530 names = set()
1529 1531 # since we previously only listed open branches, we will handle that
1530 1532 # specially (after this for loop)
1531 1533 for name, ns in repo.names.iteritems():
1532 1534 if name != 'branches':
1533 1535 names.update(ns.listnames(repo))
1534 1536 names.update(tag for (tag, heads, tip, closed)
1535 1537 in repo.branchmap().iterbranches() if not closed)
1536 1538 completions = set()
1537 1539 if not args:
1538 1540 args = ['']
1539 1541 for a in args:
1540 1542 completions.update(n for n in names if n.startswith(a))
1541 1543 ui.write('\n'.join(sorted(completions)))
1542 1544 ui.write('\n')
1543 1545
1544 1546 @command('debugobsolete',
1545 1547 [('', 'flags', 0, _('markers flag')),
1546 1548 ('', 'record-parents', False,
1547 1549 _('record parent information for the precursor')),
1548 1550 ('r', 'rev', [], _('display markers relevant to REV')),
1549 1551 ('', 'exclusive', False, _('restrict display to markers only '
1550 1552 'relevant to REV')),
1551 1553 ('', 'index', False, _('display index of the marker')),
1552 1554 ('', 'delete', [], _('delete markers specified by indices')),
1553 1555 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1554 1556 _('[OBSOLETED [REPLACEMENT ...]]'))
1555 1557 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1556 1558 """create arbitrary obsolete marker
1557 1559
1558 1560 With no arguments, displays the list of obsolescence markers."""
1559 1561
1560 1562 opts = pycompat.byteskwargs(opts)
1561 1563
1562 1564 def parsenodeid(s):
1563 1565 try:
1564 1566 # We do not use revsingle/revrange functions here to accept
1565 1567 # arbitrary node identifiers, possibly not present in the
1566 1568 # local repository.
1567 1569 n = bin(s)
1568 1570 if len(n) != len(nullid):
1569 1571 raise TypeError()
1570 1572 return n
1571 1573 except TypeError:
1572 1574 raise error.Abort('changeset references must be full hexadecimal '
1573 1575 'node identifiers')
1574 1576
1575 1577 if opts.get('delete'):
1576 1578 indices = []
1577 1579 for v in opts.get('delete'):
1578 1580 try:
1579 1581 indices.append(int(v))
1580 1582 except ValueError:
1581 1583 raise error.Abort(_('invalid index value: %r') % v,
1582 1584 hint=_('use integers for indices'))
1583 1585
1584 1586 if repo.currenttransaction():
1585 1587 raise error.Abort(_('cannot delete obsmarkers in the middle '
1586 1588 'of a transaction.'))
1587 1589
1588 1590 with repo.lock():
1589 1591 n = repair.deleteobsmarkers(repo.obsstore, indices)
1590 1592 ui.write(_('deleted %i obsolescence markers\n') % n)
1591 1593
1592 1594 return
1593 1595
1594 1596 if precursor is not None:
1595 1597 if opts['rev']:
1596 1598 raise error.Abort('cannot select revision when creating marker')
1597 1599 metadata = {}
1598 1600 metadata['user'] = opts['user'] or ui.username()
1599 1601 succs = tuple(parsenodeid(succ) for succ in successors)
1600 1602 l = repo.lock()
1601 1603 try:
1602 1604 tr = repo.transaction('debugobsolete')
1603 1605 try:
1604 1606 date = opts.get('date')
1605 1607 if date:
1606 1608 date = dateutil.parsedate(date)
1607 1609 else:
1608 1610 date = None
1609 1611 prec = parsenodeid(precursor)
1610 1612 parents = None
1611 1613 if opts['record_parents']:
1612 1614 if prec not in repo.unfiltered():
1613 1615 raise error.Abort('cannot use --record-parents on '
1614 1616 'unknown changesets')
1615 1617 parents = repo.unfiltered()[prec].parents()
1616 1618 parents = tuple(p.node() for p in parents)
1617 1619 repo.obsstore.create(tr, prec, succs, opts['flags'],
1618 1620 parents=parents, date=date,
1619 1621 metadata=metadata, ui=ui)
1620 1622 tr.close()
1621 1623 except ValueError as exc:
1622 1624 raise error.Abort(_('bad obsmarker input: %s') %
1623 1625 pycompat.bytestr(exc))
1624 1626 finally:
1625 1627 tr.release()
1626 1628 finally:
1627 1629 l.release()
1628 1630 else:
1629 1631 if opts['rev']:
1630 1632 revs = scmutil.revrange(repo, opts['rev'])
1631 1633 nodes = [repo[r].node() for r in revs]
1632 1634 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1633 1635 exclusive=opts['exclusive']))
1634 1636 markers.sort(key=lambda x: x._data)
1635 1637 else:
1636 1638 markers = obsutil.getmarkers(repo)
1637 1639
1638 1640 markerstoiter = markers
1639 1641 isrelevant = lambda m: True
1640 1642 if opts.get('rev') and opts.get('index'):
1641 1643 markerstoiter = obsutil.getmarkers(repo)
1642 1644 markerset = set(markers)
1643 1645 isrelevant = lambda m: m in markerset
1644 1646
1645 1647 fm = ui.formatter('debugobsolete', opts)
1646 1648 for i, m in enumerate(markerstoiter):
1647 1649 if not isrelevant(m):
1648 1650 # marker can be irrelevant when we're iterating over a set
1649 1651 # of markers (markerstoiter) which is bigger than the set
1650 1652 # of markers we want to display (markers)
1651 1653 # this can happen if both --index and --rev options are
1652 1654 # provided and thus we need to iterate over all of the markers
1653 1655 # to get the correct indices, but only display the ones that
1654 1656 # are relevant to --rev value
1655 1657 continue
1656 1658 fm.startitem()
1657 1659 ind = i if opts.get('index') else None
1658 1660 cmdutil.showmarker(fm, m, index=ind)
1659 1661 fm.end()
1660 1662
1661 1663 @command('debugpathcomplete',
1662 1664 [('f', 'full', None, _('complete an entire path')),
1663 1665 ('n', 'normal', None, _('show only normal files')),
1664 1666 ('a', 'added', None, _('show only added files')),
1665 1667 ('r', 'removed', None, _('show only removed files'))],
1666 1668 _('FILESPEC...'))
1667 1669 def debugpathcomplete(ui, repo, *specs, **opts):
1668 1670 '''complete part or all of a tracked path
1669 1671
1670 1672 This command supports shells that offer path name completion. It
1671 1673 currently completes only files already known to the dirstate.
1672 1674
1673 1675 Completion extends only to the next path segment unless
1674 1676 --full is specified, in which case entire paths are used.'''
1675 1677
1676 1678 def complete(path, acceptable):
1677 1679 dirstate = repo.dirstate
1678 1680 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1679 1681 rootdir = repo.root + pycompat.ossep
1680 1682 if spec != repo.root and not spec.startswith(rootdir):
1681 1683 return [], []
1682 1684 if os.path.isdir(spec):
1683 1685 spec += '/'
1684 1686 spec = spec[len(rootdir):]
1685 1687 fixpaths = pycompat.ossep != '/'
1686 1688 if fixpaths:
1687 1689 spec = spec.replace(pycompat.ossep, '/')
1688 1690 speclen = len(spec)
1689 1691 fullpaths = opts[r'full']
1690 1692 files, dirs = set(), set()
1691 1693 adddir, addfile = dirs.add, files.add
1692 1694 for f, st in dirstate.iteritems():
1693 1695 if f.startswith(spec) and st[0] in acceptable:
1694 1696 if fixpaths:
1695 1697 f = f.replace('/', pycompat.ossep)
1696 1698 if fullpaths:
1697 1699 addfile(f)
1698 1700 continue
1699 1701 s = f.find(pycompat.ossep, speclen)
1700 1702 if s >= 0:
1701 1703 adddir(f[:s])
1702 1704 else:
1703 1705 addfile(f)
1704 1706 return files, dirs
1705 1707
1706 1708 acceptable = ''
1707 1709 if opts[r'normal']:
1708 1710 acceptable += 'nm'
1709 1711 if opts[r'added']:
1710 1712 acceptable += 'a'
1711 1713 if opts[r'removed']:
1712 1714 acceptable += 'r'
1713 1715 cwd = repo.getcwd()
1714 1716 if not specs:
1715 1717 specs = ['.']
1716 1718
1717 1719 files, dirs = set(), set()
1718 1720 for spec in specs:
1719 1721 f, d = complete(spec, acceptable or 'nmar')
1720 1722 files.update(f)
1721 1723 dirs.update(d)
1722 1724 files.update(dirs)
1723 1725 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1724 1726 ui.write('\n')
1725 1727
1726 1728 @command('debugpeer', [], _('PATH'), norepo=True)
1727 1729 def debugpeer(ui, path):
1728 1730 """establish a connection to a peer repository"""
1729 1731 # Always enable peer request logging. Requires --debug to display
1730 1732 # though.
1731 1733 overrides = {
1732 1734 ('devel', 'debug.peer-request'): True,
1733 1735 }
1734 1736
1735 1737 with ui.configoverride(overrides):
1736 1738 peer = hg.peer(ui, {}, path)
1737 1739
1738 1740 local = peer.local() is not None
1739 1741 canpush = peer.canpush()
1740 1742
1741 1743 ui.write(_('url: %s\n') % peer.url())
1742 1744 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1743 1745 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1744 1746
1745 1747 @command('debugpickmergetool',
1746 1748 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1747 1749 ('', 'changedelete', None, _('emulate merging change and delete')),
1748 1750 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1749 1751 _('[PATTERN]...'),
1750 1752 inferrepo=True)
1751 1753 def debugpickmergetool(ui, repo, *pats, **opts):
1752 1754 """examine which merge tool is chosen for specified file
1753 1755
1754 1756 As described in :hg:`help merge-tools`, Mercurial examines the
1755 1757 configurations below, in this order, to decide which merge tool is
1756 1758 chosen for the specified file.
1757 1759
1758 1760 1. ``--tool`` option
1759 1761 2. ``HGMERGE`` environment variable
1760 1762 3. configurations in ``merge-patterns`` section
1761 1763 4. configuration of ``ui.merge``
1762 1764 5. configurations in ``merge-tools`` section
1763 1765 6. ``hgmerge`` tool (for historical reasons only)
1764 1766 7. default tool for fallback (``:merge`` or ``:prompt``)
1765 1767
1766 1768 This command writes out the examination result in the style below::
1767 1769
1768 1770 FILE = MERGETOOL
1769 1771
1770 1772 By default, all files known in the first parent context of the
1771 1773 working directory are examined. Use file patterns and/or -I/-X
1772 1774 options to limit the target files. -r/--rev is also useful for examining
1773 1775 files in another context without actually updating to it.
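
For instance, a hypothetical run against a single tracked file might
look like this (the file name and the chosen tool are illustrative
only; actual output depends on your configuration)::

  $ hg debugpickmergetool glob:foo.c
  foo.c = :merge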
1774 1776
1775 1777 With --debug, this command also shows warning messages emitted while
1776 1778 matching against ``merge-patterns`` and so on. It is recommended to
1777 1779 use this option with explicit file patterns and/or -I/-X options,
1778 1780 because it increases the amount of output per file according
1779 1781 to the configurations in hgrc.
1780 1782
1781 1783 With -v/--verbose, this command first shows the configurations below
1782 1784 (only those that are actually set).
1783 1785
1784 1786 - ``--tool`` option
1785 1787 - ``HGMERGE`` environment variable
1786 1788 - configuration of ``ui.merge``
1787 1789
1788 1790 If a merge tool is chosen before matching against
1789 1791 ``merge-patterns``, this command can't show any helpful
1790 1792 information, even with --debug. In such a case, the information above
1791 1793 is useful for understanding why a merge tool was chosen.
1792 1794 """
1793 1795 opts = pycompat.byteskwargs(opts)
1794 1796 overrides = {}
1795 1797 if opts['tool']:
1796 1798 overrides[('ui', 'forcemerge')] = opts['tool']
1797 1799 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1798 1800
1799 1801 with ui.configoverride(overrides, 'debugmergepatterns'):
1800 1802 hgmerge = encoding.environ.get("HGMERGE")
1801 1803 if hgmerge is not None:
1802 1804 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1803 1805 uimerge = ui.config("ui", "merge")
1804 1806 if uimerge:
1805 1807 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1806 1808
1807 1809 ctx = scmutil.revsingle(repo, opts.get('rev'))
1808 1810 m = scmutil.match(ctx, pats, opts)
1809 1811 changedelete = opts['changedelete']
1810 1812 for path in ctx.walk(m):
1811 1813 fctx = ctx[path]
1812 1814 try:
1813 1815 if not ui.debugflag:
1814 1816 ui.pushbuffer(error=True)
1815 1817 tool, toolpath = filemerge._picktool(repo, ui, path,
1816 1818 fctx.isbinary(),
1817 1819 'l' in fctx.flags(),
1818 1820 changedelete)
1819 1821 finally:
1820 1822 if not ui.debugflag:
1821 1823 ui.popbuffer()
1822 1824 ui.write(('%s = %s\n') % (path, tool))
1823 1825
1824 1826 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1825 1827 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1826 1828 '''access the pushkey key/value protocol
1827 1829
1828 1830 With two args, list the keys in the given namespace.
1829 1831
1830 1832 With five args, set a key to ``new`` if it is currently set to ``old``.
1831 1833 Reports success or failure.
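
For example, listing the bookmarks of a repository over the pushkey
protocol (the path is illustrative only)::

  $ hg debugpushkey /path/to/repo bookmarks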
1832 1834 '''
1833 1835
1834 1836 target = hg.peer(ui, {}, repopath)
1835 1837 if keyinfo:
1836 1838 key, old, new = keyinfo
1837 1839 with target.commandexecutor() as e:
1838 1840 r = e.callcommand('pushkey', {
1839 1841 'namespace': namespace,
1840 1842 'key': key,
1841 1843 'old': old,
1842 1844 'new': new,
1843 1845 }).result()
1844 1846
1845 1847 ui.status(pycompat.bytestr(r) + '\n')
1846 1848 return not r
1847 1849 else:
1848 1850 for k, v in sorted(target.listkeys(namespace).iteritems()):
1849 1851 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1850 1852 stringutil.escapestr(v)))
1851 1853
1852 1854 @command('debugpvec', [], _('A B'))
1853 1855 def debugpvec(ui, repo, a, b=None):
1854 1856 ca = scmutil.revsingle(repo, a)
1855 1857 cb = scmutil.revsingle(repo, b)
1856 1858 pa = pvec.ctxpvec(ca)
1857 1859 pb = pvec.ctxpvec(cb)
1858 1860 if pa == pb:
1859 1861 rel = "="
1860 1862 elif pa > pb:
1861 1863 rel = ">"
1862 1864 elif pa < pb:
1863 1865 rel = "<"
1864 1866 elif pa | pb:
1865 1867 rel = "|"
1866 1868 ui.write(_("a: %s\n") % pa)
1867 1869 ui.write(_("b: %s\n") % pb)
1868 1870 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1869 1871 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1870 1872 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1871 1873 pa.distance(pb), rel))
1872 1874
1873 1875 @command('debugrebuilddirstate|debugrebuildstate',
1874 1876 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1875 1877 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1876 1878 'the working copy parent')),
1877 1879 ],
1878 1880 _('[-r REV]'))
1879 1881 def debugrebuilddirstate(ui, repo, rev, **opts):
1880 1882 """rebuild the dirstate as it would look like for the given revision
1881 1883
1882 1884 If no revision is specified, the first parent of the working directory is used.
1883 1885
1884 1886 The dirstate will be set to the files of the given revision.
1885 1887 The actual working directory content or existing dirstate
1886 1888 information such as adds or removes is not considered.
1887 1889
1888 1890 ``minimal`` will only rebuild the dirstate status for files that claim to be
1889 1891 tracked but are not in the parent manifest, or that exist in the parent
1890 1892 manifest but are not in the dirstate. It will not change added, removed, or
1891 1893 modified files that are in the working copy parent.
1892 1894
1893 1895 One use of this command is to make the next :hg:`status` invocation
1894 1896 check the actual file content.
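
For example, a full rebuild to the working copy parent, and a cheaper
variant that only touches inconsistent entries::

  $ hg debugrebuilddirstate
  $ hg debugrebuilddirstate --minimal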
1895 1897 """
1896 1898 ctx = scmutil.revsingle(repo, rev)
1897 1899 with repo.wlock():
1898 1900 dirstate = repo.dirstate
1899 1901 changedfiles = None
1900 1902 # See command doc for what minimal does.
1901 1903 if opts.get(r'minimal'):
1902 1904 manifestfiles = set(ctx.manifest().keys())
1903 1905 dirstatefiles = set(dirstate)
1904 1906 manifestonly = manifestfiles - dirstatefiles
1905 1907 dsonly = dirstatefiles - manifestfiles
1906 1908 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1907 1909 changedfiles = manifestonly | dsnotadded
1908 1910
1909 1911 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1910 1912
1911 1913 @command('debugrebuildfncache', [], '')
1912 1914 def debugrebuildfncache(ui, repo):
1913 1915 """rebuild the fncache file"""
1914 1916 repair.rebuildfncache(ui, repo)
1915 1917
1916 1918 @command('debugrename',
1917 1919 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1918 1920 _('[-r REV] FILE'))
1919 1921 def debugrename(ui, repo, file1, *pats, **opts):
1920 1922 """dump rename information"""
1921 1923
1922 1924 opts = pycompat.byteskwargs(opts)
1923 1925 ctx = scmutil.revsingle(repo, opts.get('rev'))
1924 1926 m = scmutil.match(ctx, (file1,) + pats, opts)
1925 1927 for abs in ctx.walk(m):
1926 1928 fctx = ctx[abs]
1927 1929 o = fctx.filelog().renamed(fctx.filenode())
1928 1930 rel = m.rel(abs)
1929 1931 if o:
1930 1932 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1931 1933 else:
1932 1934 ui.write(_("%s not renamed\n") % rel)
1933 1935
1934 1936 @command('debugrevlog', cmdutil.debugrevlogopts +
1935 1937 [('d', 'dump', False, _('dump index data'))],
1936 1938 _('-c|-m|FILE'),
1937 1939 optionalrepo=True)
1938 1940 def debugrevlog(ui, repo, file_=None, **opts):
1939 1941 """show data and statistics about a revlog"""
1940 1942 opts = pycompat.byteskwargs(opts)
1941 1943 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1942 1944
1943 1945 if opts.get("dump"):
1944 1946 numrevs = len(r)
1945 1947 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1946 1948 " rawsize totalsize compression heads chainlen\n"))
1947 1949 ts = 0
1948 1950 heads = set()
1949 1951
1950 1952 for rev in xrange(numrevs):
1951 1953 dbase = r.deltaparent(rev)
1952 1954 if dbase == -1:
1953 1955 dbase = rev
1954 1956 cbase = r.chainbase(rev)
1955 1957 clen = r.chainlen(rev)
1956 1958 p1, p2 = r.parentrevs(rev)
1957 1959 rs = r.rawsize(rev)
1958 1960 ts = ts + rs
1959 1961 heads -= set(r.parentrevs(rev))
1960 1962 heads.add(rev)
1961 1963 try:
1962 1964 compression = ts / r.end(rev)
1963 1965 except ZeroDivisionError:
1964 1966 compression = 0
1965 1967 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1966 1968 "%11d %5d %8d\n" %
1967 1969 (rev, p1, p2, r.start(rev), r.end(rev),
1968 1970 r.start(dbase), r.start(cbase),
1969 1971 r.start(p1), r.start(p2),
1970 1972 rs, ts, compression, len(heads), clen))
1971 1973 return 0
1972 1974
1973 1975 v = r.version
1974 1976 format = v & 0xFFFF
1975 1977 flags = []
1976 1978 gdelta = False
1977 1979 if v & revlog.FLAG_INLINE_DATA:
1978 1980 flags.append('inline')
1979 1981 if v & revlog.FLAG_GENERALDELTA:
1980 1982 gdelta = True
1981 1983 flags.append('generaldelta')
1982 1984 if not flags:
1983 1985 flags = ['(none)']
1984 1986
1985 1987 nummerges = 0
1986 1988 numfull = 0
1987 1989 numprev = 0
1988 1990 nump1 = 0
1989 1991 nump2 = 0
1990 1992 numother = 0
1991 1993 nump1prev = 0
1992 1994 nump2prev = 0
1993 1995 chainlengths = []
1994 1996 chainbases = []
1995 1997 chainspans = []
1996 1998
1997 1999 datasize = [None, 0, 0]
1998 2000 fullsize = [None, 0, 0]
1999 2001 deltasize = [None, 0, 0]
2000 2002 chunktypecounts = {}
2001 2003 chunktypesizes = {}
2002 2004
2003 2005 def addsize(size, l):
2004 2006 if l[0] is None or size < l[0]:
2005 2007 l[0] = size
2006 2008 if size > l[1]:
2007 2009 l[1] = size
2008 2010 l[2] += size
2009 2011
2010 2012 numrevs = len(r)
2011 2013 for rev in xrange(numrevs):
2012 2014 p1, p2 = r.parentrevs(rev)
2013 2015 delta = r.deltaparent(rev)
2014 2016 if format > 0:
2015 2017 addsize(r.rawsize(rev), datasize)
2016 2018 if p2 != nullrev:
2017 2019 nummerges += 1
2018 2020 size = r.length(rev)
2019 2021 if delta == nullrev:
2020 2022 chainlengths.append(0)
2021 2023 chainbases.append(r.start(rev))
2022 2024 chainspans.append(size)
2023 2025 numfull += 1
2024 2026 addsize(size, fullsize)
2025 2027 else:
2026 2028 chainlengths.append(chainlengths[delta] + 1)
2027 2029 baseaddr = chainbases[delta]
2028 2030 revaddr = r.start(rev)
2029 2031 chainbases.append(baseaddr)
2030 2032 chainspans.append((revaddr - baseaddr) + size)
2031 2033 addsize(size, deltasize)
2032 2034 if delta == rev - 1:
2033 2035 numprev += 1
2034 2036 if delta == p1:
2035 2037 nump1prev += 1
2036 2038 elif delta == p2:
2037 2039 nump2prev += 1
2038 2040 elif delta == p1:
2039 2041 nump1 += 1
2040 2042 elif delta == p2:
2041 2043 nump2 += 1
2042 2044 elif delta != nullrev:
2043 2045 numother += 1
2044 2046
2045 2047 # Obtain data on the raw chunks in the revlog.
2046 2048 segment = r._getsegmentforrevs(rev, rev)[1]
2047 2049 if segment:
2048 2050 chunktype = bytes(segment[0:1])
2049 2051 else:
2050 2052 chunktype = 'empty'
2051 2053
2052 2054 if chunktype not in chunktypecounts:
2053 2055 chunktypecounts[chunktype] = 0
2054 2056 chunktypesizes[chunktype] = 0
2055 2057
2056 2058 chunktypecounts[chunktype] += 1
2057 2059 chunktypesizes[chunktype] += size
2058 2060
2059 2061 # Adjust size min value for empty cases
2060 2062 for size in (datasize, fullsize, deltasize):
2061 2063 if size[0] is None:
2062 2064 size[0] = 0
2063 2065
2064 2066 numdeltas = numrevs - numfull
2065 2067 numoprev = numprev - nump1prev - nump2prev
2066 2068 totalrawsize = datasize[2]
2067 2069 datasize[2] /= numrevs
2068 2070 fulltotal = fullsize[2]
2069 2071 fullsize[2] /= numfull
2070 2072 deltatotal = deltasize[2]
2071 2073 if numrevs - numfull > 0:
2072 2074 deltasize[2] /= numrevs - numfull
2073 2075 totalsize = fulltotal + deltatotal
2074 2076 avgchainlen = sum(chainlengths) / numrevs
2075 2077 maxchainlen = max(chainlengths)
2076 2078 maxchainspan = max(chainspans)
2077 2079 compratio = 1
2078 2080 if totalsize:
2079 2081 compratio = totalrawsize / totalsize
2080 2082
2081 2083 basedfmtstr = '%%%dd\n'
2082 2084 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2083 2085
2084 2086 def dfmtstr(max):
2085 2087 return basedfmtstr % len(str(max))
2086 2088 def pcfmtstr(max, padding=0):
2087 2089 return basepcfmtstr % (len(str(max)), ' ' * padding)
2088 2090
2089 2091 def pcfmt(value, total):
2090 2092 if total:
2091 2093 return (value, 100 * float(value) / total)
2092 2094 else:
2093 2095 return value, 100.0
2094 2096
2095 2097 ui.write(('format : %d\n') % format)
2096 2098 ui.write(('flags : %s\n') % ', '.join(flags))
2097 2099
2098 2100 ui.write('\n')
2099 2101 fmt = pcfmtstr(totalsize)
2100 2102 fmt2 = dfmtstr(totalsize)
2101 2103 ui.write(('revisions : ') + fmt2 % numrevs)
2102 2104 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2103 2105 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2104 2106 ui.write(('revisions : ') + fmt2 % numrevs)
2105 2107 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2106 2108 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2107 2109 ui.write(('revision size : ') + fmt2 % totalsize)
2108 2110 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2109 2111 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2110 2112
2111 2113 def fmtchunktype(chunktype):
2112 2114 if chunktype == 'empty':
2113 2115 return ' %s : ' % chunktype
2114 2116 elif chunktype in pycompat.bytestr(string.ascii_letters):
2115 2117 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2116 2118 else:
2117 2119 return ' 0x%s : ' % hex(chunktype)
2118 2120
2119 2121 ui.write('\n')
2120 2122 ui.write(('chunks : ') + fmt2 % numrevs)
2121 2123 for chunktype in sorted(chunktypecounts):
2122 2124 ui.write(fmtchunktype(chunktype))
2123 2125 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2124 2126 ui.write(('chunks size : ') + fmt2 % totalsize)
2125 2127 for chunktype in sorted(chunktypecounts):
2126 2128 ui.write(fmtchunktype(chunktype))
2127 2129 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2128 2130
2129 2131 ui.write('\n')
2130 2132 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2131 2133 ui.write(('avg chain length : ') + fmt % avgchainlen)
2132 2134 ui.write(('max chain length : ') + fmt % maxchainlen)
2133 2135 ui.write(('max chain reach : ') + fmt % maxchainspan)
2134 2136 ui.write(('compression ratio : ') + fmt % compratio)
2135 2137
2136 2138 if format > 0:
2137 2139 ui.write('\n')
2138 2140 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2139 2141 % tuple(datasize))
2140 2142 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2141 2143 % tuple(fullsize))
2142 2144 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2143 2145 % tuple(deltasize))
2144 2146
2145 2147 if numdeltas > 0:
2146 2148 ui.write('\n')
2147 2149 fmt = pcfmtstr(numdeltas)
2148 2150 fmt2 = pcfmtstr(numdeltas, 4)
2149 2151 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2150 2152 if numprev > 0:
2151 2153 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2152 2154 numprev))
2153 2155 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2154 2156 numprev))
2155 2157 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2156 2158 numprev))
2157 2159 if gdelta:
2158 2160 ui.write(('deltas against p1 : ')
2159 2161 + fmt % pcfmt(nump1, numdeltas))
2160 2162 ui.write(('deltas against p2 : ')
2161 2163 + fmt % pcfmt(nump2, numdeltas))
2162 2164 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2163 2165 numdeltas))
2164 2166
2165 2167 @command('debugrevspec',
2166 2168 [('', 'optimize', None,
2167 2169 _('print parsed tree after optimizing (DEPRECATED)')),
2168 2170 ('', 'show-revs', True, _('print list of result revisions (default)')),
2169 2171 ('s', 'show-set', None, _('print internal representation of result set')),
2170 2172 ('p', 'show-stage', [],
2171 2173 _('print parsed tree at the given stage'), _('NAME')),
2172 2174 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2173 2175 ('', 'verify-optimized', False, _('verify optimized result')),
2174 2176 ],
2175 2177 ('REVSPEC'))
2176 2178 def debugrevspec(ui, repo, expr, **opts):
2177 2179 """parse and apply a revision specification
2178 2180
2179 2181 Use the -p/--show-stage option to print the parsed tree at the given stages.
2180 2182 Use -p all to print the tree at every stage.
2181 2183
2182 2184 Use the --no-show-revs option with -s or -p to print only the set
2183 2185 representation or the parsed tree, respectively.
2184 2186
2185 2187 Use --verify-optimized to compare the optimized result with the unoptimized
2186 2188 one. Returns 1 if the optimized result differs.
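
For example, a hypothetical invocation printing the tree at every stage
for a simple expression might look like::

  $ hg debugrevspec -p all '::. and not public()'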
2187 2189 """
2188 2190 opts = pycompat.byteskwargs(opts)
2189 2191 aliases = ui.configitems('revsetalias')
2190 2192 stages = [
2191 2193 ('parsed', lambda tree: tree),
2192 2194 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2193 2195 ui.warn)),
2194 2196 ('concatenated', revsetlang.foldconcat),
2195 2197 ('analyzed', revsetlang.analyze),
2196 2198 ('optimized', revsetlang.optimize),
2197 2199 ]
2198 2200 if opts['no_optimized']:
2199 2201 stages = stages[:-1]
2200 2202 if opts['verify_optimized'] and opts['no_optimized']:
2201 2203 raise error.Abort(_('cannot use --verify-optimized with '
2202 2204 '--no-optimized'))
2203 2205 stagenames = set(n for n, f in stages)
2204 2206
2205 2207 showalways = set()
2206 2208 showchanged = set()
2207 2209 if ui.verbose and not opts['show_stage']:
2208 2210 # show parsed tree by --verbose (deprecated)
2209 2211 showalways.add('parsed')
2210 2212 showchanged.update(['expanded', 'concatenated'])
2211 2213 if opts['optimize']:
2212 2214 showalways.add('optimized')
2213 2215 if opts['show_stage'] and opts['optimize']:
2214 2216 raise error.Abort(_('cannot use --optimize with --show-stage'))
2215 2217 if opts['show_stage'] == ['all']:
2216 2218 showalways.update(stagenames)
2217 2219 else:
2218 2220 for n in opts['show_stage']:
2219 2221 if n not in stagenames:
2220 2222 raise error.Abort(_('invalid stage name: %s') % n)
2221 2223 showalways.update(opts['show_stage'])
2222 2224
2223 2225 treebystage = {}
2224 2226 printedtree = None
2225 2227 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2226 2228 for n, f in stages:
2227 2229 treebystage[n] = tree = f(tree)
2228 2230 if n in showalways or (n in showchanged and tree != printedtree):
2229 2231 if opts['show_stage'] or n != 'parsed':
2230 2232 ui.write(("* %s:\n") % n)
2231 2233 ui.write(revsetlang.prettyformat(tree), "\n")
2232 2234 printedtree = tree
2233 2235
2234 2236 if opts['verify_optimized']:
2235 2237 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2236 2238 brevs = revset.makematcher(treebystage['optimized'])(repo)
2237 2239 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2238 2240 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2239 2241 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2240 2242 arevs = list(arevs)
2241 2243 brevs = list(brevs)
2242 2244 if arevs == brevs:
2243 2245 return 0
2244 2246 ui.write(('--- analyzed\n'), label='diff.file_a')
2245 2247 ui.write(('+++ optimized\n'), label='diff.file_b')
2246 2248 sm = difflib.SequenceMatcher(None, arevs, brevs)
2247 2249 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2248 2250 if tag in ('delete', 'replace'):
2249 2251 for c in arevs[alo:ahi]:
2250 2252 ui.write('-%s\n' % c, label='diff.deleted')
2251 2253 if tag in ('insert', 'replace'):
2252 2254 for c in brevs[blo:bhi]:
2253 2255 ui.write('+%s\n' % c, label='diff.inserted')
2254 2256 if tag == 'equal':
2255 2257 for c in arevs[alo:ahi]:
2256 2258 ui.write(' %s\n' % c)
2257 2259 return 1
2258 2260
2259 2261 func = revset.makematcher(tree)
2260 2262 revs = func(repo)
2261 2263 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2262 2264 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2263 2265 if not opts['show_revs']:
2264 2266 return
2265 2267 for c in revs:
2266 2268 ui.write("%d\n" % c)
2267 2269
2268 2270 @command('debugserve', [
2269 2271 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2270 2272 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2271 2273 ('', 'logiofile', '', _('file to log server I/O to')),
2272 2274 ], '')
2273 2275 def debugserve(ui, repo, **opts):
2274 2276 """run a server with advanced settings
2275 2277
2276 2278 This command is similar to :hg:`serve`. It exists partially as a
2277 2279 workaround for the fact that ``hg serve --stdio`` must have specific
2278 2280 arguments for security reasons.
2279 2281 """
2280 2282 opts = pycompat.byteskwargs(opts)
2281 2283
2282 2284 if not opts['sshstdio']:
2283 2285 raise error.Abort(_('only --sshstdio is currently supported'))
2284 2286
2285 2287 logfh = None
2286 2288
2287 2289 if opts['logiofd'] and opts['logiofile']:
2288 2290 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2289 2291
2290 2292 if opts['logiofd']:
2291 2293 # Line buffered because output is line based.
2292 2294 try:
2293 2295 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2294 2296 except OSError as e:
2295 2297 if e.errno != errno.ESPIPE:
2296 2298 raise
2297 2299 # can't seek a pipe, so `ab` mode fails on py3
2298 2300 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2299 2301 elif opts['logiofile']:
2300 2302 logfh = open(opts['logiofile'], 'ab', 1)
2301 2303
2302 2304 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2303 2305 s.serve_forever()
2304 2306
2305 2307 @command('debugsetparents', [], _('REV1 [REV2]'))
2306 2308 def debugsetparents(ui, repo, rev1, rev2=None):
2307 2309 """manually set the parents of the current working directory
2308 2310
2309 2311 This is useful for writing repository conversion tools, but should
2310 2312 be used with care. For example, neither the working directory nor the
2311 2313 dirstate is updated, so file status may be incorrect after running this
2312 2314 command.
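
For example, marking the working directory as a merge of the current
changeset and revision 0 (for illustration only)::

  $ hg debugsetparents . 0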
2313 2315
2314 2316 Returns 0 on success.
2315 2317 """
2316 2318
2317 2319 node1 = scmutil.revsingle(repo, rev1).node()
2318 2320 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2319 2321
2320 2322 with repo.wlock():
2321 2323 repo.setparents(node1, node2)
2322 2324
2323 2325 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2324 2326 def debugssl(ui, repo, source=None, **opts):
2325 2327 '''test a secure connection to a server
2326 2328
2327 2329 This builds the certificate chain for the server on Windows, installing the
2328 2330 missing intermediates and trusted root via Windows Update if necessary. It
2329 2331 does nothing on other platforms.
2330 2332
2331 2333 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2332 2334 that server is used. See :hg:`help urls` for more information.
2333 2335
2334 2336 If the update succeeds, retry the original operation. Otherwise, the cause
2335 2337 of the SSL error is likely another issue.
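
For example, checking the chain for an explicit HTTPS URL (the host is
illustrative only)::

  $ hg debugssl https://hg.example.com/repo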
2336 2338 '''
2337 2339 if not pycompat.iswindows:
2338 2340 raise error.Abort(_('certificate chain building is only possible on '
2339 2341 'Windows'))
2340 2342
2341 2343 if not source:
2342 2344 if not repo:
2343 2345 raise error.Abort(_("there is no Mercurial repository here, and no "
2344 2346 "server specified"))
2345 2347 source = "default"
2346 2348
2347 2349 source, branches = hg.parseurl(ui.expandpath(source))
2348 2350 url = util.url(source)
2349 2351 addr = None
2350 2352
2351 2353 defaultport = {'https': 443, 'ssh': 22}
2352 2354 if url.scheme in defaultport:
2353 2355 try:
2354 2356 addr = (url.host, int(url.port or defaultport[url.scheme]))
2355 2357 except ValueError:
2356 2358 raise error.Abort(_("malformed port number in URL"))
2357 2359 else:
2358 2360 raise error.Abort(_("only https and ssh connections are supported"))
2359 2361
2360 2362 from . import win32
2361 2363
2362 2364 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2363 2365 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2364 2366
2365 2367 try:
2366 2368 s.connect(addr)
2367 2369 cert = s.getpeercert(True)
2368 2370
2369 2371 ui.status(_('checking the certificate chain for %s\n') % url.host)
2370 2372
2371 2373 complete = win32.checkcertificatechain(cert, build=False)
2372 2374
2373 2375 if not complete:
2374 2376 ui.status(_('certificate chain is incomplete, updating... '))
2375 2377
2376 2378 if not win32.checkcertificatechain(cert):
2377 2379 ui.status(_('failed.\n'))
2378 2380 else:
2379 2381 ui.status(_('done.\n'))
2380 2382 else:
2381 2383 ui.status(_('full certificate chain is available\n'))
2382 2384 finally:
2383 2385 s.close()
2384 2386
2385 2387 @command('debugsub',
2386 2388 [('r', 'rev', '',
2387 2389 _('revision to check'), _('REV'))],
2388 2390 _('[-r REV] [REV]'))
2389 2391 def debugsub(ui, repo, rev=None):
2390 2392 ctx = scmutil.revsingle(repo, rev, None)
2391 2393 for k, v in sorted(ctx.substate.items()):
2392 2394 ui.write(('path %s\n') % k)
2393 2395 ui.write((' source %s\n') % v[0])
2394 2396 ui.write((' revision %s\n') % v[1])
2395 2397
2396 2398 @command('debugsuccessorssets',
2397 2399 [('', 'closest', False, _('return closest successors sets only'))],
2398 2400 _('[REV]'))
2399 2401 def debugsuccessorssets(ui, repo, *revs, **opts):
2400 2402 """show set of successors for revision
2401 2403
2402 2404 A successors set of changeset A is a consistent group of revisions that
2403 2405 succeed A. It contains only non-obsolete changesets unless the closest
2404 2406 successors sets are requested (--closest).
2405 2407
2406 2408 In most cases a changeset A has a single successors set containing a single
2407 2409 successor (changeset A replaced by A').
2408 2410
2409 2411 A changeset that is made obsolete with no successors is called "pruned".
2410 2412 Such changesets have no successors sets at all.
2411 2413
2412 2414 A changeset that has been "split" will have a successors set containing
2413 2415 more than one successor.
2414 2416
2415 2417 A changeset that has been rewritten in multiple different ways is called
2416 2418 "divergent". Such changesets have multiple successor sets (each of which
2417 2419 may also be split, i.e. have multiple successors).
2418 2420
2419 2421 Results are displayed as follows::
2420 2422
2421 2423 <rev1>
2422 2424 <successors-1A>
2423 2425 <rev2>
2424 2426 <successors-2A>
2425 2427 <successors-2B1> <successors-2B2> <successors-2B3>
2426 2428
2427 2429 Here rev2 has two possible (i.e. divergent) successors sets. The first
2428 2430 holds one element, whereas the second holds three (i.e. the changeset has
2429 2431 been split).
2430 2432 """
2431 2433 # passed to successorssets caching computation from one call to another
2432 2434 cache = {}
2433 2435 ctx2str = bytes
2434 2436 node2str = short
2435 2437 for rev in scmutil.revrange(repo, revs):
2436 2438 ctx = repo[rev]
2437 2439 ui.write('%s\n' % ctx2str(ctx))
2438 2440 for succsset in obsutil.successorssets(repo, ctx.node(),
2439 2441 closest=opts[r'closest'],
2440 2442 cache=cache):
2441 2443 if succsset:
2442 2444 ui.write(' ')
2443 2445 ui.write(node2str(succsset[0]))
2444 2446 for node in succsset[1:]:
2445 2447 ui.write(' ')
2446 2448 ui.write(node2str(node))
2447 2449 ui.write('\n')
2448 2450
2449 2451 @command('debugtemplate',
2450 2452 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2451 2453 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2452 2454 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2453 2455 optionalrepo=True)
2454 2456 def debugtemplate(ui, repo, tmpl, **opts):
2455 2457 """parse and apply a template
2456 2458
2457 2459 If -r/--rev is given, the template is processed as a log template and
2458 2460 applied to the given changesets. Otherwise, it is processed as a generic
2459 2461 template.
2460 2462
2461 2463 Use --verbose to print the parsed tree.
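
For example, a minimal invocation rendering a log-style template for the
working directory parent might look like::

  $ hg debugtemplate -r . '{rev}:{node|short}\n'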
2462 2464 """
2463 2465 revs = None
2464 2466 if opts[r'rev']:
2465 2467 if repo is None:
2466 2468 raise error.RepoError(_('there is no Mercurial repository here '
2467 2469 '(.hg not found)'))
2468 2470 revs = scmutil.revrange(repo, opts[r'rev'])
2469 2471
2470 2472 props = {}
2471 2473 for d in opts[r'define']:
2472 2474 try:
2473 2475 k, v = (e.strip() for e in d.split('=', 1))
2474 2476 if not k or k == 'ui':
2475 2477 raise ValueError
2476 2478 props[k] = v
2477 2479 except ValueError:
2478 2480 raise error.Abort(_('malformed keyword definition: %s') % d)
2479 2481
2480 2482 if ui.verbose:
2481 2483 aliases = ui.configitems('templatealias')
2482 2484 tree = templater.parse(tmpl)
2483 2485 ui.note(templater.prettyformat(tree), '\n')
2484 2486 newtree = templater.expandaliases(tree, aliases)
2485 2487 if newtree != tree:
2486 2488 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2487 2489
2488 2490 if revs is None:
2489 2491 tres = formatter.templateresources(ui, repo)
2490 2492 t = formatter.maketemplater(ui, tmpl, resources=tres)
2491 2493 if ui.verbose:
2492 2494 kwds, funcs = t.symbolsuseddefault()
2493 2495 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2494 2496 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2495 2497 ui.write(t.renderdefault(props))
2496 2498 else:
2497 2499 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2498 2500 if ui.verbose:
2499 2501 kwds, funcs = displayer.t.symbolsuseddefault()
2500 2502 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2501 2503 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2502 2504 for r in revs:
2503 2505 displayer.show(repo[r], **pycompat.strkwargs(props))
2504 2506 displayer.close()
2505 2507
2506 2508 @command('debuguigetpass', [
2507 2509 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2508 2510 ], _('[-p TEXT]'), norepo=True)
2509 2511 def debuguigetpass(ui, prompt=''):
2510 2512 """show prompt to type password"""
2511 2513 r = ui.getpass(prompt)
2512 2514 ui.write(('response: %s\n') % r)
2513 2515
2514 2516 @command('debuguiprompt', [
2515 2517 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2516 2518 ], _('[-p TEXT]'), norepo=True)
2517 2519 def debuguiprompt(ui, prompt=''):
2518 2520 """show plain prompt"""
2519 2521 r = ui.prompt(prompt)
2520 2522 ui.write(('response: %s\n') % r)
2521 2523
2522 2524 @command('debugupdatecaches', [])
2523 2525 def debugupdatecaches(ui, repo, *pats, **opts):
2524 2526 """warm all known caches in the repository"""
2525 2527 with repo.wlock(), repo.lock():
2526 2528 repo.updatecaches(full=True)
2527 2529
2528 2530 @command('debugupgraderepo', [
2529 2531 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2530 2532 ('', 'run', False, _('performs an upgrade')),
2531 2533 ])
2532 2534 def debugupgraderepo(ui, repo, run=False, optimize=None):
2533 2535 """upgrade a repository to use different features
2534 2536
2535 2537 If no arguments are specified, the repository is evaluated for upgrade
2536 2538 and a list of problems and potential optimizations is printed.
2537 2539
2538 2540 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2539 2541 can be influenced via additional arguments. More details will be provided
2540 2542 by the command output when run without ``--run``.
2541 2543
2542 2544 During the upgrade, the repository will be locked and no writes will be
2543 2545 allowed.
2544 2546
2545 2547 At the end of the upgrade, the repository may not be readable while new
2546 2548 repository data is swapped in. This window will be as long as it takes to
2547 2549 rename some directories inside the ``.hg`` directory. On most machines, this
2548 2550 should complete almost instantaneously and the chances of a consumer being
2549 2551 unable to access the repository should be low.
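
For example, to perform the upgrade instead of merely listing the
possible actions::

  $ hg debugupgraderepo --run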
2550 2552 """
2551 2553 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2552 2554
2553 2555 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2554 2556 inferrepo=True)
2555 2557 def debugwalk(ui, repo, *pats, **opts):
2556 2558 """show how files match on given patterns"""
2557 2559 opts = pycompat.byteskwargs(opts)
2558 2560 m = scmutil.match(repo[None], pats, opts)
2559 2561 if ui.verbose:
2560 2562 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2561 2563 items = list(repo[None].walk(m))
2562 2564 if not items:
2563 2565 return
2564 2566 f = lambda fn: fn
2565 2567 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2566 2568 f = lambda fn: util.normpath(fn)
2567 2569 fmt = 'f %%-%ds %%-%ds %%s' % (
2568 2570 max([len(abs) for abs in items]),
2569 2571 max([len(m.rel(abs)) for abs in items]))
2570 2572 for abs in items:
2571 2573 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2572 2574 ui.write("%s\n" % line.rstrip())
2573 2575
2574 2576 @command('debugwhyunstable', [], _('REV'))
2575 2577 def debugwhyunstable(ui, repo, rev):
2576 2578 """explain instabilities of a changeset"""
2577 2579 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2578 2580 dnodes = ''
2579 2581 if entry.get('divergentnodes'):
2580 2582 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2581 2583 for ctx in entry['divergentnodes']) + ' '
2582 2584 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2583 2585 entry['reason'], entry['node']))
2584 2586
2585 2587 @command('debugwireargs',
2586 2588 [('', 'three', '', 'three'),
2587 2589 ('', 'four', '', 'four'),
2588 2590 ('', 'five', '', 'five'),
2589 2591 ] + cmdutil.remoteopts,
2590 2592 _('REPO [OPTIONS]... [ONE [TWO]]'),
2591 2593 norepo=True)
2592 2594 def debugwireargs(ui, repopath, *vals, **opts):
2593 2595 opts = pycompat.byteskwargs(opts)
2594 2596 repo = hg.peer(ui, opts, repopath)
2595 2597 for opt in cmdutil.remoteopts:
2596 2598 del opts[opt[1]]
2597 2599 args = {}
2598 2600 for k, v in opts.iteritems():
2599 2601 if v:
2600 2602 args[k] = v
2601 2603 args = pycompat.strkwargs(args)
2602 2604 # run twice to check that we don't mess up the stream for the next command
2603 2605 res1 = repo.debugwireargs(*vals, **args)
2604 2606 res2 = repo.debugwireargs(*vals, **args)
2605 2607 ui.write("%s\n" % res1)
2606 2608 if res1 != res2:
2607 2609 ui.warn("%s\n" % res2)
2608 2610
2609 2611 def _parsewirelangblocks(fh):
2610 2612 activeaction = None
2611 2613 blocklines = []
2612 2614
2613 2615 for line in fh:
2614 2616 line = line.rstrip()
2615 2617 if not line:
2616 2618 continue
2617 2619
2618 2620 if line.startswith(b'#'):
2619 2621 continue
2620 2622
2621 2623 if not line.startswith(' '):
2622 2624 # New block. Flush previous one.
2623 2625 if activeaction:
2624 2626 yield activeaction, blocklines
2625 2627
2626 2628 activeaction = line
2627 2629 blocklines = []
2628 2630 continue
2629 2631
2630 2632 # Else we start with an indent.
2631 2633
2632 2634 if not activeaction:
2633 2635 raise error.Abort(_('indented line outside of block'))
2634 2636
2635 2637 blocklines.append(line)
2636 2638
2637 2639 # Flush last block.
2638 2640 if activeaction:
2639 2641 yield activeaction, blocklines
2640 2642
2641 2643 @command('debugwireproto',
2642 2644 [
2643 2645 ('', 'localssh', False, _('start an SSH server for this repo')),
2644 2646 ('', 'peer', '', _('construct a specific version of the peer')),
2645 2647 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2646 2648 ('', 'nologhandshake', False,
2647 2649 _('do not log I/O related to the peer handshake')),
2648 2650 ] + cmdutil.remoteopts,
2649 2651 _('[PATH]'),
2650 2652 optionalrepo=True)
2651 2653 def debugwireproto(ui, repo, path=None, **opts):
2652 2654 """send wire protocol commands to a server
2653 2655
2654 2656 This command can be used to issue wire protocol commands to remote
2655 2657 peers and to debug the raw data being exchanged.
2656 2658
2657 2659 ``--localssh`` will start an SSH server against the current repository
2658 2660 and connect to that. By default, the connection will perform a handshake
2659 2661 and establish an appropriate peer instance.
2660 2662
2661 2663 ``--peer`` can be used to bypass the handshake protocol and construct a
2662 2664 peer instance using the specified class type. Valid values are ``raw``,
2663 2665 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2664 2666 raw data payloads and don't support higher-level command actions.
2665 2667
2666 2668 ``--noreadstderr`` can be used to disable automatic reading from stderr
2667 2669 of the peer (for SSH connections only). Disabling automatic reading of
2668 2670 stderr is useful for making output more deterministic.
2669 2671
2670 2672 Commands are issued via a mini language which is specified via stdin.
2671 2673 The language consists of individual actions to perform. An action is
2672 2674 defined by a block. A block is defined as a line with no leading
2673 2675 space followed by 0 or more lines with leading space. Blocks are
2674 2676 effectively a high-level command with additional metadata.
2675 2677
2676 2678 Lines beginning with ``#`` are ignored.
2677 2679
2678 2680 The following sections denote available actions.
2679 2681
2680 2682 raw
2681 2683 ---
2682 2684
2683 2685 Send raw data to the server.
2684 2686
2685 2687 The block payload contains the raw data to send as one atomic send
2686 2688 operation. The data may not actually be delivered in a single system
2687 2689 call: it depends on the abilities of the transport being used.
2688 2690
2689 2691 Each line in the block is de-indented and concatenated. Then, that
2690 2692 value is evaluated as a Python b'' literal. This allows the use of
2691 2693 backslash escaping, etc.
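
For example, a hypothetical block sending a raw ``hello`` probe (note the
indented payload line)::

  raw
      hello\n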
2692 2694
2693 2695 raw+
2694 2696 ----
2695 2697
2696 2698 Behaves like ``raw`` except flushes output afterwards.
2697 2699
2698 2700 command <X>
2699 2701 -----------
2700 2702
2701 2703 Send a request to run a named command, whose name follows the ``command``
2702 2704 string.
2703 2705
2704 2706 Arguments to the command are defined as lines in this block. The format of
2705 2707 each line is ``<key> <value>``. e.g.::
2706 2708
2707 2709 command listkeys
2708 2710 namespace bookmarks
2709 2711
2710 2712 If the value begins with ``eval:``, it will be interpreted as a Python
2711 2713 literal expression. Otherwise values are interpreted as Python b'' literals.
2712 2714 This allows sending complex types and encoding special byte sequences via
2713 2715 backslash escaping.
2714 2716
2715 2717 The following arguments have special meaning:
2716 2718
2717 2719 ``PUSHFILE``
2718 2720 When defined, the *push* mechanism of the peer will be used instead
2719 2721 of the static request-response mechanism and the content of the
2720 2722 file specified in the value of this argument will be sent as the
2721 2723 command payload.
2722 2724
2723 2725 This can be used to submit a local bundle file to the remote.
2724 2726
2725 2727 batchbegin
2726 2728 ----------
2727 2729
2728 2730 Instruct the peer to begin a batched send.
2729 2731
2730 2732 All ``command`` blocks are queued for execution until the next
2731 2733 ``batchsubmit`` block.
2732 2734
2733 2735 batchsubmit
2734 2736 -----------
2735 2737
2736 2738 Submit previously queued ``command`` blocks as a batch request.
2737 2739
2738 2740 This action MUST be paired with a ``batchbegin`` action.
2739 2741
2740 2742 httprequest <method> <path>
2741 2743 ---------------------------
2742 2744
2743 2745 (HTTP peer only)
2744 2746
2745 2747 Send an HTTP request to the peer.
2746 2748
2747 2749 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2748 2750
2749 2751 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2750 2752 headers to add to the request. e.g. ``Accept: foo``.
2751 2753
2752 2754 The following arguments are special:
2753 2755
2754 2756 ``BODYFILE``
2755 2757 The content of the file defined as the value to this argument will be
2756 2758 transferred verbatim as the HTTP request body.
2757 2759
2758 2760 ``frame <type> <flags> <payload>``
2759 2761 Send a unified protocol frame as part of the request body.
2760 2762
2761 2763 All frames will be collected and sent as the body to the HTTP
2762 2764 request.
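
For example, a hypothetical GET request carrying a custom header might be
written as::

  httprequest GET api/
      user-agent: test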
2763 2765
2764 2766 close
2765 2767 -----
2766 2768
2767 2769 Close the connection to the server.
2768 2770
2769 2771 flush
2770 2772 -----
2771 2773
2772 2774 Flush data written to the server.
2773 2775
2774 2776 readavailable
2775 2777 -------------
2776 2778
2777 2779 Close the write end of the connection and read all available data from
2778 2780 the server.
2779 2781
2780 2782 If the connection to the server encompasses multiple pipes, we poll both
2781 2783 pipes and read available data.
2782 2784
2783 2785 readline
2784 2786 --------
2785 2787
2786 2788 Read a line of output from the server. If there are multiple output
2787 2789 pipes, reads only the main pipe.
2788 2790
2789 2791 ereadline
2790 2792 ---------
2791 2793
2792 2794 Like ``readline``, but read from the stderr pipe, if available.
2793 2795
2794 2796 read <X>
2795 2797 --------
2796 2798
2797 2799 ``read()`` N bytes from the server's main output pipe.
2798 2800
2799 2801 eread <X>
2800 2802 ---------
2801 2803
2802 2804 ``read()`` N bytes from the server's stderr pipe, if available.
2803 2805
2804 2806 Specifying Unified Frame-Based Protocol Frames
2805 2807 ----------------------------------------------
2806 2808
2807 2809 It is possible to emit a *Unified Frame-Based Protocol* by using special
2808 2810 syntax.
2809 2811
2810 2812 A frame is composed as a type, flags, and payload. These can be parsed
2811 2813 from a string of the form:
2812 2814
2813 2815 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2814 2816
2815 2817 ``request-id`` and ``stream-id`` are integers defining the request and
2816 2818 stream identifiers.
2817 2819
2818 2820 ``type`` can be an integer value for the frame type or the string name
2819 2821 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2820 2822 ``command-name``.
2821 2823
2822 2824 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2823 2825 components. Each component (and there can be just one) can be an integer
2824 2826 or a flag name for stream flags or frame flags, respectively. Values are
2825 2827 resolved to integers and then bitwise OR'd together.
2826 2828
2827 2829 ``payload`` represents the raw frame payload. If it begins with
2828 2830 ``cbor:``, the following string is evaluated as Python code and the
2829 2831 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2830 2832 as a Python byte string literal.
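
For example, a hypothetical frame issuing a ``heads`` command request
could be written as::

  frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}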
2831 2833 """
2832 2834 opts = pycompat.byteskwargs(opts)
2833 2835
2834 2836 if opts['localssh'] and not repo:
2835 2837 raise error.Abort(_('--localssh requires a repository'))
2836 2838
2837 2839 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2838 2840 raise error.Abort(_('invalid value for --peer'),
2839 2841 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2840 2842
2841 2843 if path and opts['localssh']:
2842 2844 raise error.Abort(_('cannot specify --localssh with an explicit '
2843 2845 'path'))
2844 2846
2845 2847 if ui.interactive():
2846 2848 ui.write(_('(waiting for commands on stdin)\n'))
2847 2849
2848 2850 blocks = list(_parsewirelangblocks(ui.fin))
2849 2851
2850 2852 proc = None
2851 2853 stdin = None
2852 2854 stdout = None
2853 2855 stderr = None
2854 2856 opener = None
2855 2857
2856 2858 if opts['localssh']:
2857 2859 # We start the SSH server in its own process so there is process
2858 2860 # separation. This prevents a whole class of potential bugs around
2859 2861 # shared state from interfering with server operation.
2860 2862 args = procutil.hgcmd() + [
2861 2863 '-R', repo.root,
2862 2864 'debugserve', '--sshstdio',
2863 2865 ]
2864 2866 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2865 2867 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2866 2868 bufsize=0)
2867 2869
2868 2870 stdin = proc.stdin
2869 2871 stdout = proc.stdout
2870 2872 stderr = proc.stderr
2871 2873
2872 2874 # We turn the pipes into observers so we can log I/O.
2873 2875 if ui.verbose or opts['peer'] == 'raw':
2874 2876 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2875 2877 logdata=True)
2876 2878 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2877 2879 logdata=True)
2878 2880 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2879 2881 logdata=True)
2880 2882
2881 2883 # --localssh also implies the peer connection settings.
2882 2884
2883 2885 url = 'ssh://localserver'
2884 2886 autoreadstderr = not opts['noreadstderr']
2885 2887
2886 2888 if opts['peer'] == 'ssh1':
2887 2889 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2888 2890 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2889 2891 None, autoreadstderr=autoreadstderr)
2890 2892 elif opts['peer'] == 'ssh2':
2891 2893 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2892 2894 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2893 2895 None, autoreadstderr=autoreadstderr)
2894 2896 elif opts['peer'] == 'raw':
2895 2897 ui.write(_('using raw connection to peer\n'))
2896 2898 peer = None
2897 2899 else:
2898 2900 ui.write(_('creating ssh peer from handshake results\n'))
2899 2901 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2900 2902 autoreadstderr=autoreadstderr)
2901 2903
2902 2904 elif path:
2903 2905 # We bypass hg.peer() so we can proxy the sockets.
2904 2906 # TODO consider not doing this because we skip
2905 2907 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2906 2908 u = util.url(path)
2907 2909 if u.scheme != 'http':
2908 2910 raise error.Abort(_('only http:// paths are currently supported'))
2909 2911
2910 2912 url, authinfo = u.authinfo()
2911 2913 openerargs = {
2912 2914 r'useragent': b'Mercurial debugwireproto',
2913 2915 }
2914 2916
2915 2917 # Turn pipes/sockets into observers so we can log I/O.
2916 2918 if ui.verbose:
2917 2919 openerargs.update({
2918 2920 r'loggingfh': ui,
2919 2921 r'loggingname': b's',
2920 2922 r'loggingopts': {
2921 2923 r'logdata': True,
2922 2924 r'logdataapis': False,
2923 2925 },
2924 2926 })
2925 2927
2926 2928 if ui.debugflag:
2927 2929 openerargs[r'loggingopts'][r'logdataapis'] = True
2928 2930
2929 2931 # Don't send default headers when in raw mode. This allows us to
2930 2932 # bypass most of the behavior of our URL handling code so we can
2931 2933 # have near complete control over what's sent on the wire.
2932 2934 if opts['peer'] == 'raw':
2933 2935 openerargs[r'sendaccept'] = False
2934 2936
2935 2937 opener = urlmod.opener(ui, authinfo, **openerargs)
2936 2938
2937 2939 if opts['peer'] == 'http2':
2938 2940 ui.write(_('creating http peer for wire protocol version 2\n'))
2939 2941 # We go through makepeer() because we need an API descriptor for
2940 2942 # the peer instance to be useful.
2941 2943 with ui.configoverride({
2942 2944 ('experimental', 'httppeer.advertise-v2'): True}):
2943 2945 if opts['nologhandshake']:
2944 2946 ui.pushbuffer()
2945 2947
2946 2948 peer = httppeer.makepeer(ui, path, opener=opener)
2947 2949
2948 2950 if opts['nologhandshake']:
2949 2951 ui.popbuffer()
2950 2952
2951 2953 if not isinstance(peer, httppeer.httpv2peer):
2952 2954 raise error.Abort(_('could not instantiate HTTP peer for '
2953 2955 'wire protocol version 2'),
2954 2956 hint=_('the server may not have the feature '
2955 2957 'enabled or is not allowing this '
2956 2958 'client version'))
2957 2959
2958 2960 elif opts['peer'] == 'raw':
2959 2961 ui.write(_('using raw connection to peer\n'))
2960 2962 peer = None
2961 2963 elif opts['peer']:
2962 2964 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2963 2965 opts['peer'])
2964 2966 else:
2965 2967 peer = httppeer.makepeer(ui, path, opener=opener)
2966 2968
2967 2969 # We /could/ populate stdin/stdout with sock.makefile()...
2968 2970 else:
2969 2971 raise error.Abort(_('unsupported connection configuration'))
2970 2972
2971 2973 batchedcommands = None
2972 2974
2973 2975 # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
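            # Payload lines are stripped of leading whitespace, joined and
            # unescaped, so a block like the following (a sketch; raw/raw+
            # only work when a stdin pipe is available, per the check above)
            # would write the bytes "hello" plus a newline to the peer:
            #
            #   raw
            #       hello\n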
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
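                # Each argument line has the form "<key> <value>". A value
                # prefixed with "eval:" is parsed as a Python literal;
                # anything else is treated as an escaped string. A sketch:
                #
                #   command known
                #       nodes eval:[]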
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

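            # A special PUSHFILE argument streams the named file to the peer
            # via _callpush() instead of sending it as a regular argument.
            # A sketch (assuming a bundle file exists at the given path):
            #
            #   command unbundle
            #       PUSHFILE ../initial.v1.hg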
            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
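            # A typical sequence is "batchbegin", one or more "command"
            # blocks (queued above rather than sent immediately), then
            # "batchsubmit" to flush the queue as a single batch request.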
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
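            # Each payload line is either a "<header>: <value>" pair, a
            # "BODYFILE <path>" line naming a file whose contents become the
            # request body, or a "frame <spec>" line that is turned into a
            # wire protocol version 2 frame and concatenated into the body.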
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: method

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                e.read()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

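        # The remaining read actions consume output from the raw ssh pipes:
        # "readline" reads one line of stdout, "read <n>" reads <n> bytes of
        # stdout, and the "e" variants do the same for stderr (for example,
        # "read 20" would consume 20 bytes of stdout).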
        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()