debugsetparents: avoid using "r1/r2" variable names for nodeids...
Martin von Zweigbergk
r37161:8bac14ce default
@@ -1,3067 +1,3067 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import tempfile
25 25 import time
26 26
27 27 from .i18n import _
28 28 from .node import (
29 29 bin,
30 30 hex,
31 31 nullhex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from . import (
37 37 bundle2,
38 38 changegroup,
39 39 cmdutil,
40 40 color,
41 41 context,
42 42 dagparser,
43 43 dagutil,
44 44 encoding,
45 45 error,
46 46 exchange,
47 47 extensions,
48 48 filemerge,
49 49 fileset,
50 50 formatter,
51 51 hg,
52 52 httppeer,
53 53 localrepo,
54 54 lock as lockmod,
55 55 logcmdutil,
56 56 merge as mergemod,
57 57 obsolete,
58 58 obsutil,
59 59 phases,
60 60 policy,
61 61 pvec,
62 62 pycompat,
63 63 registrar,
64 64 repair,
65 65 revlog,
66 66 revset,
67 67 revsetlang,
68 68 scmutil,
69 69 setdiscovery,
70 70 simplemerge,
71 71 smartset,
72 72 sshpeer,
73 73 sslutil,
74 74 streamclone,
75 75 templater,
76 76 treediscovery,
77 77 upgrade,
78 78 url as urlmod,
79 79 util,
80 80 vfs as vfsmod,
81 81 wireprotoframing,
82 82 wireprotoserver,
83 83 )
84 84 from .utils import (
85 85 dateutil,
86 86 procutil,
87 87 stringutil,
88 88 )
89 89
90 90 release = lockmod.release
91 91
92 92 command = registrar.command()
93 93
94 94 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
95 95 def debugancestor(ui, repo, *args):
96 96 """find the ancestor revision of two revisions in a given index"""
97 97 if len(args) == 3:
98 98 index, rev1, rev2 = args
99 99 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
100 100 lookup = r.lookup
101 101 elif len(args) == 2:
102 102 if not repo:
103 103 raise error.Abort(_('there is no Mercurial repository here '
104 104 '(.hg not found)'))
105 105 rev1, rev2 = args
106 106 r = repo.changelog
107 107 lookup = repo.lookup
108 108 else:
109 109 raise error.Abort(_('either two or three arguments required'))
110 110 a = r.ancestor(lookup(rev1), lookup(rev2))
111 111 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
112 112
113 113 @command('debugapplystreamclonebundle', [], 'FILE')
114 114 def debugapplystreamclonebundle(ui, repo, fname):
115 115 """apply a stream clone bundle file"""
116 116 f = hg.openpath(ui, fname)
117 117 gen = exchange.readbundle(ui, f, fname)
118 118 gen.apply(repo)
119 119
120 120 @command('debugbuilddag',
121 121 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
122 122 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
123 123 ('n', 'new-file', None, _('add new file at each rev'))],
124 124 _('[OPTION]... [TEXT]'))
125 125 def debugbuilddag(ui, repo, text=None,
126 126 mergeable_file=False,
127 127 overwritten_file=False,
128 128 new_file=False):
129 129 """builds a repo with a given DAG from scratch in the current empty repo
130 130
131 131 The description of the DAG is read from stdin if not given on the
132 132 command line.
133 133
134 134 Elements:
135 135
136 136 - "+n" is a linear run of n nodes based on the current default parent
137 137 - "." is a single node based on the current default parent
138 138 - "$" resets the default parent to null (implied at the start);
139 139 otherwise the default parent is always the last node created
140 140 - "<p" sets the default parent to the backref p
141 141 - "*p" is a fork at parent p, which is a backref
142 142 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
143 143 - "/p2" is a merge of the preceding node and p2
144 144 - ":tag" defines a local tag for the preceding node
145 145 - "@branch" sets the named branch for subsequent nodes
146 146 - "#...\\n" is a comment up to the end of the line
147 147
148 148 Whitespace between the above elements is ignored.
149 149
150 150 A backref is either
151 151
152 152 - a number n, which references the node curr-n, where curr is the current
153 153 node, or
154 154 - the name of a local tag you placed earlier using ":tag", or
155 155 - empty to denote the default parent.
156 156
157 157 All string-valued elements are either strictly alphanumeric, or must
158 158 be enclosed in double quotes ("..."), with "\\" as escape character.
159 159 """
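# Illustrative example (an assumption, not part of the original source): in an
# empty repository,
#
#   hg debugbuilddag '+3:base +4:otherhead <base +2 /otherhead'
#
# would create three linear nodes, tag the last one "base", grow a four-node
# run tagged "otherhead", reset the default parent back to "base", add two
# more nodes, and finally merge their tip with the node tagged "otherhead".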
160 160
161 161 if text is None:
162 162 ui.status(_("reading DAG from stdin\n"))
163 163 text = ui.fin.read()
164 164
165 165 cl = repo.changelog
166 166 if len(cl) > 0:
167 167 raise error.Abort(_('repository is not empty'))
168 168
169 169 # determine number of revs in DAG
170 170 total = 0
171 171 for type, data in dagparser.parsedag(text):
172 172 if type == 'n':
173 173 total += 1
174 174
175 175 if mergeable_file:
176 176 linesperrev = 2
177 177 # make a file with k lines per rev
178 178 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
179 179 initialmergedlines.append("")
180 180
181 181 tags = []
182 182
183 183 wlock = lock = tr = None
184 184 try:
185 185 wlock = repo.wlock()
186 186 lock = repo.lock()
187 187 tr = repo.transaction("builddag")
188 188
189 189 at = -1
190 190 atbranch = 'default'
191 191 nodeids = []
192 192 id = 0
193 193 ui.progress(_('building'), id, unit=_('revisions'), total=total)
194 194 for type, data in dagparser.parsedag(text):
195 195 if type == 'n':
196 196 ui.note(('node %s\n' % pycompat.bytestr(data)))
197 197 id, ps = data
198 198
199 199 files = []
200 200 filecontent = {}
201 201
202 202 p2 = None
203 203 if mergeable_file:
204 204 fn = "mf"
205 205 p1 = repo[ps[0]]
206 206 if len(ps) > 1:
207 207 p2 = repo[ps[1]]
208 208 pa = p1.ancestor(p2)
209 209 base, local, other = [x[fn].data() for x in (pa, p1,
210 210 p2)]
211 211 m3 = simplemerge.Merge3Text(base, local, other)
212 212 ml = [l.strip() for l in m3.merge_lines()]
213 213 ml.append("")
214 214 elif at > 0:
215 215 ml = p1[fn].data().split("\n")
216 216 else:
217 217 ml = initialmergedlines
218 218 ml[id * linesperrev] += " r%i" % id
219 219 mergedtext = "\n".join(ml)
220 220 files.append(fn)
221 221 filecontent[fn] = mergedtext
222 222
223 223 if overwritten_file:
224 224 fn = "of"
225 225 files.append(fn)
226 226 filecontent[fn] = "r%i\n" % id
227 227
228 228 if new_file:
229 229 fn = "nf%i" % id
230 230 files.append(fn)
231 231 filecontent[fn] = "r%i\n" % id
232 232 if len(ps) > 1:
233 233 if not p2:
234 234 p2 = repo[ps[1]]
235 235 for fn in p2:
236 236 if fn.startswith("nf"):
237 237 files.append(fn)
238 238 filecontent[fn] = p2[fn].data()
239 239
240 240 def fctxfn(repo, cx, path):
241 241 if path in filecontent:
242 242 return context.memfilectx(repo, cx, path,
243 243 filecontent[path])
244 244 return None
245 245
246 246 if len(ps) == 0 or ps[0] < 0:
247 247 pars = [None, None]
248 248 elif len(ps) == 1:
249 249 pars = [nodeids[ps[0]], None]
250 250 else:
251 251 pars = [nodeids[p] for p in ps]
252 252 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
253 253 date=(id, 0),
254 254 user="debugbuilddag",
255 255 extra={'branch': atbranch})
256 256 nodeid = repo.commitctx(cx)
257 257 nodeids.append(nodeid)
258 258 at = id
259 259 elif type == 'l':
260 260 id, name = data
261 261 ui.note(('tag %s\n' % name))
262 262 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
263 263 elif type == 'a':
264 264 ui.note(('branch %s\n' % data))
265 265 atbranch = data
266 266 ui.progress(_('building'), id, unit=_('revisions'), total=total)
267 267 tr.close()
268 268
269 269 if tags:
270 270 repo.vfs.write("localtags", "".join(tags))
271 271 finally:
272 272 ui.progress(_('building'), None)
273 273 release(tr, lock, wlock)
274 274
275 275 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
276 276 indent_string = ' ' * indent
277 277 if all:
278 278 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
279 279 % indent_string)
280 280
281 281 def showchunks(named):
282 282 ui.write("\n%s%s\n" % (indent_string, named))
283 283 for deltadata in gen.deltaiter():
284 284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 285 ui.write("%s%s %s %s %s %s %d\n" %
286 286 (indent_string, hex(node), hex(p1), hex(p2),
287 287 hex(cs), hex(deltabase), len(delta)))
288 288
289 289 chunkdata = gen.changelogheader()
290 290 showchunks("changelog")
291 291 chunkdata = gen.manifestheader()
292 292 showchunks("manifest")
293 293 for chunkdata in iter(gen.filelogheader, {}):
294 294 fname = chunkdata['filename']
295 295 showchunks(fname)
296 296 else:
297 297 if isinstance(gen, bundle2.unbundle20):
298 298 raise error.Abort(_('use debugbundle2 for this file'))
299 299 chunkdata = gen.changelogheader()
300 300 for deltadata in gen.deltaiter():
301 301 node, p1, p2, cs, deltabase, delta, flags = deltadata
302 302 ui.write("%s%s\n" % (indent_string, hex(node)))
303 303
304 304 def _debugobsmarkers(ui, part, indent=0, **opts):
305 305 """display version and markers contained in 'data'"""
306 306 opts = pycompat.byteskwargs(opts)
307 307 data = part.read()
308 308 indent_string = ' ' * indent
309 309 try:
310 310 version, markers = obsolete._readmarkers(data)
311 311 except error.UnknownVersion as exc:
312 312 msg = "%sunsupported version: %s (%d bytes)\n"
313 313 msg %= indent_string, exc.version, len(data)
314 314 ui.write(msg)
315 315 else:
316 316 msg = "%sversion: %d (%d bytes)\n"
317 317 msg %= indent_string, version, len(data)
318 318 ui.write(msg)
319 319 fm = ui.formatter('debugobsolete', opts)
320 320 for rawmarker in sorted(markers):
321 321 m = obsutil.marker(None, rawmarker)
322 322 fm.startitem()
323 323 fm.plain(indent_string)
324 324 cmdutil.showmarker(fm, m)
325 325 fm.end()
326 326
327 327 def _debugphaseheads(ui, data, indent=0):
328 328 """display the phase heads contained in 'data'"""
329 329 indent_string = ' ' * indent
330 330 headsbyphase = phases.binarydecode(data)
331 331 for phase in phases.allphases:
332 332 for head in headsbyphase[phase]:
333 333 ui.write(indent_string)
334 334 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
335 335
336 336 def _quasirepr(thing):
337 337 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
338 338 return '{%s}' % (
339 339 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
340 340 return pycompat.bytestr(repr(thing))
341 341
342 342 def _debugbundle2(ui, gen, all=None, **opts):
343 343 """lists the contents of a bundle2"""
344 344 if not isinstance(gen, bundle2.unbundle20):
345 345 raise error.Abort(_('not a bundle2 file'))
346 346 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
347 347 parttypes = opts.get(r'part_type', [])
348 348 for part in gen.iterparts():
349 349 if parttypes and part.type not in parttypes:
350 350 continue
351 351 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
352 352 if part.type == 'changegroup':
353 353 version = part.params.get('version', '01')
354 354 cg = changegroup.getunbundler(version, part, 'UN')
355 355 if not ui.quiet:
356 356 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
357 357 if part.type == 'obsmarkers':
358 358 if not ui.quiet:
359 359 _debugobsmarkers(ui, part, indent=4, **opts)
360 360 if part.type == 'phase-heads':
361 361 if not ui.quiet:
362 362 _debugphaseheads(ui, part, indent=4)
363 363
364 364 @command('debugbundle',
365 365 [('a', 'all', None, _('show all details')),
366 366 ('', 'part-type', [], _('show only the named part type')),
367 367 ('', 'spec', None, _('print the bundlespec of the bundle'))],
368 368 _('FILE'),
369 369 norepo=True)
370 370 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
371 371 """lists the contents of a bundle"""
372 372 with hg.openpath(ui, bundlepath) as f:
373 373 if spec:
374 374 spec = exchange.getbundlespec(ui, f)
375 375 ui.write('%s\n' % spec)
376 376 return
377 377
378 378 gen = exchange.readbundle(ui, f, bundlepath)
379 379 if isinstance(gen, bundle2.unbundle20):
380 380 return _debugbundle2(ui, gen, all=all, **opts)
381 381 _debugchangegroup(ui, gen, all=all, **opts)
382 382
383 383 @command('debugcapabilities',
384 384 [], _('PATH'),
385 385 norepo=True)
386 386 def debugcapabilities(ui, path, **opts):
387 387 """lists the capabilities of a remote peer"""
388 388 opts = pycompat.byteskwargs(opts)
389 389 peer = hg.peer(ui, opts, path)
390 390 caps = peer.capabilities()
391 391 ui.write(('Main capabilities:\n'))
392 392 for c in sorted(caps):
393 393 ui.write((' %s\n') % c)
394 394 b2caps = bundle2.bundle2caps(peer)
395 395 if b2caps:
396 396 ui.write(('Bundle2 capabilities:\n'))
397 397 for key, values in sorted(b2caps.iteritems()):
398 398 ui.write((' %s\n') % key)
399 399 for v in values:
400 400 ui.write((' %s\n') % v)
401 401
402 402 @command('debugcheckstate', [], '')
403 403 def debugcheckstate(ui, repo):
404 404 """validate the correctness of the current dirstate"""
405 405 parent1, parent2 = repo.dirstate.parents()
406 406 m1 = repo[parent1].manifest()
407 407 m2 = repo[parent2].manifest()
408 408 errors = 0
409 409 for f in repo.dirstate:
410 410 state = repo.dirstate[f]
411 411 if state in "nr" and f not in m1:
412 412 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
413 413 errors += 1
414 414 if state in "a" and f in m1:
415 415 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
416 416 errors += 1
417 417 if state in "m" and f not in m1 and f not in m2:
418 418 ui.warn(_("%s in state %s, but not in either manifest\n") %
419 419 (f, state))
420 420 errors += 1
421 421 for f in m1:
422 422 state = repo.dirstate[f]
423 423 if state not in "nrm":
424 424 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
425 425 errors += 1
426 426 if errors:
427 427 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
428 428 raise error.Abort(errstr)
429 429
430 430 @command('debugcolor',
431 431 [('', 'style', None, _('show all configured styles'))],
432 432 'hg debugcolor')
433 433 def debugcolor(ui, repo, **opts):
434 434 """show available color, effects or style"""
435 435 ui.write(('color mode: %s\n') % ui._colormode)
436 436 if opts.get(r'style'):
437 437 return _debugdisplaystyle(ui)
438 438 else:
439 439 return _debugdisplaycolor(ui)
440 440
441 441 def _debugdisplaycolor(ui):
442 442 ui = ui.copy()
443 443 ui._styles.clear()
444 444 for effect in color._activeeffects(ui).keys():
445 445 ui._styles[effect] = effect
446 446 if ui._terminfoparams:
447 447 for k, v in ui.configitems('color'):
448 448 if k.startswith('color.'):
449 449 ui._styles[k] = k[6:]
450 450 elif k.startswith('terminfo.'):
451 451 ui._styles[k] = k[9:]
452 452 ui.write(_('available colors:\n'))
453 453 # sort labels containing a '_' after the others to group the '_background' entries.
454 454 items = sorted(ui._styles.items(),
455 455 key=lambda i: ('_' in i[0], i[0], i[1]))
456 456 for colorname, label in items:
457 457 ui.write(('%s\n') % colorname, label=label)
458 458
459 459 def _debugdisplaystyle(ui):
460 460 ui.write(_('available style:\n'))
461 461 width = max(len(s) for s in ui._styles)
462 462 for label, effects in sorted(ui._styles.items()):
463 463 ui.write('%s' % label, label=label)
464 464 if effects:
465 465 # 50
466 466 ui.write(': ')
467 467 ui.write(' ' * (max(0, width - len(label))))
468 468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 469 ui.write('\n')
470 470
471 471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 472 def debugcreatestreamclonebundle(ui, repo, fname):
473 473 """create a stream clone bundle file
474 474
475 475 Stream bundles are special bundles that are essentially archives of
476 476 revlog files. They are commonly used for cloning very quickly.
477 477 """
478 478 # TODO we may want to turn this into an abort when this functionality
479 479 # is moved into `hg bundle`.
480 480 if phases.hassecret(repo):
481 481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 482 'revisions)\n'))
483 483
484 484 requirements, gen = streamclone.generatebundlev1(repo)
485 485 changegroup.writechunks(ui, gen, fname)
486 486
487 487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488 488
489 489 @command('debugdag',
490 490 [('t', 'tags', None, _('use tags as labels')),
491 491 ('b', 'branches', None, _('annotate with branch names')),
492 492 ('', 'dots', None, _('use dots for runs')),
493 493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 494 _('[OPTION]... [FILE [REV]...]'),
495 495 optionalrepo=True)
496 496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 497 """format the changelog or an index DAG as a concise textual description
498 498
499 499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 500 revision numbers, they get labeled in the output as rN.
501 501
502 502 Otherwise, the changelog DAG of the current repo is emitted.
503 503 """
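# Example invocations (illustrative, not from the original source):
#
#   hg debugdag -t -b
#
# emits the current changelog DAG annotated with tags and branch names, while
# passing a revlog index path plus revision numbers, e.g.
# "hg debugdag .hg/store/00manifest.i 0 5", emits that revlog's DAG with the
# listed revisions labeled r0 and r5.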
504 504 spaces = opts.get(r'spaces')
505 505 dots = opts.get(r'dots')
506 506 if file_:
507 507 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
508 508 file_)
509 509 revs = set((int(r) for r in revs))
510 510 def events():
511 511 for r in rlog:
512 512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 513 if p != -1))
514 514 if r in revs:
515 515 yield 'l', (r, "r%i" % r)
516 516 elif repo:
517 517 cl = repo.changelog
518 518 tags = opts.get(r'tags')
519 519 branches = opts.get(r'branches')
520 520 if tags:
521 521 labels = {}
522 522 for l, n in repo.tags().items():
523 523 labels.setdefault(cl.rev(n), []).append(l)
524 524 def events():
525 525 b = "default"
526 526 for r in cl:
527 527 if branches:
528 528 newb = cl.read(cl.node(r))[5]['branch']
529 529 if newb != b:
530 530 yield 'a', newb
531 531 b = newb
532 532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 533 if p != -1))
534 534 if tags:
535 535 ls = labels.get(r)
536 536 if ls:
537 537 for l in ls:
538 538 yield 'l', (r, l)
539 539 else:
540 540 raise error.Abort(_('need repo for changelog dag'))
541 541
542 542 for line in dagparser.dagtextlines(events(),
543 543 addspaces=spaces,
544 544 wraplabels=True,
545 545 wrapannotations=True,
546 546 wrapnonlinear=dots,
547 547 usedots=dots,
548 548 maxlinewidth=70):
549 549 ui.write(line)
550 550 ui.write("\n")
551 551
552 552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 553 def debugdata(ui, repo, file_, rev=None, **opts):
554 554 """dump the contents of a data file revision"""
555 555 opts = pycompat.byteskwargs(opts)
556 556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 557 if rev is not None:
558 558 raise error.CommandError('debugdata', _('invalid arguments'))
559 559 file_, rev = None, file_
560 560 elif rev is None:
561 561 raise error.CommandError('debugdata', _('invalid arguments'))
562 562 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
563 563 try:
564 564 ui.write(r.revision(r.lookup(rev), raw=True))
565 565 except KeyError:
566 566 raise error.Abort(_('invalid revision identifier %s') % rev)
567 567
568 568 @command('debugdate',
569 569 [('e', 'extended', None, _('try extended date formats'))],
570 570 _('[-e] DATE [RANGE]'),
571 571 norepo=True, optionalrepo=True)
572 572 def debugdate(ui, date, range=None, **opts):
573 573 """parse and display a date"""
574 574 if opts[r"extended"]:
575 575 d = dateutil.parsedate(date, util.extendeddateformats)
576 576 else:
577 577 d = dateutil.parsedate(date)
578 578 ui.write(("internal: %d %d\n") % d)
579 579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 580 if range:
581 581 m = dateutil.matchdate(range)
582 582 ui.write(("match: %s\n") % m(d[0]))
583 583
584 584 @command('debugdeltachain',
585 585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 586 _('-c|-m|FILE'),
587 587 optionalrepo=True)
588 588 def debugdeltachain(ui, repo, file_=None, **opts):
589 589 """dump information about delta chains in a revlog
590 590
591 591 Output can be templatized. Available template keywords are:
592 592
593 593 :``rev``: revision number
594 594 :``chainid``: delta chain identifier (numbered by unique base)
595 595 :``chainlen``: delta chain length to this revision
596 596 :``prevrev``: previous revision in delta chain
597 597 :``deltatype``: role of delta / how it was computed
598 598 :``compsize``: compressed size of revision
599 599 :``uncompsize``: uncompressed size of revision
600 600 :``chainsize``: total size of compressed revisions in chain
601 601 :``chainratio``: total chain size divided by uncompressed revision size
602 602 (new delta chains typically start at ratio 2.00)
603 603 :``lindist``: linear distance from base revision in delta chain to end
604 604 of this revision
605 605 :``extradist``: total size of revisions not part of this delta chain from
606 606 base of delta chain to end of this revision; a measurement
607 607 of how much extra data we need to read/seek across to read
608 608 the delta chain for this revision
609 609 :``extraratio``: extradist divided by chainsize; another representation of
610 610 how much unrelated data is needed to load this delta chain
611 611
612 612 If the repository is configured to use sparse reads, additional keywords
613 613 are available:
614 614
615 615 :``readsize``: total size of data read from the disk for a revision
616 616 (sum of the sizes of all the blocks)
617 617 :``largestblock``: size of the largest block of data read from the disk
618 618 :``readdensity``: density of useful bytes in the data read from the disk
619 619 :``srchunks``: in how many data hunks the whole revision would be read
620 620
621 621 Sparse reads can be enabled with experimental.sparse-read = True
622 622 """
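# Illustrative usage (an assumption, not part of the original source):
#
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
#
# walks the manifest revlog and prints one line per revision using the
# template keywords documented above; without -T, the tabular output produced
# below is used instead.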
623 623 opts = pycompat.byteskwargs(opts)
624 624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 625 index = r.index
626 626 generaldelta = r.version & revlog.FLAG_GENERALDELTA
627 627 withsparseread = getattr(r, '_withsparseread', False)
628 628
629 629 def revinfo(rev):
630 630 e = index[rev]
631 631 compsize = e[1]
632 632 uncompsize = e[2]
633 633 chainsize = 0
634 634
635 635 if generaldelta:
636 636 if e[3] == e[5]:
637 637 deltatype = 'p1'
638 638 elif e[3] == e[6]:
639 639 deltatype = 'p2'
640 640 elif e[3] == rev - 1:
641 641 deltatype = 'prev'
642 642 elif e[3] == rev:
643 643 deltatype = 'base'
644 644 else:
645 645 deltatype = 'other'
646 646 else:
647 647 if e[3] == rev:
648 648 deltatype = 'base'
649 649 else:
650 650 deltatype = 'prev'
651 651
652 652 chain = r._deltachain(rev)[0]
653 653 for iterrev in chain:
654 654 e = index[iterrev]
655 655 chainsize += e[1]
656 656
657 657 return compsize, uncompsize, deltatype, chain, chainsize
658 658
659 659 fm = ui.formatter('debugdeltachain', opts)
660 660
661 661 fm.plain(' rev chain# chainlen prev delta '
662 662 'size rawsize chainsize ratio lindist extradist '
663 663 'extraratio')
664 664 if withsparseread:
665 665 fm.plain(' readsize largestblk rddensity srchunks')
666 666 fm.plain('\n')
667 667
668 668 chainbases = {}
669 669 for rev in r:
670 670 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
671 671 chainbase = chain[0]
672 672 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
673 673 start = r.start
674 674 length = r.length
675 675 basestart = start(chainbase)
676 676 revstart = start(rev)
677 677 lineardist = revstart + comp - basestart
678 678 extradist = lineardist - chainsize
679 679 try:
680 680 prevrev = chain[-2]
681 681 except IndexError:
682 682 prevrev = -1
683 683
684 684 chainratio = float(chainsize) / float(uncomp)
685 685 extraratio = float(extradist) / float(chainsize)
686 686
687 687 fm.startitem()
688 688 fm.write('rev chainid chainlen prevrev deltatype compsize '
689 689 'uncompsize chainsize chainratio lindist extradist '
690 690 'extraratio',
691 691 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
692 692 rev, chainid, len(chain), prevrev, deltatype, comp,
693 693 uncomp, chainsize, chainratio, lineardist, extradist,
694 694 extraratio,
695 695 rev=rev, chainid=chainid, chainlen=len(chain),
696 696 prevrev=prevrev, deltatype=deltatype, compsize=comp,
697 697 uncompsize=uncomp, chainsize=chainsize,
698 698 chainratio=chainratio, lindist=lineardist,
699 699 extradist=extradist, extraratio=extraratio)
700 700 if withsparseread:
701 701 readsize = 0
702 702 largestblock = 0
703 703 srchunks = 0
704 704
705 705 for revschunk in revlog._slicechunk(r, chain):
706 706 srchunks += 1
707 707 blkend = start(revschunk[-1]) + length(revschunk[-1])
708 708 blksize = blkend - start(revschunk[0])
709 709
710 710 readsize += blksize
711 711 if largestblock < blksize:
712 712 largestblock = blksize
713 713
714 714 readdensity = float(chainsize) / float(readsize)
715 715
716 716 fm.write('readsize largestblock readdensity srchunks',
717 717 ' %10d %10d %9.5f %8d',
718 718 readsize, largestblock, readdensity, srchunks,
719 719 readsize=readsize, largestblock=largestblock,
720 720 readdensity=readdensity, srchunks=srchunks)
721 721
722 722 fm.plain('\n')
723 723
724 724 fm.end()
725 725
726 726 @command('debugdirstate|debugstate',
727 727 [('', 'nodates', None, _('do not display the saved mtime')),
728 728 ('', 'datesort', None, _('sort by saved mtime'))],
729 729 _('[OPTION]...'))
730 730 def debugstate(ui, repo, **opts):
731 731 """show the contents of the current dirstate"""
732 732
733 733 nodates = opts.get(r'nodates')
734 734 datesort = opts.get(r'datesort')
735 735
736 736 timestr = ""
737 737 if datesort:
738 738 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
739 739 else:
740 740 keyfunc = None # sort by filename
741 741 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
742 742 if ent[3] == -1:
743 743 timestr = 'unset '
744 744 elif nodates:
745 745 timestr = 'set '
746 746 else:
747 747 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
748 748 time.localtime(ent[3]))
749 749 timestr = encoding.strtolocal(timestr)
750 750 if ent[1] & 0o20000:
751 751 mode = 'lnk'
752 752 else:
753 753 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
754 754 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
755 755 for f in repo.dirstate.copies():
756 756 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
757 757
758 758 @command('debugdiscovery',
759 759 [('', 'old', None, _('use old-style discovery')),
760 760 ('', 'nonheads', None,
761 761 _('use old-style discovery with non-heads included')),
762 762 ('', 'rev', [], _('restrict discovery to this set of revs')),
763 763 ] + cmdutil.remoteopts,
764 764 _('[--rev REV] [OTHER]'))
765 765 def debugdiscovery(ui, repo, remoteurl="default", **opts):
766 766 """runs the changeset discovery protocol in isolation"""
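# Example (illustrative, not part of the original source):
#
#   hg debugdiscovery --rev . default
#
# runs discovery against the "default" path for the ancestors of the working
# directory parent and reports the common heads found; --old switches to the
# legacy tree-based discovery.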
767 767 opts = pycompat.byteskwargs(opts)
768 768 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
769 769 remote = hg.peer(repo, opts, remoteurl)
770 770 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
771 771
772 772 # make sure tests are repeatable
773 773 random.seed(12323)
774 774
775 775 def doit(pushedrevs, remoteheads, remote=remote):
776 776 if opts.get('old'):
777 777 if not util.safehasattr(remote, 'branches'):
778 778 # enable in-client legacy support
779 779 remote = localrepo.locallegacypeer(remote.local())
780 780 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
781 781 force=True)
782 782 common = set(common)
783 783 if not opts.get('nonheads'):
784 784 ui.write(("unpruned common: %s\n") %
785 785 " ".join(sorted(short(n) for n in common)))
786 786 dag = dagutil.revlogdag(repo.changelog)
787 787 all = dag.ancestorset(dag.internalizeall(common))
788 788 common = dag.externalizeall(dag.headsetofconnecteds(all))
789 789 else:
790 790 nodes = None
791 791 if pushedrevs:
792 792 revs = scmutil.revrange(repo, pushedrevs)
793 793 nodes = [repo[r].node() for r in revs]
794 794 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
795 795 ancestorsof=nodes)
796 796 common = set(common)
797 797 rheads = set(hds)
798 798 lheads = set(repo.heads())
799 799 ui.write(("common heads: %s\n") %
800 800 " ".join(sorted(short(n) for n in common)))
801 801 if lheads <= common:
802 802 ui.write(("local is subset\n"))
803 803 elif rheads <= common:
804 804 ui.write(("remote is subset\n"))
805 805
806 806 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
807 807 localrevs = opts['rev']
808 808 doit(localrevs, remoterevs)
809 809
810 810 _chunksize = 4 << 10
811 811
812 812 @command('debugdownload',
813 813 [
814 814 ('o', 'output', '', _('path')),
815 815 ],
816 816 optionalrepo=True)
817 817 def debugdownload(ui, repo, url, output=None, **opts):
818 818 """download a resource using Mercurial logic and config
819 819 """
820 820 fh = urlmod.open(ui, url, output)
821 821
822 822 dest = ui
823 823 if output:
824 824 dest = open(output, "wb", _chunksize)
825 825 try:
826 826 data = fh.read(_chunksize)
827 827 while data:
828 828 dest.write(data)
829 829 data = fh.read(_chunksize)
830 830 finally:
831 831 if output:
832 832 dest.close()
833 833
834 834 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
835 835 def debugextensions(ui, **opts):
836 836 '''show information about active extensions'''
837 837 opts = pycompat.byteskwargs(opts)
838 838 exts = extensions.extensions(ui)
839 839 hgver = util.version()
840 840 fm = ui.formatter('debugextensions', opts)
841 841 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
842 842 isinternal = extensions.ismoduleinternal(extmod)
843 843 extsource = pycompat.fsencode(extmod.__file__)
844 844 if isinternal:
845 845 exttestedwith = [] # never expose magic string to users
846 846 else:
847 847 exttestedwith = getattr(extmod, 'testedwith', '').split()
848 848 extbuglink = getattr(extmod, 'buglink', None)
849 849
850 850 fm.startitem()
851 851
852 852 if ui.quiet or ui.verbose:
853 853 fm.write('name', '%s\n', extname)
854 854 else:
855 855 fm.write('name', '%s', extname)
856 856 if isinternal or hgver in exttestedwith:
857 857 fm.plain('\n')
858 858 elif not exttestedwith:
859 859 fm.plain(_(' (untested!)\n'))
860 860 else:
861 861 lasttestedversion = exttestedwith[-1]
862 862 fm.plain(' (%s!)\n' % lasttestedversion)
863 863
864 864 fm.condwrite(ui.verbose and extsource, 'source',
865 865 _(' location: %s\n'), extsource or "")
866 866
867 867 if ui.verbose:
868 868 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
869 869 fm.data(bundled=isinternal)
870 870
871 871 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
872 872 _(' tested with: %s\n'),
873 873 fm.formatlist(exttestedwith, name='ver'))
874 874
875 875 fm.condwrite(ui.verbose and extbuglink, 'buglink',
876 876 _(' bug reporting: %s\n'), extbuglink or "")
877 877
878 878 fm.end()
879 879
880 880 @command('debugfileset',
881 881 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
882 882 _('[-r REV] FILESPEC'))
883 883 def debugfileset(ui, repo, expr, **opts):
884 884 '''parse and apply a fileset specification'''
885 885 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
886 886 if ui.verbose:
887 887 tree = fileset.parse(expr)
888 888 ui.note(fileset.prettyformat(tree), "\n")
889 889
890 890 for f in ctx.getfileset(expr):
891 891 ui.write("%s\n" % f)
892 892
893 893 @command('debugformat',
894 894 [] + cmdutil.formatteropts,
895 895 _(''))
896 896 def debugformat(ui, repo, **opts):
897 897 """display format information about the current repository
898 898
899 899 Use --verbose to get extra information about current config value and
900 900 Mercurial default."""
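# For example (illustrative): plain "hg debugformat" prints one row per format
# variant with its value in the current repository, and "hg debugformat -v"
# adds the "config" and "default" columns mentioned above.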
901 901 opts = pycompat.byteskwargs(opts)
902 902 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
903 903 maxvariantlength = max(len('format-variant'), maxvariantlength)
904 904
905 905 def makeformatname(name):
906 906 return '%s:' + (' ' * (maxvariantlength - len(name)))
907 907
908 908 fm = ui.formatter('debugformat', opts)
909 909 if fm.isplain():
910 910 def formatvalue(value):
911 911 if util.safehasattr(value, 'startswith'):
912 912 return value
913 913 if value:
914 914 return 'yes'
915 915 else:
916 916 return 'no'
917 917 else:
918 918 formatvalue = pycompat.identity
919 919
920 920 fm.plain('format-variant')
921 921 fm.plain(' ' * (maxvariantlength - len('format-variant')))
922 922 fm.plain(' repo')
923 923 if ui.verbose:
924 924 fm.plain(' config default')
925 925 fm.plain('\n')
926 926 for fv in upgrade.allformatvariant:
927 927 fm.startitem()
928 928 repovalue = fv.fromrepo(repo)
929 929 configvalue = fv.fromconfig(repo)
930 930
931 931 if repovalue != configvalue:
932 932 namelabel = 'formatvariant.name.mismatchconfig'
933 933 repolabel = 'formatvariant.repo.mismatchconfig'
934 934 elif repovalue != fv.default:
935 935 namelabel = 'formatvariant.name.mismatchdefault'
936 936 repolabel = 'formatvariant.repo.mismatchdefault'
937 937 else:
938 938 namelabel = 'formatvariant.name.uptodate'
939 939 repolabel = 'formatvariant.repo.uptodate'
940 940
941 941 fm.write('name', makeformatname(fv.name), fv.name,
942 942 label=namelabel)
943 943 fm.write('repo', ' %3s', formatvalue(repovalue),
944 944 label=repolabel)
945 945 if fv.default != configvalue:
946 946 configlabel = 'formatvariant.config.special'
947 947 else:
948 948 configlabel = 'formatvariant.config.default'
949 949 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
950 950 label=configlabel)
951 951 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
952 952 label='formatvariant.default')
953 953 fm.plain('\n')
954 954 fm.end()
955 955
956 956 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
957 957 def debugfsinfo(ui, path="."):
958 958 """show information detected about current filesystem"""
959 959 ui.write(('path: %s\n') % path)
960 960 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
961 961 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
962 962 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
963 963 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
964 964 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
965 965 casesensitive = '(unknown)'
966 966 try:
967 967 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
968 968 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
969 969 except OSError:
970 970 pass
971 971 ui.write(('case-sensitive: %s\n') % casesensitive)
972 972
973 973 @command('debuggetbundle',
974 974 [('H', 'head', [], _('id of head node'), _('ID')),
975 975 ('C', 'common', [], _('id of common node'), _('ID')),
976 976 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
977 977 _('REPO FILE [-H|-C ID]...'),
978 978 norepo=True)
979 979 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
980 980 """retrieves a bundle from a repo
981 981
982 982 Every ID must be a full-length hex node id string. Saves the bundle to the
983 983 given file.
984 984 """
985 985 opts = pycompat.byteskwargs(opts)
986 986 repo = hg.peer(ui, opts, repopath)
987 987 if not repo.capable('getbundle'):
988 988 raise error.Abort("getbundle() not supported by target repository")
989 989 args = {}
990 990 if common:
991 991 args[r'common'] = [bin(s) for s in common]
992 992 if head:
993 993 args[r'heads'] = [bin(s) for s in head]
994 994 # TODO: get desired bundlecaps from command line.
995 995 args[r'bundlecaps'] = None
996 996 bundle = repo.getbundle('debug', **args)
997 997
998 998 bundletype = opts.get('type', 'bzip2').lower()
999 999 btypes = {'none': 'HG10UN',
1000 1000 'bzip2': 'HG10BZ',
1001 1001 'gzip': 'HG10GZ',
1002 1002 'bundle2': 'HG20'}
1003 1003 bundletype = btypes.get(bundletype)
1004 1004 if bundletype not in bundle2.bundletypes:
1005 1005 raise error.Abort(_('unknown bundle type specified with --type'))
1006 1006 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1007 1007
1008 1008 @command('debugignore', [], '[FILE]')
1009 1009 def debugignore(ui, repo, *files, **opts):
1010 1010 """display the combined ignore pattern and information about ignored files
1011 1011
1012 1012 With no argument display the combined ignore pattern.
1013 1013
1014 1014 Given space-separated file names, shows if the given file is ignored and,
1015 1015 if so, shows the ignore rule (file and line number) that matched it.
1016 1016 """
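# Illustrative usage (the file name is hypothetical, not from the original
# source): plain "hg debugignore" prints the combined ignore pattern, while
# "hg debugignore build/output.o" reports whether that path is ignored and, if
# so, which ignore file and line matched it.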
1017 1017 ignore = repo.dirstate._ignore
1018 1018 if not files:
1019 1019 # Show all the patterns
1020 1020 ui.write("%s\n" % pycompat.byterepr(ignore))
1021 1021 else:
1022 1022 m = scmutil.match(repo[None], pats=files)
1023 1023 for f in m.files():
1024 1024 nf = util.normpath(f)
1025 1025 ignored = None
1026 1026 ignoredata = None
1027 1027 if nf != '.':
1028 1028 if ignore(nf):
1029 1029 ignored = nf
1030 1030 ignoredata = repo.dirstate._ignorefileandline(nf)
1031 1031 else:
1032 1032 for p in util.finddirs(nf):
1033 1033 if ignore(p):
1034 1034 ignored = p
1035 1035 ignoredata = repo.dirstate._ignorefileandline(p)
1036 1036 break
1037 1037 if ignored:
1038 1038 if ignored == nf:
1039 1039 ui.write(_("%s is ignored\n") % m.uipath(f))
1040 1040 else:
1041 1041 ui.write(_("%s is ignored because of "
1042 1042 "containing folder %s\n")
1043 1043 % (m.uipath(f), ignored))
1044 1044 ignorefile, lineno, line = ignoredata
1045 1045 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1046 1046 % (ignorefile, lineno, line))
1047 1047 else:
1048 1048 ui.write(_("%s is not ignored\n") % m.uipath(f))
1049 1049
1050 1050 @command('debugindex', cmdutil.debugrevlogopts +
1051 1051 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1052 1052 _('[-f FORMAT] -c|-m|FILE'),
1053 1053 optionalrepo=True)
1054 1054 def debugindex(ui, repo, file_=None, **opts):
1055 1055 """dump the contents of an index file"""
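# Example (illustrative, not part of the original source): "hg debugindex -c"
# dumps the changelog index in the default format 0, and "hg debugindex -f 1 -m"
# uses the wider format 1 layout shown below for the manifest.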
1056 1056 opts = pycompat.byteskwargs(opts)
1057 1057 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1058 1058 format = opts.get('format', 0)
1059 1059 if format not in (0, 1):
1060 1060 raise error.Abort(_("unknown format %d") % format)
1061 1061
1062 1062 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1063 1063 if generaldelta:
1064 1064 basehdr = ' delta'
1065 1065 else:
1066 1066 basehdr = ' base'
1067 1067
1068 1068 if ui.debugflag:
1069 1069 shortfn = hex
1070 1070 else:
1071 1071 shortfn = short
1072 1072
1073 1073 # There might not be anything in r, so have a sane default
1074 1074 idlen = 12
1075 1075 for i in r:
1076 1076 idlen = len(shortfn(r.node(i)))
1077 1077 break
1078 1078
1079 1079 if format == 0:
1080 1080 ui.write((" rev offset length " + basehdr + " linkrev"
1081 1081 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1082 1082 elif format == 1:
1083 1083 ui.write((" rev flag offset length"
1084 1084 " size " + basehdr + " link p1 p2"
1085 1085 " %s\n") % "nodeid".rjust(idlen))
1086 1086
1087 1087 for i in r:
1088 1088 node = r.node(i)
1089 1089 if generaldelta:
1090 1090 base = r.deltaparent(i)
1091 1091 else:
1092 1092 base = r.chainbase(i)
1093 1093 if format == 0:
1094 1094 try:
1095 1095 pp = r.parents(node)
1096 1096 except Exception:
1097 1097 pp = [nullid, nullid]
1098 1098 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1099 1099 i, r.start(i), r.length(i), base, r.linkrev(i),
1100 1100 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1101 1101 elif format == 1:
1102 1102 pr = r.parentrevs(i)
1103 1103 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1104 1104 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1105 1105 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1106 1106
1107 1107 @command('debugindexdot', cmdutil.debugrevlogopts,
1108 1108 _('-c|-m|FILE'), optionalrepo=True)
1109 1109 def debugindexdot(ui, repo, file_=None, **opts):
1110 1110 """dump an index DAG as a graphviz dot file"""
1111 1111 opts = pycompat.byteskwargs(opts)
1112 1112 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1113 1113 ui.write(("digraph G {\n"))
1114 1114 for i in r:
1115 1115 node = r.node(i)
1116 1116 pp = r.parents(node)
1117 1117 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1118 1118 if pp[1] != nullid:
1119 1119 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1120 1120 ui.write("}\n")
1121 1121
1122 1122 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1123 1123 def debuginstall(ui, **opts):
1124 1124 '''test Mercurial installation
1125 1125
1126 1126 Returns 0 on success.
1127 1127 '''
1128 1128 opts = pycompat.byteskwargs(opts)
1129 1129
1130 1130 def writetemp(contents):
1131 1131 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1132 1132 f = os.fdopen(fd, r"wb")
1133 1133 f.write(contents)
1134 1134 f.close()
1135 1135 return name
1136 1136
1137 1137 problems = 0
1138 1138
1139 1139 fm = ui.formatter('debuginstall', opts)
1140 1140 fm.startitem()
1141 1141
1142 1142 # encoding
1143 1143 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1144 1144 err = None
1145 1145 try:
1146 1146 codecs.lookup(pycompat.sysstr(encoding.encoding))
1147 1147 except LookupError as inst:
1148 1148 err = stringutil.forcebytestr(inst)
1149 1149 problems += 1
1150 1150 fm.condwrite(err, 'encodingerror', _(" %s\n"
1151 1151 " (check that your locale is properly set)\n"), err)
1152 1152
1153 1153 # Python
1154 1154 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1155 1155 pycompat.sysexecutable)
1156 1156 fm.write('pythonver', _("checking Python version (%s)\n"),
1157 1157 ("%d.%d.%d" % sys.version_info[:3]))
1158 1158 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1159 1159 os.path.dirname(pycompat.fsencode(os.__file__)))
1160 1160
1161 1161 security = set(sslutil.supportedprotocols)
1162 1162 if sslutil.hassni:
1163 1163 security.add('sni')
1164 1164
1165 1165 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1166 1166 fm.formatlist(sorted(security), name='protocol',
1167 1167 fmt='%s', sep=','))
1168 1168
1169 1169 # These are warnings, not errors. So don't increment problem count. This
1170 1170 # may change in the future.
1171 1171 if 'tls1.2' not in security:
1172 1172 fm.plain(_(' TLS 1.2 not supported by Python install; '
1173 1173 'network connections lack modern security\n'))
1174 1174 if 'sni' not in security:
1175 1175 fm.plain(_(' SNI not supported by Python install; may have '
1176 1176 'connectivity issues with some servers\n'))
1177 1177
1178 1178 # TODO print CA cert info
1179 1179
1180 1180 # hg version
1181 1181 hgver = util.version()
1182 1182 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1183 1183 hgver.split('+')[0])
1184 1184 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1185 1185 '+'.join(hgver.split('+')[1:]))
1186 1186
1187 1187 # compiled modules
1188 1188 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1189 1189 policy.policy)
1190 1190 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1191 1191 os.path.dirname(pycompat.fsencode(__file__)))
1192 1192
1193 1193 if policy.policy in ('c', 'allow'):
1194 1194 err = None
1195 1195 try:
1196 1196 from .cext import (
1197 1197 base85,
1198 1198 bdiff,
1199 1199 mpatch,
1200 1200 osutil,
1201 1201 )
1202 1202 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1203 1203 except Exception as inst:
1204 1204 err = stringutil.forcebytestr(inst)
1205 1205 problems += 1
1206 1206 fm.condwrite(err, 'extensionserror', " %s\n", err)
1207 1207
1208 1208 compengines = util.compengines._engines.values()
1209 1209 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1210 1210 fm.formatlist(sorted(e.name() for e in compengines),
1211 1211 name='compengine', fmt='%s', sep=', '))
1212 1212 fm.write('compenginesavail', _('checking available compression engines '
1213 1213 '(%s)\n'),
1214 1214 fm.formatlist(sorted(e.name() for e in compengines
1215 1215 if e.available()),
1216 1216 name='compengine', fmt='%s', sep=', '))
1217 1217 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1218 1218 fm.write('compenginesserver', _('checking available compression engines '
1219 1219 'for wire protocol (%s)\n'),
1220 1220 fm.formatlist([e.name() for e in wirecompengines
1221 1221 if e.wireprotosupport()],
1222 1222 name='compengine', fmt='%s', sep=', '))
1223 1223 re2 = 'missing'
1224 1224 if util._re2:
1225 1225 re2 = 'available'
1226 1226 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1227 1227 fm.data(re2=bool(util._re2))
1228 1228
1229 1229 # templates
1230 1230 p = templater.templatepaths()
1231 1231 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1232 1232 fm.condwrite(not p, '', _(" no template directories found\n"))
1233 1233 if p:
1234 1234 m = templater.templatepath("map-cmdline.default")
1235 1235 if m:
1236 1236 # template found, check if it is working
1237 1237 err = None
1238 1238 try:
1239 1239 templater.templater.frommapfile(m)
1240 1240 except Exception as inst:
1241 1241 err = stringutil.forcebytestr(inst)
1242 1242 p = None
1243 1243 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1244 1244 else:
1245 1245 p = None
1246 1246 fm.condwrite(p, 'defaulttemplate',
1247 1247 _("checking default template (%s)\n"), m)
1248 1248 fm.condwrite(not m, 'defaulttemplatenotfound',
1249 1249 _(" template '%s' not found\n"), "default")
1250 1250 if not p:
1251 1251 problems += 1
1252 1252 fm.condwrite(not p, '',
1253 1253 _(" (templates seem to have been installed incorrectly)\n"))
1254 1254
1255 1255 # editor
1256 1256 editor = ui.geteditor()
1257 1257 editor = util.expandpath(editor)
1258 1258 editorbin = procutil.shellsplit(editor)[0]
1259 1259 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1260 1260 cmdpath = procutil.findexe(editorbin)
1261 1261 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1262 1262 _(" No commit editor set and can't find %s in PATH\n"
1263 1263 " (specify a commit editor in your configuration"
1264 1264 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1265 1265 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1266 1266 _(" Can't find editor '%s' in PATH\n"
1267 1267 " (specify a commit editor in your configuration"
1268 1268 " file)\n"), not cmdpath and editorbin)
1269 1269 if not cmdpath and editor != 'vi':
1270 1270 problems += 1
1271 1271
1272 1272 # check username
1273 1273 username = None
1274 1274 err = None
1275 1275 try:
1276 1276 username = ui.username()
1277 1277 except error.Abort as e:
1278 1278 err = stringutil.forcebytestr(e)
1279 1279 problems += 1
1280 1280
1281 1281 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1282 1282 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1283 1283 " (specify a username in your configuration file)\n"), err)
1284 1284
1285 1285 fm.condwrite(not problems, '',
1286 1286 _("no problems detected\n"))
1287 1287 if not problems:
1288 1288 fm.data(problems=problems)
1289 1289 fm.condwrite(problems, 'problems',
1290 1290 _("%d problems detected,"
1291 1291 " please check your install!\n"), problems)
1292 1292 fm.end()
1293 1293
1294 1294 return problems
1295 1295
1296 1296 @command('debugknown', [], _('REPO ID...'), norepo=True)
1297 1297 def debugknown(ui, repopath, *ids, **opts):
1298 1298 """test whether node ids are known to a repo
1299 1299
1300 1300 Every ID must be a full-length hex node id string. Returns a list of 0s
1301 1301 and 1s indicating unknown/known.
1302 1302 """
1303 1303 opts = pycompat.byteskwargs(opts)
1304 1304 repo = hg.peer(ui, opts, repopath)
1305 1305 if not repo.capable('known'):
1306 1306 raise error.Abort("known() not supported by target repository")
1307 1307 flags = repo.known([bin(s) for s in ids])
1308 1308 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1309 1309
1310 1310 @command('debuglabelcomplete', [], _('LABEL...'))
1311 1311 def debuglabelcomplete(ui, repo, *args):
1312 1312 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1313 1313 debugnamecomplete(ui, repo, *args)
1314 1314
1315 1315 @command('debuglocks',
1316 1316 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1317 1317 ('W', 'force-wlock', None,
1318 1318 _('free the working state lock (DANGEROUS)')),
1319 1319 ('s', 'set-lock', None, _('set the store lock until stopped')),
1320 1320 ('S', 'set-wlock', None,
1321 1321 _('set the working state lock until stopped'))],
1322 1322 _('[OPTION]...'))
1323 1323 def debuglocks(ui, repo, **opts):
1324 1324 """show or modify state of locks
1325 1325
1326 1326 By default, this command will show which locks are held. This
1327 1327 includes the user and process holding the lock, the amount of time
1328 1328 the lock has been held, and the machine name where the process is
1329 1329 running if it's not local.
1330 1330
1331 1331 Locks protect the integrity of Mercurial's data, so should be
1332 1332 treated with care. System crashes or other interruptions may cause
1333 1333 locks to not be properly released, though Mercurial will usually
1334 1334 detect and remove such stale locks automatically.
1335 1335
1336 1336 However, detecting stale locks may not always be possible (for
1337 1337 instance, on a shared filesystem). Removing locks may also be
1338 1338 blocked by filesystem permissions.
1339 1339
1340 1340 Setting a lock will prevent other commands from changing the data.
1341 1341 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1342 1342 The set locks are removed when the command exits.
1343 1343
1344 1344 Returns 0 if no locks are held.
1345 1345
1346 1346 """
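# Illustrative usage (an assumption, not part of the original source): plain
# "hg debuglocks" reports who holds the store and working-state locks,
# "hg debuglocks --set-wlock" holds the working state lock until interrupted,
# and "hg debuglocks -L" frees a stale store lock (dangerous, as noted above).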
1347 1347
1348 1348 if opts.get(r'force_lock'):
1349 1349 repo.svfs.unlink('lock')
1350 1350 if opts.get(r'force_wlock'):
1351 1351 repo.vfs.unlink('wlock')
1352 1352 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1353 1353 return 0
1354 1354
1355 1355 locks = []
1356 1356 try:
1357 1357 if opts.get(r'set_wlock'):
1358 1358 try:
1359 1359 locks.append(repo.wlock(False))
1360 1360 except error.LockHeld:
1361 1361 raise error.Abort(_('wlock is already held'))
1362 1362 if opts.get(r'set_lock'):
1363 1363 try:
1364 1364 locks.append(repo.lock(False))
1365 1365 except error.LockHeld:
1366 1366 raise error.Abort(_('lock is already held'))
1367 1367 if len(locks):
1368 1368 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1369 1369 return 0
1370 1370 finally:
1371 1371 release(*locks)
1372 1372
1373 1373 now = time.time()
1374 1374 held = 0
1375 1375
1376 1376 def report(vfs, name, method):
1377 1377 # this causes stale locks to get reaped for more accurate reporting
1378 1378 try:
1379 1379 l = method(False)
1380 1380 except error.LockHeld:
1381 1381 l = None
1382 1382
1383 1383 if l:
1384 1384 l.release()
1385 1385 else:
1386 1386 try:
1387 1387 st = vfs.lstat(name)
1388 1388 age = now - st[stat.ST_MTIME]
1389 1389 user = util.username(st.st_uid)
1390 1390 locker = vfs.readlock(name)
1391 1391 if ":" in locker:
1392 1392 host, pid = locker.split(':')
1393 1393 if host == socket.gethostname():
1394 1394 locker = 'user %s, process %s' % (user, pid)
1395 1395 else:
1396 1396 locker = 'user %s, process %s, host %s' \
1397 1397 % (user, pid, host)
1398 1398 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1399 1399 return 1
1400 1400 except OSError as e:
1401 1401 if e.errno != errno.ENOENT:
1402 1402 raise
1403 1403
1404 1404 ui.write(("%-6s free\n") % (name + ":"))
1405 1405 return 0
1406 1406
1407 1407 held += report(repo.svfs, "lock", repo.lock)
1408 1408 held += report(repo.vfs, "wlock", repo.wlock)
1409 1409
1410 1410 return held
1411 1411
1412 1412 @command('debugmergestate', [], '')
1413 1413 def debugmergestate(ui, repo, *args):
1414 1414 """print merge state
1415 1415
1416 1416 Use --verbose to print out information about whether v1 or v2 merge state
1417 1417 was chosen."""
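# Illustrative usage (an assumption, not part of the original source): running
# "hg debugmergestate" in a repository with an interrupted merge lists the
# local/other nodes and one record per conflicted file; with --verbose it also
# notes whether the v1 or v2 merge state was chosen.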
1418 1418 def _hashornull(h):
1419 1419 if h == nullhex:
1420 1420 return 'null'
1421 1421 else:
1422 1422 return h
1423 1423
1424 1424 def printrecords(version):
1425 1425 ui.write(('* version %d records\n') % version)
1426 1426 if version == 1:
1427 1427 records = v1records
1428 1428 else:
1429 1429 records = v2records
1430 1430
1431 1431 for rtype, record in records:
1432 1432 # pretty print some record types
1433 1433 if rtype == 'L':
1434 1434 ui.write(('local: %s\n') % record)
1435 1435 elif rtype == 'O':
1436 1436 ui.write(('other: %s\n') % record)
1437 1437 elif rtype == 'm':
1438 1438 driver, mdstate = record.split('\0', 1)
1439 1439 ui.write(('merge driver: %s (state "%s")\n')
1440 1440 % (driver, mdstate))
1441 1441 elif rtype in 'FDC':
1442 1442 r = record.split('\0')
1443 1443 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1444 1444 if version == 1:
1445 1445 onode = 'not stored in v1 format'
1446 1446 flags = r[7]
1447 1447 else:
1448 1448 onode, flags = r[7:9]
1449 1449 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1450 1450 % (f, rtype, state, _hashornull(hash)))
1451 1451 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1452 1452 ui.write((' ancestor path: %s (node %s)\n')
1453 1453 % (afile, _hashornull(anode)))
1454 1454 ui.write((' other path: %s (node %s)\n')
1455 1455 % (ofile, _hashornull(onode)))
1456 1456 elif rtype == 'f':
1457 1457 filename, rawextras = record.split('\0', 1)
1458 1458 extras = rawextras.split('\0')
1459 1459 i = 0
1460 1460 extrastrings = []
1461 1461 while i < len(extras):
1462 1462 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1463 1463 i += 2
1464 1464
1465 1465 ui.write(('file extras: %s (%s)\n')
1466 1466 % (filename, ', '.join(extrastrings)))
1467 1467 elif rtype == 'l':
1468 1468 labels = record.split('\0', 2)
1469 1469 labels = [l for l in labels if len(l) > 0]
1470 1470 ui.write(('labels:\n'))
1471 1471 ui.write((' local: %s\n' % labels[0]))
1472 1472 ui.write((' other: %s\n' % labels[1]))
1473 1473 if len(labels) > 2:
1474 1474 ui.write((' base: %s\n' % labels[2]))
1475 1475 else:
1476 1476 ui.write(('unrecognized entry: %s\t%s\n')
1477 1477 % (rtype, record.replace('\0', '\t')))
1478 1478
1479 1479 # Avoid mergestate.read() since it may raise an exception for unsupported
1480 1480 # merge state records. We shouldn't be doing this, but this is OK since this
1481 1481 # command is pretty low-level.
1482 1482 ms = mergemod.mergestate(repo)
1483 1483
1484 1484 # sort so that reasonable information is on top
1485 1485 v1records = ms._readrecordsv1()
1486 1486 v2records = ms._readrecordsv2()
1487 1487 order = 'LOml'
1488 1488 def key(r):
1489 1489 idx = order.find(r[0])
1490 1490 if idx == -1:
1491 1491 return (1, r[1])
1492 1492 else:
1493 1493 return (0, idx)
1494 1494 v1records.sort(key=key)
1495 1495 v2records.sort(key=key)
1496 1496
1497 1497 if not v1records and not v2records:
1498 1498 ui.write(('no merge state found\n'))
1499 1499 elif not v2records:
1500 1500 ui.note(('no version 2 merge state\n'))
1501 1501 printrecords(1)
1502 1502 elif ms._v1v2match(v1records, v2records):
1503 1503 ui.note(('v1 and v2 states match: using v2\n'))
1504 1504 printrecords(2)
1505 1505 else:
1506 1506 ui.note(('v1 and v2 states mismatch: using v1\n'))
1507 1507 printrecords(1)
1508 1508 if ui.verbose:
1509 1509 printrecords(2)
1510 1510
1511 1511 @command('debugnamecomplete', [], _('NAME...'))
1512 1512 def debugnamecomplete(ui, repo, *args):
1513 1513 '''complete "names" - tags, open branch names, bookmark names'''
1514 1514
1515 1515 names = set()
1516 1516 # since we previously only listed open branches, we will handle that
1517 1517 # specially (after this for loop)
1518 1518 for name, ns in repo.names.iteritems():
1519 1519 if name != 'branches':
1520 1520 names.update(ns.listnames(repo))
1521 1521 names.update(tag for (tag, heads, tip, closed)
1522 1522 in repo.branchmap().iterbranches() if not closed)
1523 1523 completions = set()
1524 1524 if not args:
1525 1525 args = ['']
1526 1526 for a in args:
1527 1527 completions.update(n for n in names if n.startswith(a))
1528 1528 ui.write('\n'.join(sorted(completions)))
1529 1529 ui.write('\n')
1530 1530
1531 1531 @command('debugobsolete',
1532 1532 [('', 'flags', 0, _('markers flag')),
1533 1533 ('', 'record-parents', False,
1534 1534 _('record parent information for the precursor')),
1535 1535 ('r', 'rev', [], _('display markers relevant to REV')),
1536 1536 ('', 'exclusive', False, _('restrict display to markers only '
1537 1537 'relevant to REV')),
1538 1538 ('', 'index', False, _('display index of the marker')),
1539 1539 ('', 'delete', [], _('delete markers specified by indices')),
1540 1540 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1541 1541 _('[OBSOLETED [REPLACEMENT ...]]'))
1542 1542 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1543 1543 """create arbitrary obsolete marker
1544 1544
1545 1545 With no arguments, displays the list of obsolescence markers."""
1546 1546
1547 1547 opts = pycompat.byteskwargs(opts)
1548 1548
1549 1549 def parsenodeid(s):
1550 1550 try:
1551 1551 # We do not use revsingle/revrange functions here to accept
1552 1552 # arbitrary node identifiers, possibly not present in the
1553 1553 # local repository.
1554 1554 n = bin(s)
1555 1555 if len(n) != len(nullid):
1556 1556 raise TypeError()
1557 1557 return n
1558 1558 except TypeError:
1559 1559 raise error.Abort('changeset references must be full hexadecimal '
1560 1560 'node identifiers')
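# Illustrative note (not in the original source): a 40-character hex string
# such as 'ff' * 20 parses to a 20-byte node here, while a shorter hex
# string like 'abcd' passes bin() but fails the length check and aborts
# with the message above.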
1561 1561
1562 1562 if opts.get('delete'):
1563 1563 indices = []
1564 1564 for v in opts.get('delete'):
1565 1565 try:
1566 1566 indices.append(int(v))
1567 1567 except ValueError:
1568 1568 raise error.Abort(_('invalid index value: %r') % v,
1569 1569 hint=_('use integers for indices'))
1570 1570
1571 1571 if repo.currenttransaction():
1572 1572 raise error.Abort(_('cannot delete obsmarkers in the middle '
1573 1573 'of transaction.'))
1574 1574
1575 1575 with repo.lock():
1576 1576 n = repair.deleteobsmarkers(repo.obsstore, indices)
1577 1577 ui.write(_('deleted %i obsolescence markers\n') % n)
1578 1578
1579 1579 return
1580 1580
1581 1581 if precursor is not None:
1582 1582 if opts['rev']:
1583 1583 raise error.Abort('cannot select revision when creating marker')
1584 1584 metadata = {}
1585 1585 metadata['user'] = opts['user'] or ui.username()
1586 1586 succs = tuple(parsenodeid(succ) for succ in successors)
1587 1587 l = repo.lock()
1588 1588 try:
1589 1589 tr = repo.transaction('debugobsolete')
1590 1590 try:
1591 1591 date = opts.get('date')
1592 1592 if date:
1593 1593 date = dateutil.parsedate(date)
1594 1594 else:
1595 1595 date = None
1596 1596 prec = parsenodeid(precursor)
1597 1597 parents = None
1598 1598 if opts['record_parents']:
1599 1599 if prec not in repo.unfiltered():
1600 1600 raise error.Abort('cannot use --record-parents on '
1601 1601 'unknown changesets')
1602 1602 parents = repo.unfiltered()[prec].parents()
1603 1603 parents = tuple(p.node() for p in parents)
1604 1604 repo.obsstore.create(tr, prec, succs, opts['flags'],
1605 1605 parents=parents, date=date,
1606 1606 metadata=metadata, ui=ui)
1607 1607 tr.close()
1608 1608 except ValueError as exc:
1609 1609 raise error.Abort(_('bad obsmarker input: %s') %
1610 1610 pycompat.bytestr(exc))
1611 1611 finally:
1612 1612 tr.release()
1613 1613 finally:
1614 1614 l.release()
1615 1615 else:
1616 1616 if opts['rev']:
1617 1617 revs = scmutil.revrange(repo, opts['rev'])
1618 1618 nodes = [repo[r].node() for r in revs]
1619 1619 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1620 1620 exclusive=opts['exclusive']))
1621 1621 markers.sort(key=lambda x: x._data)
1622 1622 else:
1623 1623 markers = obsutil.getmarkers(repo)
1624 1624
1625 1625 markerstoiter = markers
1626 1626 isrelevant = lambda m: True
1627 1627 if opts.get('rev') and opts.get('index'):
1628 1628 markerstoiter = obsutil.getmarkers(repo)
1629 1629 markerset = set(markers)
1630 1630 isrelevant = lambda m: m in markerset
1631 1631
1632 1632 fm = ui.formatter('debugobsolete', opts)
1633 1633 for i, m in enumerate(markerstoiter):
1634 1634 if not isrelevant(m):
1635 1635 # marker can be irrelevant when we're iterating over a set
1636 1636 # of markers (markerstoiter) which is bigger than the set
1637 1637 # of markers we want to display (markers)
1638 1638 # this can happen if both --index and --rev options are
1639 1639 # provided and thus we need to iterate over all of the markers
1640 1640 # to get the correct indices, but only display the ones that
1641 1641 # are relevant to --rev value
1642 1642 continue
1643 1643 fm.startitem()
1644 1644 ind = i if opts.get('index') else None
1645 1645 cmdutil.showmarker(fm, m, index=ind)
1646 1646 fm.end()
1647 1647
1648 1648 @command('debugpathcomplete',
1649 1649 [('f', 'full', None, _('complete an entire path')),
1650 1650 ('n', 'normal', None, _('show only normal files')),
1651 1651 ('a', 'added', None, _('show only added files')),
1652 1652 ('r', 'removed', None, _('show only removed files'))],
1653 1653 _('FILESPEC...'))
1654 1654 def debugpathcomplete(ui, repo, *specs, **opts):
1655 1655 '''complete part or all of a tracked path
1656 1656
1657 1657 This command supports shells that offer path name completion. It
1658 1658 currently completes only files already known to the dirstate.
1659 1659
1660 1660 Completion extends only to the next path segment unless
1661 1661 --full is specified, in which case entire paths are used.'''
1662 1662
1663 1663 def complete(path, acceptable):
1664 1664 dirstate = repo.dirstate
1665 1665 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1666 1666 rootdir = repo.root + pycompat.ossep
1667 1667 if spec != repo.root and not spec.startswith(rootdir):
1668 1668 return [], []
1669 1669 if os.path.isdir(spec):
1670 1670 spec += '/'
1671 1671 spec = spec[len(rootdir):]
1672 1672 fixpaths = pycompat.ossep != '/'
1673 1673 if fixpaths:
1674 1674 spec = spec.replace(pycompat.ossep, '/')
1675 1675 speclen = len(spec)
1676 1676 fullpaths = opts[r'full']
1677 1677 files, dirs = set(), set()
1678 1678 adddir, addfile = dirs.add, files.add
1679 1679 for f, st in dirstate.iteritems():
1680 1680 if f.startswith(spec) and st[0] in acceptable:
1681 1681 if fixpaths:
1682 1682 f = f.replace('/', pycompat.ossep)
1683 1683 if fullpaths:
1684 1684 addfile(f)
1685 1685 continue
1686 1686 s = f.find(pycompat.ossep, speclen)
1687 1687 if s >= 0:
1688 1688 adddir(f[:s])
1689 1689 else:
1690 1690 addfile(f)
1691 1691 return files, dirs
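# Worked example (illustrative only, assuming both files are tracked): with
# 'src/a.py' and 'src/b/c.py' in the dirstate, complete('src', 'nmar')
# returns ({'src/a.py'}, {'src/b'}); with --full it returns both full paths
# as files and no directories.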
1692 1692
1693 1693 acceptable = ''
1694 1694 if opts[r'normal']:
1695 1695 acceptable += 'nm'
1696 1696 if opts[r'added']:
1697 1697 acceptable += 'a'
1698 1698 if opts[r'removed']:
1699 1699 acceptable += 'r'
1700 1700 cwd = repo.getcwd()
1701 1701 if not specs:
1702 1702 specs = ['.']
1703 1703
1704 1704 files, dirs = set(), set()
1705 1705 for spec in specs:
1706 1706 f, d = complete(spec, acceptable or 'nmar')
1707 1707 files.update(f)
1708 1708 dirs.update(d)
1709 1709 files.update(dirs)
1710 1710 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1711 1711 ui.write('\n')
1712 1712
1713 1713 @command('debugpeer', [], _('PATH'), norepo=True)
1714 1714 def debugpeer(ui, path):
1715 1715 """establish a connection to a peer repository"""
1716 1716 # Always enable peer request logging. Requires --debug to display
1717 1717 # though.
1718 1718 overrides = {
1719 1719 ('devel', 'debug.peer-request'): True,
1720 1720 }
1721 1721
1722 1722 with ui.configoverride(overrides):
1723 1723 peer = hg.peer(ui, {}, path)
1724 1724
1725 1725 local = peer.local() is not None
1726 1726 canpush = peer.canpush()
1727 1727
1728 1728 ui.write(_('url: %s\n') % peer.url())
1729 1729 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1730 1730 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1731 1731
1732 1732 @command('debugpickmergetool',
1733 1733 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1734 1734 ('', 'changedelete', None, _('emulate merging change and delete')),
1735 1735 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1736 1736 _('[PATTERN]...'),
1737 1737 inferrepo=True)
1738 1738 def debugpickmergetool(ui, repo, *pats, **opts):
1739 1739 """examine which merge tool is chosen for specified file
1740 1740
1741 1741 As described in :hg:`help merge-tools`, Mercurial examines
1742 1742 configurations below in this order to decide which merge tool is
1743 1743 chosen for specified file.
1744 1744
1745 1745 1. ``--tool`` option
1746 1746 2. ``HGMERGE`` environment variable
1747 1747 3. configurations in ``merge-patterns`` section
1748 1748 4. configuration of ``ui.merge``
1749 1749 5. configurations in ``merge-tools`` section
1750 1750 6. ``hgmerge`` tool (for historical reasons only)
1751 1751 7. default tool for fallback (``:merge`` or ``:prompt``)
1752 1752
1753 1753 This command writes out examination result in the style below::
1754 1754
1755 1755 FILE = MERGETOOL
1756 1756
1757 1757 By default, all files known in the first parent context of the
1758 1758 working directory are examined. Use file patterns and/or -I/-X
1759 1759 options to limit target files. -r/--rev is also useful to examine
1760 1760 files in another context without actually updating to it.
1761 1761
1762 1762 With --debug, this command also shows warning messages while matching
1763 1763 against ``merge-patterns`` and so on. It is recommended to
1764 1764 use this option with explicit file patterns and/or -I/-X options,
1765 1765 because this option increases the amount of output per file according
1766 1766 to configurations in hgrc.
1767 1767
1768 1768 With -v/--verbose, this command shows the configurations below
1769 1769 first (only if they are specified).
1770 1770
1771 1771 - ``--tool`` option
1772 1772 - ``HGMERGE`` environment variable
1773 1773 - configuration of ``ui.merge``
1774 1774
1775 1775 If a merge tool is chosen before matching against
1776 1776 ``merge-patterns``, this command can't show any helpful
1777 1777 information, even with --debug. In such a case, the information above
1778 1778 is useful for knowing why a merge tool was chosen.
1779 1779 """
1780 1780 opts = pycompat.byteskwargs(opts)
1781 1781 overrides = {}
1782 1782 if opts['tool']:
1783 1783 overrides[('ui', 'forcemerge')] = opts['tool']
1784 1784 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1785 1785
1786 1786 with ui.configoverride(overrides, 'debugmergepatterns'):
1787 1787 hgmerge = encoding.environ.get("HGMERGE")
1788 1788 if hgmerge is not None:
1789 1789 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1790 1790 uimerge = ui.config("ui", "merge")
1791 1791 if uimerge:
1792 1792 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1793 1793
1794 1794 ctx = scmutil.revsingle(repo, opts.get('rev'))
1795 1795 m = scmutil.match(ctx, pats, opts)
1796 1796 changedelete = opts['changedelete']
1797 1797 for path in ctx.walk(m):
1798 1798 fctx = ctx[path]
1799 1799 try:
1800 1800 if not ui.debugflag:
1801 1801 ui.pushbuffer(error=True)
1802 1802 tool, toolpath = filemerge._picktool(repo, ui, path,
1803 1803 fctx.isbinary(),
1804 1804 'l' in fctx.flags(),
1805 1805 changedelete)
1806 1806 finally:
1807 1807 if not ui.debugflag:
1808 1808 ui.popbuffer()
1809 1809 ui.write(('%s = %s\n') % (path, tool))
1810 1810
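# Example output (a sketch only; the chosen tool depends on the local
# merge-tools configuration and on the fallback described in the docstring
# above, typically ``:merge`` or ``:prompt``):
#
#   $ hg debugpickmergetool
#   README = :merge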
1811 1811 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1812 1812 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1813 1813 '''access the pushkey key/value protocol
1814 1814
1815 1815 With two args, list the keys in the given namespace.
1816 1816
1817 1817 With five args, set a key to new if it currently is set to old.
1818 1818 Reports success or failure.
1819 1819 '''
1820 1820
1821 1821 target = hg.peer(ui, {}, repopath)
1822 1822 if keyinfo:
1823 1823 key, old, new = keyinfo
1824 1824 r = target.pushkey(namespace, key, old, new)
1825 1825 ui.status(pycompat.bytestr(r) + '\n')
1826 1826 return not r
1827 1827 else:
1828 1828 for k, v in sorted(target.listkeys(namespace).iteritems()):
1829 1829 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1830 1830 stringutil.escapestr(v)))
1831 1831
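# Example (illustrative): `hg debugpushkey /path/to/repo namespaces` lists
# the available pushkey namespaces (such as 'bookmarks' and 'phases'),
# while the five-argument form conditionally updates a single key as
# described in the docstring above.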
1832 1832 @command('debugpvec', [], _('A B'))
1833 1833 def debugpvec(ui, repo, a, b=None):
1834 1834 ca = scmutil.revsingle(repo, a)
1835 1835 cb = scmutil.revsingle(repo, b)
1836 1836 pa = pvec.ctxpvec(ca)
1837 1837 pb = pvec.ctxpvec(cb)
1838 1838 if pa == pb:
1839 1839 rel = "="
1840 1840 elif pa > pb:
1841 1841 rel = ">"
1842 1842 elif pa < pb:
1843 1843 rel = "<"
1844 1844 elif pa | pb:
1845 1845 rel = "|"
1846 1846 ui.write(_("a: %s\n") % pa)
1847 1847 ui.write(_("b: %s\n") % pb)
1848 1848 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1849 1849 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1850 1850 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1851 1851 pa.distance(pb), rel))
1852 1852
1853 1853 @command('debugrebuilddirstate|debugrebuildstate',
1854 1854 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1855 1855 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1856 1856 'the working copy parent')),
1857 1857 ],
1858 1858 _('[-r REV]'))
1859 1859 def debugrebuilddirstate(ui, repo, rev, **opts):
1860 1860 """rebuild the dirstate as it would look like for the given revision
1861 1861
1862 1862 If no revision is specified the first current parent will be used.
1863 1863
1864 1864 The dirstate will be set to the files of the given revision.
1865 1865 The actual working directory content or existing dirstate
1866 1866 information such as adds or removes is not considered.
1867 1867
1868 1868 ``minimal`` will only rebuild the dirstate status for files that claim to be
1869 1869 tracked but are not in the parent manifest, or that exist in the parent
1870 1870 manifest but are not in the dirstate. It will not change adds, removes, or
1871 1871 modified files that are in the working copy parent.
1872 1872
1873 1873 One use of this command is to make the next :hg:`status` invocation
1874 1874 check the actual file content.
1875 1875 """
1876 1876 ctx = scmutil.revsingle(repo, rev)
1877 1877 with repo.wlock():
1878 1878 dirstate = repo.dirstate
1879 1879 changedfiles = None
1880 1880 # See command doc for what minimal does.
1881 1881 if opts.get(r'minimal'):
1882 1882 manifestfiles = set(ctx.manifest().keys())
1883 1883 dirstatefiles = set(dirstate)
1884 1884 manifestonly = manifestfiles - dirstatefiles
1885 1885 dsonly = dirstatefiles - manifestfiles
1886 1886 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1887 1887 changedfiles = manifestonly | dsnotadded
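# Sketch (not from the original code): if the manifest contains
# {'a', 'b'} and the dirstate tracks {'b', 'c'} with 'c' marked as added,
# then manifestonly == {'a'}, dsonly == {'c'}, dsnotadded == set() and
# only 'a' gets rebuilt.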
1888 1888
1889 1889 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1890 1890
1891 1891 @command('debugrebuildfncache', [], '')
1892 1892 def debugrebuildfncache(ui, repo):
1893 1893 """rebuild the fncache file"""
1894 1894 repair.rebuildfncache(ui, repo)
1895 1895
1896 1896 @command('debugrename',
1897 1897 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1898 1898 _('[-r REV] FILE'))
1899 1899 def debugrename(ui, repo, file1, *pats, **opts):
1900 1900 """dump rename information"""
1901 1901
1902 1902 opts = pycompat.byteskwargs(opts)
1903 1903 ctx = scmutil.revsingle(repo, opts.get('rev'))
1904 1904 m = scmutil.match(ctx, (file1,) + pats, opts)
1905 1905 for abs in ctx.walk(m):
1906 1906 fctx = ctx[abs]
1907 1907 o = fctx.filelog().renamed(fctx.filenode())
1908 1908 rel = m.rel(abs)
1909 1909 if o:
1910 1910 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1911 1911 else:
1912 1912 ui.write(_("%s not renamed\n") % rel)
1913 1913
1914 1914 @command('debugrevlog', cmdutil.debugrevlogopts +
1915 1915 [('d', 'dump', False, _('dump index data'))],
1916 1916 _('-c|-m|FILE'),
1917 1917 optionalrepo=True)
1918 1918 def debugrevlog(ui, repo, file_=None, **opts):
1919 1919 """show data and statistics about a revlog"""
1920 1920 opts = pycompat.byteskwargs(opts)
1921 1921 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1922 1922
1923 1923 if opts.get("dump"):
1924 1924 numrevs = len(r)
1925 1925 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1926 1926 " rawsize totalsize compression heads chainlen\n"))
1927 1927 ts = 0
1928 1928 heads = set()
1929 1929
1930 1930 for rev in xrange(numrevs):
1931 1931 dbase = r.deltaparent(rev)
1932 1932 if dbase == -1:
1933 1933 dbase = rev
1934 1934 cbase = r.chainbase(rev)
1935 1935 clen = r.chainlen(rev)
1936 1936 p1, p2 = r.parentrevs(rev)
1937 1937 rs = r.rawsize(rev)
1938 1938 ts = ts + rs
1939 1939 heads -= set(r.parentrevs(rev))
1940 1940 heads.add(rev)
1941 1941 try:
1942 1942 compression = ts / r.end(rev)
1943 1943 except ZeroDivisionError:
1944 1944 compression = 0
1945 1945 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1946 1946 "%11d %5d %8d\n" %
1947 1947 (rev, p1, p2, r.start(rev), r.end(rev),
1948 1948 r.start(dbase), r.start(cbase),
1949 1949 r.start(p1), r.start(p2),
1950 1950 rs, ts, compression, len(heads), clen))
1951 1951 return 0
1952 1952
1953 1953 v = r.version
1954 1954 format = v & 0xFFFF
1955 1955 flags = []
1956 1956 gdelta = False
1957 1957 if v & revlog.FLAG_INLINE_DATA:
1958 1958 flags.append('inline')
1959 1959 if v & revlog.FLAG_GENERALDELTA:
1960 1960 gdelta = True
1961 1961 flags.append('generaldelta')
1962 1962 if not flags:
1963 1963 flags = ['(none)']
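# For illustration (not in the original source): a version-1 revlog with
# inline data and generaldelta stores 0x30001 in its version field, which
# decodes above to format 1 and flags ['inline', 'generaldelta'].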
1964 1964
1965 1965 nummerges = 0
1966 1966 numfull = 0
1967 1967 numprev = 0
1968 1968 nump1 = 0
1969 1969 nump2 = 0
1970 1970 numother = 0
1971 1971 nump1prev = 0
1972 1972 nump2prev = 0
1973 1973 chainlengths = []
1974 1974 chainbases = []
1975 1975 chainspans = []
1976 1976
1977 1977 datasize = [None, 0, 0]
1978 1978 fullsize = [None, 0, 0]
1979 1979 deltasize = [None, 0, 0]
1980 1980 chunktypecounts = {}
1981 1981 chunktypesizes = {}
1982 1982
1983 1983 def addsize(size, l):
1984 1984 if l[0] is None or size < l[0]:
1985 1985 l[0] = size
1986 1986 if size > l[1]:
1987 1987 l[1] = size
1988 1988 l[2] += size
1989 1989
1990 1990 numrevs = len(r)
1991 1991 for rev in xrange(numrevs):
1992 1992 p1, p2 = r.parentrevs(rev)
1993 1993 delta = r.deltaparent(rev)
1994 1994 if format > 0:
1995 1995 addsize(r.rawsize(rev), datasize)
1996 1996 if p2 != nullrev:
1997 1997 nummerges += 1
1998 1998 size = r.length(rev)
1999 1999 if delta == nullrev:
2000 2000 chainlengths.append(0)
2001 2001 chainbases.append(r.start(rev))
2002 2002 chainspans.append(size)
2003 2003 numfull += 1
2004 2004 addsize(size, fullsize)
2005 2005 else:
2006 2006 chainlengths.append(chainlengths[delta] + 1)
2007 2007 baseaddr = chainbases[delta]
2008 2008 revaddr = r.start(rev)
2009 2009 chainbases.append(baseaddr)
2010 2010 chainspans.append((revaddr - baseaddr) + size)
2011 2011 addsize(size, deltasize)
2012 2012 if delta == rev - 1:
2013 2013 numprev += 1
2014 2014 if delta == p1:
2015 2015 nump1prev += 1
2016 2016 elif delta == p2:
2017 2017 nump2prev += 1
2018 2018 elif delta == p1:
2019 2019 nump1 += 1
2020 2020 elif delta == p2:
2021 2021 nump2 += 1
2022 2022 elif delta != nullrev:
2023 2023 numother += 1
2024 2024
2025 2025 # Obtain data on the raw chunks in the revlog.
2026 2026 segment = r._getsegmentforrevs(rev, rev)[1]
2027 2027 if segment:
2028 2028 chunktype = bytes(segment[0:1])
2029 2029 else:
2030 2030 chunktype = 'empty'
2031 2031
2032 2032 if chunktype not in chunktypecounts:
2033 2033 chunktypecounts[chunktype] = 0
2034 2034 chunktypesizes[chunktype] = 0
2035 2035
2036 2036 chunktypecounts[chunktype] += 1
2037 2037 chunktypesizes[chunktype] += size
2038 2038
2039 2039 # Adjust size min value for empty cases
2040 2040 for size in (datasize, fullsize, deltasize):
2041 2041 if size[0] is None:
2042 2042 size[0] = 0
2043 2043
2044 2044 numdeltas = numrevs - numfull
2045 2045 numoprev = numprev - nump1prev - nump2prev
2046 2046 totalrawsize = datasize[2]
2047 2047 datasize[2] /= numrevs
2048 2048 fulltotal = fullsize[2]
2049 2049 fullsize[2] /= numfull
2050 2050 deltatotal = deltasize[2]
2051 2051 if numrevs - numfull > 0:
2052 2052 deltasize[2] /= numrevs - numfull
2053 2053 totalsize = fulltotal + deltatotal
2054 2054 avgchainlen = sum(chainlengths) / numrevs
2055 2055 maxchainlen = max(chainlengths)
2056 2056 maxchainspan = max(chainspans)
2057 2057 compratio = 1
2058 2058 if totalsize:
2059 2059 compratio = totalrawsize / totalsize
2060 2060
2061 2061 basedfmtstr = '%%%dd\n'
2062 2062 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2063 2063
2064 2064 def dfmtstr(max):
2065 2065 return basedfmtstr % len(str(max))
2066 2066 def pcfmtstr(max, padding=0):
2067 2067 return basepcfmtstr % (len(str(max)), ' ' * padding)
2068 2068
2069 2069 def pcfmt(value, total):
2070 2070 if total:
2071 2071 return (value, 100 * float(value) / total)
2072 2072 else:
2073 2073 return value, 100.0
2074 2074
2075 2075 ui.write(('format : %d\n') % format)
2076 2076 ui.write(('flags : %s\n') % ', '.join(flags))
2077 2077
2078 2078 ui.write('\n')
2079 2079 fmt = pcfmtstr(totalsize)
2080 2080 fmt2 = dfmtstr(totalsize)
2081 2081 ui.write(('revisions : ') + fmt2 % numrevs)
2082 2082 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2083 2083 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2084 2084 ui.write(('revisions : ') + fmt2 % numrevs)
2085 2085 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2086 2086 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2087 2087 ui.write(('revision size : ') + fmt2 % totalsize)
2088 2088 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2089 2089 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2090 2090
2091 2091 def fmtchunktype(chunktype):
2092 2092 if chunktype == 'empty':
2093 2093 return ' %s : ' % chunktype
2094 2094 elif chunktype in pycompat.bytestr(string.ascii_letters):
2095 2095 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2096 2096 else:
2097 2097 return ' 0x%s : ' % hex(chunktype)
2098 2098
2099 2099 ui.write('\n')
2100 2100 ui.write(('chunks : ') + fmt2 % numrevs)
2101 2101 for chunktype in sorted(chunktypecounts):
2102 2102 ui.write(fmtchunktype(chunktype))
2103 2103 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2104 2104 ui.write(('chunks size : ') + fmt2 % totalsize)
2105 2105 for chunktype in sorted(chunktypecounts):
2106 2106 ui.write(fmtchunktype(chunktype))
2107 2107 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2108 2108
2109 2109 ui.write('\n')
2110 2110 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2111 2111 ui.write(('avg chain length : ') + fmt % avgchainlen)
2112 2112 ui.write(('max chain length : ') + fmt % maxchainlen)
2113 2113 ui.write(('max chain reach : ') + fmt % maxchainspan)
2114 2114 ui.write(('compression ratio : ') + fmt % compratio)
2115 2115
2116 2116 if format > 0:
2117 2117 ui.write('\n')
2118 2118 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2119 2119 % tuple(datasize))
2120 2120 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2121 2121 % tuple(fullsize))
2122 2122 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2123 2123 % tuple(deltasize))
2124 2124
2125 2125 if numdeltas > 0:
2126 2126 ui.write('\n')
2127 2127 fmt = pcfmtstr(numdeltas)
2128 2128 fmt2 = pcfmtstr(numdeltas, 4)
2129 2129 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2130 2130 if numprev > 0:
2131 2131 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2132 2132 numprev))
2133 2133 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2134 2134 numprev))
2135 2135 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2136 2136 numprev))
2137 2137 if gdelta:
2138 2138 ui.write(('deltas against p1 : ')
2139 2139 + fmt % pcfmt(nump1, numdeltas))
2140 2140 ui.write(('deltas against p2 : ')
2141 2141 + fmt % pcfmt(nump2, numdeltas))
2142 2142 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2143 2143 numdeltas))
2144 2144
2145 2145 @command('debugrevspec',
2146 2146 [('', 'optimize', None,
2147 2147 _('print parsed tree after optimizing (DEPRECATED)')),
2148 2148 ('', 'show-revs', True, _('print list of result revisions (default)')),
2149 2149 ('s', 'show-set', None, _('print internal representation of result set')),
2150 2150 ('p', 'show-stage', [],
2151 2151 _('print parsed tree at the given stage'), _('NAME')),
2152 2152 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2153 2153 ('', 'verify-optimized', False, _('verify optimized result')),
2154 2154 ],
2155 2155 ('REVSPEC'))
2156 2156 def debugrevspec(ui, repo, expr, **opts):
2157 2157 """parse and apply a revision specification
2158 2158
2159 2159 Use -p/--show-stage option to print the parsed tree at the given stages.
2160 2160 Use -p all to print tree at every stage.
2161 2161
2162 2162 Use --no-show-revs option with -s or -p to print only the set
2163 2163 representation or the parsed tree respectively.
2164 2164
2165 2165 Use --verify-optimized to compare the optimized result with the unoptimized
2166 2166 one. Returns 1 if the optimized result differs.
2167 2167 """
2168 2168 opts = pycompat.byteskwargs(opts)
2169 2169 aliases = ui.configitems('revsetalias')
2170 2170 stages = [
2171 2171 ('parsed', lambda tree: tree),
2172 2172 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2173 2173 ui.warn)),
2174 2174 ('concatenated', revsetlang.foldconcat),
2175 2175 ('analyzed', revsetlang.analyze),
2176 2176 ('optimized', revsetlang.optimize),
2177 2177 ]
2178 2178 if opts['no_optimized']:
2179 2179 stages = stages[:-1]
2180 2180 if opts['verify_optimized'] and opts['no_optimized']:
2181 2181 raise error.Abort(_('cannot use --verify-optimized with '
2182 2182 '--no-optimized'))
2183 2183 stagenames = set(n for n, f in stages)
2184 2184
2185 2185 showalways = set()
2186 2186 showchanged = set()
2187 2187 if ui.verbose and not opts['show_stage']:
2188 2188 # show parsed tree by --verbose (deprecated)
2189 2189 showalways.add('parsed')
2190 2190 showchanged.update(['expanded', 'concatenated'])
2191 2191 if opts['optimize']:
2192 2192 showalways.add('optimized')
2193 2193 if opts['show_stage'] and opts['optimize']:
2194 2194 raise error.Abort(_('cannot use --optimize with --show-stage'))
2195 2195 if opts['show_stage'] == ['all']:
2196 2196 showalways.update(stagenames)
2197 2197 else:
2198 2198 for n in opts['show_stage']:
2199 2199 if n not in stagenames:
2200 2200 raise error.Abort(_('invalid stage name: %s') % n)
2201 2201 showalways.update(opts['show_stage'])
2202 2202
2203 2203 treebystage = {}
2204 2204 printedtree = None
2205 2205 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2206 2206 for n, f in stages:
2207 2207 treebystage[n] = tree = f(tree)
2208 2208 if n in showalways or (n in showchanged and tree != printedtree):
2209 2209 if opts['show_stage'] or n != 'parsed':
2210 2210 ui.write(("* %s:\n") % n)
2211 2211 ui.write(revsetlang.prettyformat(tree), "\n")
2212 2212 printedtree = tree
2213 2213
2214 2214 if opts['verify_optimized']:
2215 2215 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2216 2216 brevs = revset.makematcher(treebystage['optimized'])(repo)
2217 2217 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2218 2218 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2219 2219 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2220 2220 arevs = list(arevs)
2221 2221 brevs = list(brevs)
2222 2222 if arevs == brevs:
2223 2223 return 0
2224 2224 ui.write(('--- analyzed\n'), label='diff.file_a')
2225 2225 ui.write(('+++ optimized\n'), label='diff.file_b')
2226 2226 sm = difflib.SequenceMatcher(None, arevs, brevs)
2227 2227 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2228 2228 if tag in ('delete', 'replace'):
2229 2229 for c in arevs[alo:ahi]:
2230 2230 ui.write('-%s\n' % c, label='diff.deleted')
2231 2231 if tag in ('insert', 'replace'):
2232 2232 for c in brevs[blo:bhi]:
2233 2233 ui.write('+%s\n' % c, label='diff.inserted')
2234 2234 if tag == 'equal':
2235 2235 for c in arevs[alo:ahi]:
2236 2236 ui.write(' %s\n' % c)
2237 2237 return 1
2238 2238
2239 2239 func = revset.makematcher(tree)
2240 2240 revs = func(repo)
2241 2241 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2242 2242 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2243 2243 if not opts['show_revs']:
2244 2244 return
2245 2245 for c in revs:
2246 2246 ui.write("%d\n" % c)
2247 2247
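# Example invocation (a sketch; the revset is only an illustration):
#
#   $ hg debugrevspec -p all 'parents(tip)'
#
# prints the tree after each stage named above (parsed, expanded,
# concatenated, analyzed, optimized) followed by the matching revisions.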
2248 2248 @command('debugserve', [
2249 2249 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2250 2250 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2251 2251 ('', 'logiofile', '', _('file to log server I/O to')),
2252 2252 ], '')
2253 2253 def debugserve(ui, repo, **opts):
2254 2254 """run a server with advanced settings
2255 2255
2256 2256 This command is similar to :hg:`serve`. It exists partially as a
2257 2257 workaround to the fact that ``hg serve --stdio`` must have specific
2258 2258 arguments for security reasons.
2259 2259 """
2260 2260 opts = pycompat.byteskwargs(opts)
2261 2261
2262 2262 if not opts['sshstdio']:
2263 2263 raise error.Abort(_('only --sshstdio is currently supported'))
2264 2264
2265 2265 logfh = None
2266 2266
2267 2267 if opts['logiofd'] and opts['logiofile']:
2268 2268 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2269 2269
2270 2270 if opts['logiofd']:
2271 2271 # Line buffered because output is line based.
2272 2272 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2273 2273 elif opts['logiofile']:
2274 2274 logfh = open(opts['logiofile'], 'ab', 1)
2275 2275
2276 2276 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2277 2277 s.serve_forever()
2278 2278
2279 2279 @command('debugsetparents', [], _('REV1 [REV2]'))
2280 2280 def debugsetparents(ui, repo, rev1, rev2=None):
2281 2281 """manually set the parents of the current working directory
2282 2282
2283 2283 This is useful for writing repository conversion tools, but should
2284 2284 be used with care. For example, neither the working directory nor the
2285 2285 dirstate is updated, so file status may be incorrect after running this
2286 2286 command.
2287 2287
2288 2288 Returns 0 on success.
2289 2289 """
2290 2290
2291 r1 = scmutil.revsingle(repo, rev1).node()
2292 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2291 node1 = scmutil.revsingle(repo, rev1).node()
2292 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2293 2293
2294 2294 with repo.wlock():
2295 repo.setparents(r1, r2)
2295 repo.setparents(node1, node2)
2296 2296
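# Usage sketch (illustrative; REV placeholders stand for any revision
# identifiers): a conversion tool could run `hg debugsetparents REV` to make
# REV the sole parent, or `hg debugsetparents REV1 REV2` to record a pending
# merge. Since the dirstate is not touched here, one way to refresh file
# status afterwards is `hg debugrebuilddirstate` (defined earlier in this
# module).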
2297 2297 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2298 2298 def debugssl(ui, repo, source=None, **opts):
2299 2299 '''test a secure connection to a server
2300 2300
2301 2301 This builds the certificate chain for the server on Windows, installing the
2302 2302 missing intermediates and trusted root via Windows Update if necessary. It
2303 2303 does nothing on other platforms.
2304 2304
2305 2305 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2306 2306 that server is used. See :hg:`help urls` for more information.
2307 2307
2308 2308 If the update succeeds, retry the original operation. Otherwise, the cause
2309 2309 of the SSL error is likely another issue.
2310 2310 '''
2311 2311 if not pycompat.iswindows:
2312 2312 raise error.Abort(_('certificate chain building is only possible on '
2313 2313 'Windows'))
2314 2314
2315 2315 if not source:
2316 2316 if not repo:
2317 2317 raise error.Abort(_("there is no Mercurial repository here, and no "
2318 2318 "server specified"))
2319 2319 source = "default"
2320 2320
2321 2321 source, branches = hg.parseurl(ui.expandpath(source))
2322 2322 url = util.url(source)
2323 2323 addr = None
2324 2324
2325 2325 defaultport = {'https': 443, 'ssh': 22}
2326 2326 if url.scheme in defaultport:
2327 2327 try:
2328 2328 addr = (url.host, int(url.port or defaultport[url.scheme]))
2329 2329 except ValueError:
2330 2330 raise error.Abort(_("malformed port number in URL"))
2331 2331 else:
2332 2332 raise error.Abort(_("only https and ssh connections are supported"))
2333 2333
2334 2334 from . import win32
2335 2335
2336 2336 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2337 2337 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2338 2338
2339 2339 try:
2340 2340 s.connect(addr)
2341 2341 cert = s.getpeercert(True)
2342 2342
2343 2343 ui.status(_('checking the certificate chain for %s\n') % url.host)
2344 2344
2345 2345 complete = win32.checkcertificatechain(cert, build=False)
2346 2346
2347 2347 if not complete:
2348 2348 ui.status(_('certificate chain is incomplete, updating... '))
2349 2349
2350 2350 if not win32.checkcertificatechain(cert):
2351 2351 ui.status(_('failed.\n'))
2352 2352 else:
2353 2353 ui.status(_('done.\n'))
2354 2354 else:
2355 2355 ui.status(_('full certificate chain is available\n'))
2356 2356 finally:
2357 2357 s.close()
2358 2358
2359 2359 @command('debugsub',
2360 2360 [('r', 'rev', '',
2361 2361 _('revision to check'), _('REV'))],
2362 2362 _('[-r REV] [REV]'))
2363 2363 def debugsub(ui, repo, rev=None):
2364 2364 ctx = scmutil.revsingle(repo, rev, None)
2365 2365 for k, v in sorted(ctx.substate.items()):
2366 2366 ui.write(('path %s\n') % k)
2367 2367 ui.write((' source %s\n') % v[0])
2368 2368 ui.write((' revision %s\n') % v[1])
2369 2369
2370 2370 @command('debugsuccessorssets',
2371 2371 [('', 'closest', False, _('return closest successors sets only'))],
2372 2372 _('[REV]'))
2373 2373 def debugsuccessorssets(ui, repo, *revs, **opts):
2374 2374 """show set of successors for revision
2375 2375
2376 2376 A successors set of changeset A is a consistent group of revisions that
2377 2377 succeed A. It contains non-obsolete changesets only unless closests
2378 2378 successors set is set.
2379 2379
2380 2380 In most cases a changeset A has a single successors set containing a single
2381 2381 successor (changeset A replaced by A').
2382 2382
2383 2383 A changeset that is made obsolete with no successors is called "pruned".
2384 2384 Such changesets have no successors sets at all.
2385 2385
2386 2386 A changeset that has been "split" will have a successors set containing
2387 2387 more than one successor.
2388 2388
2389 2389 A changeset that has been rewritten in multiple different ways is called
2390 2390 "divergent". Such changesets have multiple successor sets (each of which
2391 2391 may also be split, i.e. have multiple successors).
2392 2392
2393 2393 Results are displayed as follows::
2394 2394
2395 2395 <rev1>
2396 2396 <successors-1A>
2397 2397 <rev2>
2398 2398 <successors-2A>
2399 2399 <successors-2B1> <successors-2B2> <successors-2B3>
2400 2400
2401 2401 Here rev2 has two possible (i.e. divergent) successors sets. The first
2402 2402 holds one element, whereas the second holds three (i.e. the changeset has
2403 2403 been split).
2404 2404 """
2405 2405 # passed to successorssets caching computation from one call to another
2406 2406 cache = {}
2407 2407 ctx2str = bytes
2408 2408 node2str = short
2409 2409 for rev in scmutil.revrange(repo, revs):
2410 2410 ctx = repo[rev]
2411 2411 ui.write('%s\n'% ctx2str(ctx))
2412 2412 for succsset in obsutil.successorssets(repo, ctx.node(),
2413 2413 closest=opts[r'closest'],
2414 2414 cache=cache):
2415 2415 if succsset:
2416 2416 ui.write(' ')
2417 2417 ui.write(node2str(succsset[0]))
2418 2418 for node in succsset[1:]:
2419 2419 ui.write(' ')
2420 2420 ui.write(node2str(node))
2421 2421 ui.write('\n')
2422 2422
2423 2423 @command('debugtemplate',
2424 2424 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2425 2425 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2426 2426 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2427 2427 optionalrepo=True)
2428 2428 def debugtemplate(ui, repo, tmpl, **opts):
2429 2429 """parse and apply a template
2430 2430
2431 2431 If -r/--rev is given, the template is processed as a log template and
2432 2432 applied to the given changesets. Otherwise, it is processed as a generic
2433 2433 template.
2434 2434
2435 2435 Use --verbose to print the parsed tree.
2436 2436 """
2437 2437 revs = None
2438 2438 if opts[r'rev']:
2439 2439 if repo is None:
2440 2440 raise error.RepoError(_('there is no Mercurial repository here '
2441 2441 '(.hg not found)'))
2442 2442 revs = scmutil.revrange(repo, opts[r'rev'])
2443 2443
2444 2444 props = {}
2445 2445 for d in opts[r'define']:
2446 2446 try:
2447 2447 k, v = (e.strip() for e in d.split('=', 1))
2448 2448 if not k or k == 'ui':
2449 2449 raise ValueError
2450 2450 props[k] = v
2451 2451 except ValueError:
2452 2452 raise error.Abort(_('malformed keyword definition: %s') % d)
2453 2453
2454 2454 if ui.verbose:
2455 2455 aliases = ui.configitems('templatealias')
2456 2456 tree = templater.parse(tmpl)
2457 2457 ui.note(templater.prettyformat(tree), '\n')
2458 2458 newtree = templater.expandaliases(tree, aliases)
2459 2459 if newtree != tree:
2460 2460 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2461 2461
2462 2462 if revs is None:
2463 2463 tres = formatter.templateresources(ui, repo)
2464 2464 t = formatter.maketemplater(ui, tmpl, resources=tres)
2465 2465 ui.write(t.renderdefault(props))
2466 2466 else:
2467 2467 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2468 2468 for r in revs:
2469 2469 displayer.show(repo[r], **pycompat.strkwargs(props))
2470 2470 displayer.close()
2471 2471
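# Example (a sketch; the template and keyword are arbitrary):
#
#   $ hg debugtemplate -r . '{rev}:{node|short}\n'
#   $ hg debugtemplate -D greeting=hi '{greeting} world\n'
#
# The first form renders a log template against the working directory
# parent; the second renders a generic template with a user-defined keyword.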
2472 2472 @command('debuguigetpass', [
2473 2473 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2474 2474 ], _('[-p TEXT]'), norepo=True)
2475 2475 def debuguigetpass(ui, prompt=''):
2476 2476 """show prompt to type password"""
2477 2477 r = ui.getpass(prompt)
2478 2478 ui.write(('response: %s\n') % r)
2479 2479
2480 2480 @command('debuguiprompt', [
2481 2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2482 2482 ], _('[-p TEXT]'), norepo=True)
2483 2483 def debuguiprompt(ui, prompt=''):
2484 2484 """show plain prompt"""
2485 2485 r = ui.prompt(prompt)
2486 2486 ui.write(('response: %s\n') % r)
2487 2487
2488 2488 @command('debugupdatecaches', [])
2489 2489 def debugupdatecaches(ui, repo, *pats, **opts):
2490 2490 """warm all known caches in the repository"""
2491 2491 with repo.wlock(), repo.lock():
2492 2492 repo.updatecaches(full=True)
2493 2493
2494 2494 @command('debugupgraderepo', [
2495 2495 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2496 2496 ('', 'run', False, _('performs an upgrade')),
2497 2497 ])
2498 2498 def debugupgraderepo(ui, repo, run=False, optimize=None):
2499 2499 """upgrade a repository to use different features
2500 2500
2501 2501 If no arguments are specified, the repository is evaluated for upgrade
2502 2502 and a list of problems and potential optimizations is printed.
2503 2503
2504 2504 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2505 2505 can be influenced via additional arguments. More details will be provided
2506 2506 by the command output when run without ``--run``.
2507 2507
2508 2508 During the upgrade, the repository will be locked and no writes will be
2509 2509 allowed.
2510 2510
2511 2511 At the end of the upgrade, the repository may not be readable while new
2512 2512 repository data is swapped in. This window will be as long as it takes to
2513 2513 rename some directories inside the ``.hg`` directory. On most machines, this
2514 2514 should complete almost instantaneously and the chances of a consumer being
2515 2515 unable to access the repository should be low.
2516 2516 """
2517 2517 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2518 2518
2519 2519 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2520 2520 inferrepo=True)
2521 2521 def debugwalk(ui, repo, *pats, **opts):
2522 2522 """show how files match on given patterns"""
2523 2523 opts = pycompat.byteskwargs(opts)
2524 2524 m = scmutil.match(repo[None], pats, opts)
2525 2525 ui.write(('matcher: %r\n' % m))
2526 2526 items = list(repo[None].walk(m))
2527 2527 if not items:
2528 2528 return
2529 2529 f = lambda fn: fn
2530 2530 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2531 2531 f = lambda fn: util.normpath(fn)
2532 2532 fmt = 'f %%-%ds %%-%ds %%s' % (
2533 2533 max([len(abs) for abs in items]),
2534 2534 max([len(m.rel(abs)) for abs in items]))
2535 2535 for abs in items:
2536 2536 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2537 2537 ui.write("%s\n" % line.rstrip())
2538 2538
2539 2539 @command('debugwhyunstable', [], _('REV'))
2540 2540 def debugwhyunstable(ui, repo, rev):
2541 2541 """explain instabilities of a changeset"""
2542 2542 for entry in obsutil.whyunstable(repo, repo[rev]):
2543 2543 dnodes = ''
2544 2544 if entry.get('divergentnodes'):
2545 2545 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2546 2546 for ctx in entry['divergentnodes']) + ' '
2547 2547 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2548 2548 entry['reason'], entry['node']))
2549 2549
2550 2550 @command('debugwireargs',
2551 2551 [('', 'three', '', 'three'),
2552 2552 ('', 'four', '', 'four'),
2553 2553 ('', 'five', '', 'five'),
2554 2554 ] + cmdutil.remoteopts,
2555 2555 _('REPO [OPTIONS]... [ONE [TWO]]'),
2556 2556 norepo=True)
2557 2557 def debugwireargs(ui, repopath, *vals, **opts):
2558 2558 opts = pycompat.byteskwargs(opts)
2559 2559 repo = hg.peer(ui, opts, repopath)
2560 2560 for opt in cmdutil.remoteopts:
2561 2561 del opts[opt[1]]
2562 2562 args = {}
2563 2563 for k, v in opts.iteritems():
2564 2564 if v:
2565 2565 args[k] = v
2566 2566 args = pycompat.strkwargs(args)
2567 2567 # run twice to check that we don't mess up the stream for the next command
2568 2568 res1 = repo.debugwireargs(*vals, **args)
2569 2569 res2 = repo.debugwireargs(*vals, **args)
2570 2570 ui.write("%s\n" % res1)
2571 2571 if res1 != res2:
2572 2572 ui.warn("%s\n" % res2)
2573 2573
2574 2574 def _parsewirelangblocks(fh):
2575 2575 activeaction = None
2576 2576 blocklines = []
2577 2577
2578 2578 for line in fh:
2579 2579 line = line.rstrip()
2580 2580 if not line:
2581 2581 continue
2582 2582
2583 2583 if line.startswith(b'#'):
2584 2584 continue
2585 2585
2586 2586 if not line.startswith(' '):
2587 2587 # New block. Flush previous one.
2588 2588 if activeaction:
2589 2589 yield activeaction, blocklines
2590 2590
2591 2591 activeaction = line
2592 2592 blocklines = []
2593 2593 continue
2594 2594
2595 2595 # Else we start with an indent.
2596 2596
2597 2597 if not activeaction:
2598 2598 raise error.Abort(_('indented line outside of block'))
2599 2599
2600 2600 blocklines.append(line)
2601 2601
2602 2602 # Flush last block.
2603 2603 if activeaction:
2604 2604 yield activeaction, blocklines
2605 2605
2606 2606 @command('debugwireproto',
2607 2607 [
2608 2608 ('', 'localssh', False, _('start an SSH server for this repo')),
2609 2609 ('', 'peer', '', _('construct a specific version of the peer')),
2610 2610 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2611 2611 ] + cmdutil.remoteopts,
2612 2612 _('[PATH]'),
2613 2613 optionalrepo=True)
2614 2614 def debugwireproto(ui, repo, path=None, **opts):
2615 2615 """send wire protocol commands to a server
2616 2616
2617 2617 This command can be used to issue wire protocol commands to remote
2618 2618 peers and to debug the raw data being exchanged.
2619 2619
2620 2620 ``--localssh`` will start an SSH server against the current repository
2621 2621 and connect to that. By default, the connection will perform a handshake
2622 2622 and establish an appropriate peer instance.
2623 2623
2624 2624 ``--peer`` can be used to bypass the handshake protocol and construct a
2625 2625 peer instance using the specified class type. Valid values are ``raw``,
2626 2626 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2627 2627 payloads and don't support higher-level command actions.
2628 2628
2629 2629 ``--noreadstderr`` can be used to disable automatic reading from stderr
2630 2630 of the peer (for SSH connections only). Disabling automatic reading of
2631 2631 stderr is useful for making output more deterministic.
2632 2632
2633 2633 Commands are issued via a mini language which is specified via stdin.
2634 2634 The language consists of individual actions to perform. An action is
2635 2635 defined by a block. A block is defined as a line with no leading
2636 2636 space followed by 0 or more lines with leading space. Blocks are
2637 2637 effectively a high-level command with additional metadata.
2638 2638
2639 2639 Lines beginning with ``#`` are ignored.
2640 2640
2641 2641 The following sections denote available actions.
2642 2642
2643 2643 raw
2644 2644 ---
2645 2645
2646 2646 Send raw data to the server.
2647 2647
2648 2648 The block payload contains the raw data to send as one atomic send
2649 2649 operation. The data may not actually be delivered in a single system
2650 2650 call: it depends on the abilities of the transport being used.
2651 2651
2652 2652 Each line in the block is de-indented and concatenated. Then, that
2653 2653 value is evaluated as a Python b'' literal. This allows the use of
2654 2654 backslash escaping, etc.
2655 2655
2656 2656 raw+
2657 2657 ----
2658 2658
2659 2659 Behaves like ``raw`` except flushes output afterwards.
2660 2660
2661 2661 command <X>
2662 2662 -----------
2663 2663
2664 2664 Send a request to run a named command, whose name follows the ``command``
2665 2665 string.
2666 2666
2667 2667 Arguments to the command are defined as lines in this block. The format of
2668 2668 each line is ``<key> <value>``. e.g.::
2669 2669
2670 2670 command listkeys
2671 2671 namespace bookmarks
2672 2672
2673 2673 Values are interpreted as Python b'' literals. This allows encoding
2674 2674 special byte sequences via backslash escaping.
2675 2675
2676 2676 The following arguments have special meaning:
2677 2677
2678 2678 ``PUSHFILE``
2679 2679 When defined, the *push* mechanism of the peer will be used instead
2680 2680 of the static request-response mechanism and the content of the
2681 2681 file specified in the value of this argument will be sent as the
2682 2682 command payload.
2683 2683
2684 2684 This can be used to submit a local bundle file to the remote.
2685 2685
2686 2686 batchbegin
2687 2687 ----------
2688 2688
2689 2689 Instruct the peer to begin a batched send.
2690 2690
2691 2691 All ``command`` blocks are queued for execution until the next
2692 2692 ``batchsubmit`` block.
2693 2693
2694 2694 batchsubmit
2695 2695 -----------
2696 2696
2697 2697 Submit previously queued ``command`` blocks as a batch request.
2698 2698
2699 2699 This action MUST be paired with a ``batchbegin`` action.
2700 2700
2701 2701 httprequest <method> <path>
2702 2702 ---------------------------
2703 2703
2704 2704 (HTTP peer only)
2705 2705
2706 2706 Send an HTTP request to the peer.
2707 2707
2708 2708 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2709 2709
2710 2710 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2711 2711 headers to add to the request. e.g. ``Accept: foo``.
2712 2712
2713 2713 The following arguments are special:
2714 2714
2715 2715 ``BODYFILE``
2716 2716 The content of the file defined as the value to this argument will be
2717 2717 transferred verbatim as the HTTP request body.
2718 2718
2719 2719 ``frame <type> <flags> <payload>``
2720 2720 Send a unified protocol frame as part of the request body.
2721 2721
2722 2722 All frames will be collected and sent as the body to the HTTP
2723 2723 request.
2724 2724
2725 2725 close
2726 2726 -----
2727 2727
2728 2728 Close the connection to the server.
2729 2729
2730 2730 flush
2731 2731 -----
2732 2732
2733 2733 Flush data written to the server.
2734 2734
2735 2735 readavailable
2736 2736 -------------
2737 2737
2738 2738 Close the write end of the connection and read all available data from
2739 2739 the server.
2740 2740
2741 2741 If the connection to the server encompasses multiple pipes, we poll both
2742 2742 pipes and read available data.
2743 2743
2744 2744 readline
2745 2745 --------
2746 2746
2747 2747 Read a line of output from the server. If there are multiple output
2748 2748 pipes, reads only the main pipe.
2749 2749
2750 2750 ereadline
2751 2751 ---------
2752 2752
2753 2753 Like ``readline``, but read from the stderr pipe, if available.
2754 2754
2755 2755 read <X>
2756 2756 --------
2757 2757
2758 2758 ``read()`` N bytes from the server's main output pipe.
2759 2759
2760 2760 eread <X>
2761 2761 ---------
2762 2762
2763 2763 ``read()`` N bytes from the server's stderr pipe, if available.
2764 2764
2765 2765 Specifying Unified Frame-Based Protocol Frames
2766 2766 ----------------------------------------------
2767 2767
2768 2768 It is possible to emit *Unified Frame-Based Protocol* frames by using
2769 2769 special syntax.
2770 2770
2771 2771 A frame is composed as a type, flags, and payload. These can be parsed
2772 2772 from a string of the form ``<requestid> <type> <flags> <payload>``. That is,
2773 2773 4 space-delimited strings.
2774 2774
2775 2775 ``payload`` is the simplest: it is evaluated as a Python byte string
2776 2776 literal.
2777 2777
2778 2778 ``requestid`` is an integer defining the request identifier.
2779 2779
2780 2780 ``type`` can be an integer value for the frame type or the string name
2781 2781 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2782 2782 ``command-name``.
2783 2783
2784 2784 ``flags`` is a ``|`` delimited list of flag components. Each component
2785 2785 (and there can be just one) can be an integer or a flag name for the
2786 2786 specified frame type. Values are resolved to integers and then bitwise
2787 2787 OR'd together.
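
    For example (an illustration only), a line of the form
    ``1 command-name 0 <payload>`` denotes request 1, a frame of type
    ``command-name`` with no flags set, and whatever ``<payload>`` evaluates
    to as a byte string literal.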
2788 2788 """
2789 2789 opts = pycompat.byteskwargs(opts)
2790 2790
2791 2791 if opts['localssh'] and not repo:
2792 2792 raise error.Abort(_('--localssh requires a repository'))
2793 2793
2794 2794 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2795 2795 raise error.Abort(_('invalid value for --peer'),
2796 2796 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2797 2797
2798 2798 if path and opts['localssh']:
2799 2799 raise error.Abort(_('cannot specify --localssh with an explicit '
2800 2800 'path'))
2801 2801
2802 2802 if ui.interactive():
2803 2803 ui.write(_('(waiting for commands on stdin)\n'))
2804 2804
2805 2805 blocks = list(_parsewirelangblocks(ui.fin))
2806 2806
2807 2807 proc = None
2808 2808 stdin = None
2809 2809 stdout = None
2810 2810 stderr = None
2811 2811 opener = None
2812 2812
2813 2813 if opts['localssh']:
2814 2814 # We start the SSH server in its own process so there is process
2815 2815 # separation. This prevents a whole class of potential bugs around
2816 2816 # shared state from interfering with server operation.
2817 2817 args = procutil.hgcmd() + [
2818 2818 '-R', repo.root,
2819 2819 'debugserve', '--sshstdio',
2820 2820 ]
2821 2821 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2822 2822 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2823 2823 bufsize=0)
2824 2824
2825 2825 stdin = proc.stdin
2826 2826 stdout = proc.stdout
2827 2827 stderr = proc.stderr
2828 2828
2829 2829 # We turn the pipes into observers so we can log I/O.
2830 2830 if ui.verbose or opts['peer'] == 'raw':
2831 2831 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2832 2832 logdata=True)
2833 2833 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2834 2834 logdata=True)
2835 2835 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2836 2836 logdata=True)
2837 2837
2838 2838 # --localssh also implies the peer connection settings.
2839 2839
2840 2840 url = 'ssh://localserver'
2841 2841 autoreadstderr = not opts['noreadstderr']
2842 2842
2843 2843 if opts['peer'] == 'ssh1':
2844 2844 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2845 2845 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2846 2846 None, autoreadstderr=autoreadstderr)
2847 2847 elif opts['peer'] == 'ssh2':
2848 2848 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2849 2849 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2850 2850 None, autoreadstderr=autoreadstderr)
2851 2851 elif opts['peer'] == 'raw':
2852 2852 ui.write(_('using raw connection to peer\n'))
2853 2853 peer = None
2854 2854 else:
2855 2855 ui.write(_('creating ssh peer from handshake results\n'))
2856 2856 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2857 2857 autoreadstderr=autoreadstderr)
2858 2858
2859 2859 elif path:
2860 2860 # We bypass hg.peer() so we can proxy the sockets.
2861 2861 # TODO consider not doing this because we skip
2862 2862 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2863 2863 u = util.url(path)
2864 2864 if u.scheme != 'http':
2865 2865 raise error.Abort(_('only http:// paths are currently supported'))
2866 2866
2867 2867 url, authinfo = u.authinfo()
2868 2868 openerargs = {}
2869 2869
2870 2870 # Turn pipes/sockets into observers so we can log I/O.
2871 2871 if ui.verbose:
2872 2872 openerargs = {
2873 2873 r'loggingfh': ui,
2874 2874 r'loggingname': b's',
2875 2875 r'loggingopts': {
2876 2876 r'logdata': True,
2877 2877 r'logdataapis': False,
2878 2878 },
2879 2879 }
2880 2880
2881 2881 if ui.debugflag:
2882 2882 openerargs[r'loggingopts'][r'logdataapis'] = True
2883 2883
2884 2884 # Don't send default headers when in raw mode. This allows us to
2885 2885 # bypass most of the behavior of our URL handling code so we can
2886 2886 # have near complete control over what's sent on the wire.
2887 2887 if opts['peer'] == 'raw':
2888 2888 openerargs[r'sendaccept'] = False
2889 2889
2890 2890 opener = urlmod.opener(ui, authinfo, **openerargs)
2891 2891
2892 2892 if opts['peer'] == 'raw':
2893 2893 ui.write(_('using raw connection to peer\n'))
2894 2894 peer = None
2895 2895 elif opts['peer']:
2896 2896 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2897 2897 opts['peer'])
2898 2898 else:
2899 2899 peer = httppeer.httppeer(ui, path, url, opener)
2900 2900 peer._fetchcaps()
2901 2901
2902 2902 # We /could/ populate stdin/stdout with sock.makefile()...
2903 2903 else:
2904 2904 raise error.Abort(_('unsupported connection configuration'))
2905 2905
2906 2906 batchedcommands = None
2907 2907
2908 2908 # Now perform actions based on the parsed wire language instructions.
2909 2909 for action, lines in blocks:
2910 2910 if action in ('raw', 'raw+'):
2911 2911 if not stdin:
2912 2912 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2913 2913
2914 2914 # Concatenate the data together.
2915 2915 data = ''.join(l.lstrip() for l in lines)
2916 2916 data = stringutil.unescapestr(data)
2917 2917 stdin.write(data)
2918 2918
2919 2919 if action == 'raw+':
2920 2920 stdin.flush()
2921 2921 elif action == 'flush':
2922 2922 if not stdin:
2923 2923 raise error.Abort(_('cannot call flush on this peer'))
2924 2924 stdin.flush()
2925 2925 elif action.startswith('command'):
2926 2926 if not peer:
2927 2927 raise error.Abort(_('cannot send commands unless peer instance '
2928 2928 'is available'))
2929 2929
2930 2930 command = action.split(' ', 1)[1]
2931 2931
2932 2932 args = {}
2933 2933 for line in lines:
2934 2934 # We need to allow empty values.
2935 2935 fields = line.lstrip().split(' ', 1)
2936 2936 if len(fields) == 1:
2937 2937 key = fields[0]
2938 2938 value = ''
2939 2939 else:
2940 2940 key, value = fields
2941 2941
2942 2942 args[key] = stringutil.unescapestr(value)
2943 2943
2944 2944 if batchedcommands is not None:
2945 2945 batchedcommands.append((command, args))
2946 2946 continue
2947 2947
2948 2948 ui.status(_('sending %s command\n') % command)
2949 2949
2950 2950 if 'PUSHFILE' in args:
2951 2951 with open(args['PUSHFILE'], r'rb') as fh:
2952 2952 del args['PUSHFILE']
2953 2953 res, output = peer._callpush(command, fh,
2954 2954 **pycompat.strkwargs(args))
2955 2955 ui.status(_('result: %s\n') % stringutil.escapedata(res))
2956 2956 ui.status(_('remote output: %s\n') %
2957 2957 stringutil.escapedata(output))
2958 2958 else:
2959 2959 res = peer._call(command, **pycompat.strkwargs(args))
2960 2960 ui.status(_('response: %s\n') % stringutil.escapedata(res))
2961 2961
2962 2962 elif action == 'batchbegin':
2963 2963 if batchedcommands is not None:
2964 2964 raise error.Abort(_('nested batchbegin not allowed'))
2965 2965
2966 2966 batchedcommands = []
2967 2967 elif action == 'batchsubmit':
2968 2968 # There is a batching API we could go through. But it would be
2969 2969 # difficult to normalize requests into function calls. It is easier
2970 2970 # to bypass this layer and normalize to commands + args.
2971 2971 ui.status(_('sending batch with %d sub-commands\n') %
2972 2972 len(batchedcommands))
2973 2973 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2974 2974 ui.status(_('response #%d: %s\n') %
2975 2975 (i, stringutil.escapedata(chunk)))
2976 2976
2977 2977 batchedcommands = None
2978 2978
2979 2979 elif action.startswith('httprequest '):
2980 2980 if not opener:
2981 2981 raise error.Abort(_('cannot use httprequest without an HTTP '
2982 2982 'peer'))
2983 2983
2984 2984 request = action.split(' ', 2)
2985 2985 if len(request) != 3:
2986 2986 raise error.Abort(_('invalid httprequest: expected format is '
2987 2987 '"httprequest <method> <path>'))
2988 2988
2989 2989 method, httppath = request[1:]
2990 2990 headers = {}
2991 2991 body = None
2992 2992 frames = []
2993 2993 for line in lines:
2994 2994 line = line.lstrip()
2995 2995 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
2996 2996 if m:
2997 2997 headers[m.group(1)] = m.group(2)
2998 2998 continue
2999 2999
3000 3000 if line.startswith(b'BODYFILE '):
3001 3001 with open(line.split(b' ', 1)[1], 'rb') as fh:
3002 3002 body = fh.read()
3003 3003 elif line.startswith(b'frame '):
3004 3004 frame = wireprotoframing.makeframefromhumanstring(
3005 3005 line[len(b'frame '):])
3006 3006
3007 3007 frames.append(frame)
3008 3008 else:
3009 3009 raise error.Abort(_('unknown argument to httprequest: %s') %
3010 3010 line)
3011 3011
3012 3012 url = path + httppath
3013 3013
3014 3014 if frames:
3015 3015 body = b''.join(bytes(f) for f in frames)
3016 3016
3017 3017 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3018 3018
3019 3019 # urllib.Request insists on using has_data() as a proxy for
3020 3020 # determining the request method. Override that to use our
3021 3021 # explicitly requested method.
3022 3022 req.get_method = lambda: method
3023 3023
3024 3024 try:
3025 3025 opener.open(req).read()
3026 3026 except util.urlerr.urlerror as e:
3027 3027 e.read()
3028 3028
3029 3029 elif action == 'close':
3030 3030 peer.close()
3031 3031 elif action == 'readavailable':
3032 3032 if not stdout or not stderr:
3033 3033 raise error.Abort(_('readavailable not available on this peer'))
3034 3034
3035 3035 stdin.close()
3036 3036 stdout.read()
3037 3037 stderr.read()
3038 3038
3039 3039 elif action == 'readline':
3040 3040 if not stdout:
3041 3041 raise error.Abort(_('readline not available on this peer'))
3042 3042 stdout.readline()
3043 3043 elif action == 'ereadline':
3044 3044 if not stderr:
3045 3045 raise error.Abort(_('ereadline not available on this peer'))
3046 3046 stderr.readline()
3047 3047 elif action.startswith('read '):
3048 3048 count = int(action.split(' ', 1)[1])
3049 3049 if not stdout:
3050 3050 raise error.Abort(_('read not available on this peer'))
3051 3051 stdout.read(count)
3052 3052 elif action.startswith('eread '):
3053 3053 count = int(action.split(' ', 1)[1])
3054 3054 if not stderr:
3055 3055 raise error.Abort(_('eread not available on this peer'))
3056 3056 stderr.read(count)
3057 3057 else:
3058 3058 raise error.Abort(_('unknown action: %s') % action)
3059 3059
3060 3060 if batchedcommands is not None:
3061 3061 raise error.Abort(_('unclosed "batchbegin" request'))
3062 3062
3063 3063 if peer:
3064 3064 peer.close()
3065 3065
3066 3066 if proc:
3067 3067 proc.kill()