debugdeltachain: protect against 0 readsize...
Boris Feld
r38669:0f4c2c70 default
@@ -1,3173 +1,3176
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 fileset,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
92 92 release = lockmod.release
93 93
94 94 command = registrar.command()
95 95
96 96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 97 def debugancestor(ui, repo, *args):
98 98 """find the ancestor revision of two revisions in a given index"""
99 99 if len(args) == 3:
100 100 index, rev1, rev2 = args
101 101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 102 lookup = r.lookup
103 103 elif len(args) == 2:
104 104 if not repo:
105 105 raise error.Abort(_('there is no Mercurial repository here '
106 106 '(.hg not found)'))
107 107 rev1, rev2 = args
108 108 r = repo.changelog
109 109 lookup = repo.lookup
110 110 else:
111 111 raise error.Abort(_('either two or three arguments required'))
112 112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114 114
115 115 @command('debugapplystreamclonebundle', [], 'FILE')
116 116 def debugapplystreamclonebundle(ui, repo, fname):
117 117 """apply a stream clone bundle file"""
118 118 f = hg.openpath(ui, fname)
119 119 gen = exchange.readbundle(ui, f, fname)
120 120 gen.apply(repo)
121 121
122 122 @command('debugbuilddag',
123 123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 125 ('n', 'new-file', None, _('add new file at each rev'))],
126 126 _('[OPTION]... [TEXT]'))
127 127 def debugbuilddag(ui, repo, text=None,
128 128 mergeable_file=False,
129 129 overwritten_file=False,
130 130 new_file=False):
131 131 """builds a repo with a given DAG from scratch in the current empty repo
132 132
133 133 The description of the DAG is read from stdin if not given on the
134 134 command line.
135 135
136 136 Elements:
137 137
138 138 - "+n" is a linear run of n nodes based on the current default parent
139 139 - "." is a single node based on the current default parent
140 140 - "$" resets the default parent to null (implied at the start);
141 141 otherwise the default parent is always the last node created
142 142 - "<p" sets the default parent to the backref p
143 143 - "*p" is a fork at parent p, which is a backref
144 144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 145 - "/p2" is a merge of the preceding node and p2
146 146 - ":tag" defines a local tag for the preceding node
147 147 - "@branch" sets the named branch for subsequent nodes
148 148 - "#...\\n" is a comment up to the end of the line
149 149
150 150 Whitespace between the above elements is ignored.
151 151
152 152 A backref is either
153 153
154 154 - a number n, which references the node curr-n, where curr is the current
155 155 node, or
156 156 - the name of a local tag you placed earlier using ":tag", or
157 157 - empty to denote the default parent.
158 158
159 159 All string valued-elements are either strictly alphanumeric, or must
160 160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 161 """
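    # Illustrative only (a sketch built from the element rules above, not a
    # verbatim test case): piping the text below to this command in an empty
    # repository creates three linear nodes with the last one tagged "fork",
    # two more nodes on top, a two-node branch off "fork", and a merge of the
    # two resulting heads ("2" is a backref to the node created two steps
    # before the current tip):
    #
    #     echo '+3 :fork +2 <fork +2 /2' | hg debugbuilddag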
162 162
163 163 if text is None:
164 164 ui.status(_("reading DAG from stdin\n"))
165 165 text = ui.fin.read()
166 166
167 167 cl = repo.changelog
168 168 if len(cl) > 0:
169 169 raise error.Abort(_('repository is not empty'))
170 170
171 171 # determine number of revs in DAG
172 172 total = 0
173 173 for type, data in dagparser.parsedag(text):
174 174 if type == 'n':
175 175 total += 1
176 176
177 177 if mergeable_file:
178 178 linesperrev = 2
179 179 # make a file with k lines per rev
180 180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
181 181 initialmergedlines.append("")
182 182
183 183 tags = []
184 184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
185 185 total=total)
186 186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
187 187 at = -1
188 188 atbranch = 'default'
189 189 nodeids = []
190 190 id = 0
191 191 progress.update(id)
192 192 for type, data in dagparser.parsedag(text):
193 193 if type == 'n':
194 194 ui.note(('node %s\n' % pycompat.bytestr(data)))
195 195 id, ps = data
196 196
197 197 files = []
198 198 filecontent = {}
199 199
200 200 p2 = None
201 201 if mergeable_file:
202 202 fn = "mf"
203 203 p1 = repo[ps[0]]
204 204 if len(ps) > 1:
205 205 p2 = repo[ps[1]]
206 206 pa = p1.ancestor(p2)
207 207 base, local, other = [x[fn].data() for x in (pa, p1,
208 208 p2)]
209 209 m3 = simplemerge.Merge3Text(base, local, other)
210 210 ml = [l.strip() for l in m3.merge_lines()]
211 211 ml.append("")
212 212 elif at > 0:
213 213 ml = p1[fn].data().split("\n")
214 214 else:
215 215 ml = initialmergedlines
216 216 ml[id * linesperrev] += " r%i" % id
217 217 mergedtext = "\n".join(ml)
218 218 files.append(fn)
219 219 filecontent[fn] = mergedtext
220 220
221 221 if overwritten_file:
222 222 fn = "of"
223 223 files.append(fn)
224 224 filecontent[fn] = "r%i\n" % id
225 225
226 226 if new_file:
227 227 fn = "nf%i" % id
228 228 files.append(fn)
229 229 filecontent[fn] = "r%i\n" % id
230 230 if len(ps) > 1:
231 231 if not p2:
232 232 p2 = repo[ps[1]]
233 233 for fn in p2:
234 234 if fn.startswith("nf"):
235 235 files.append(fn)
236 236 filecontent[fn] = p2[fn].data()
237 237
238 238 def fctxfn(repo, cx, path):
239 239 if path in filecontent:
240 240 return context.memfilectx(repo, cx, path,
241 241 filecontent[path])
242 242 return None
243 243
244 244 if len(ps) == 0 or ps[0] < 0:
245 245 pars = [None, None]
246 246 elif len(ps) == 1:
247 247 pars = [nodeids[ps[0]], None]
248 248 else:
249 249 pars = [nodeids[p] for p in ps]
250 250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
251 251 date=(id, 0),
252 252 user="debugbuilddag",
253 253 extra={'branch': atbranch})
254 254 nodeid = repo.commitctx(cx)
255 255 nodeids.append(nodeid)
256 256 at = id
257 257 elif type == 'l':
258 258 id, name = data
259 259 ui.note(('tag %s\n' % name))
260 260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
261 261 elif type == 'a':
262 262 ui.note(('branch %s\n' % data))
263 263 atbranch = data
264 264 progress.update(id)
265 265
266 266 if tags:
267 267 repo.vfs.write("localtags", "".join(tags))
268 268
269 269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
270 270 indent_string = ' ' * indent
271 271 if all:
272 272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
273 273 % indent_string)
274 274
275 275 def showchunks(named):
276 276 ui.write("\n%s%s\n" % (indent_string, named))
277 277 for deltadata in gen.deltaiter():
278 278 node, p1, p2, cs, deltabase, delta, flags = deltadata
279 279 ui.write("%s%s %s %s %s %s %d\n" %
280 280 (indent_string, hex(node), hex(p1), hex(p2),
281 281 hex(cs), hex(deltabase), len(delta)))
282 282
283 283 chunkdata = gen.changelogheader()
284 284 showchunks("changelog")
285 285 chunkdata = gen.manifestheader()
286 286 showchunks("manifest")
287 287 for chunkdata in iter(gen.filelogheader, {}):
288 288 fname = chunkdata['filename']
289 289 showchunks(fname)
290 290 else:
291 291 if isinstance(gen, bundle2.unbundle20):
292 292 raise error.Abort(_('use debugbundle2 for this file'))
293 293 chunkdata = gen.changelogheader()
294 294 for deltadata in gen.deltaiter():
295 295 node, p1, p2, cs, deltabase, delta, flags = deltadata
296 296 ui.write("%s%s\n" % (indent_string, hex(node)))
297 297
298 298 def _debugobsmarkers(ui, part, indent=0, **opts):
299 299 """display version and markers contained in 'data'"""
300 300 opts = pycompat.byteskwargs(opts)
301 301 data = part.read()
302 302 indent_string = ' ' * indent
303 303 try:
304 304 version, markers = obsolete._readmarkers(data)
305 305 except error.UnknownVersion as exc:
306 306 msg = "%sunsupported version: %s (%d bytes)\n"
307 307 msg %= indent_string, exc.version, len(data)
308 308 ui.write(msg)
309 309 else:
310 310 msg = "%sversion: %d (%d bytes)\n"
311 311 msg %= indent_string, version, len(data)
312 312 ui.write(msg)
313 313 fm = ui.formatter('debugobsolete', opts)
314 314 for rawmarker in sorted(markers):
315 315 m = obsutil.marker(None, rawmarker)
316 316 fm.startitem()
317 317 fm.plain(indent_string)
318 318 cmdutil.showmarker(fm, m)
319 319 fm.end()
320 320
321 321 def _debugphaseheads(ui, data, indent=0):
322 322 """display the phase heads contained in 'data'"""
323 323 indent_string = ' ' * indent
324 324 headsbyphase = phases.binarydecode(data)
325 325 for phase in phases.allphases:
326 326 for head in headsbyphase[phase]:
327 327 ui.write(indent_string)
328 328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
329 329
330 330 def _quasirepr(thing):
331 331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
332 332 return '{%s}' % (
333 333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
334 334 return pycompat.bytestr(repr(thing))
335 335
336 336 def _debugbundle2(ui, gen, all=None, **opts):
337 337 """lists the contents of a bundle2"""
338 338 if not isinstance(gen, bundle2.unbundle20):
339 339 raise error.Abort(_('not a bundle2 file'))
340 340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
341 341 parttypes = opts.get(r'part_type', [])
342 342 for part in gen.iterparts():
343 343 if parttypes and part.type not in parttypes:
344 344 continue
345 345 msg = '%s -- %s (mandatory: %r)\n'
346 346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
347 347 if part.type == 'changegroup':
348 348 version = part.params.get('version', '01')
349 349 cg = changegroup.getunbundler(version, part, 'UN')
350 350 if not ui.quiet:
351 351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
352 352 if part.type == 'obsmarkers':
353 353 if not ui.quiet:
354 354 _debugobsmarkers(ui, part, indent=4, **opts)
355 355 if part.type == 'phase-heads':
356 356 if not ui.quiet:
357 357 _debugphaseheads(ui, part, indent=4)
358 358
359 359 @command('debugbundle',
360 360 [('a', 'all', None, _('show all details')),
361 361 ('', 'part-type', [], _('show only the named part type')),
362 362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
363 363 _('FILE'),
364 364 norepo=True)
365 365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
366 366 """lists the contents of a bundle"""
367 367 with hg.openpath(ui, bundlepath) as f:
368 368 if spec:
369 369 spec = exchange.getbundlespec(ui, f)
370 370 ui.write('%s\n' % spec)
371 371 return
372 372
373 373 gen = exchange.readbundle(ui, f, bundlepath)
374 374 if isinstance(gen, bundle2.unbundle20):
375 375 return _debugbundle2(ui, gen, all=all, **opts)
376 376 _debugchangegroup(ui, gen, all=all, **opts)
377 377
378 378 @command('debugcapabilities',
379 379 [], _('PATH'),
380 380 norepo=True)
381 381 def debugcapabilities(ui, path, **opts):
382 382 """lists the capabilities of a remote peer"""
383 383 opts = pycompat.byteskwargs(opts)
384 384 peer = hg.peer(ui, opts, path)
385 385 caps = peer.capabilities()
386 386 ui.write(('Main capabilities:\n'))
387 387 for c in sorted(caps):
388 388 ui.write((' %s\n') % c)
389 389 b2caps = bundle2.bundle2caps(peer)
390 390 if b2caps:
391 391 ui.write(('Bundle2 capabilities:\n'))
392 392 for key, values in sorted(b2caps.iteritems()):
393 393 ui.write((' %s\n') % key)
394 394 for v in values:
395 395 ui.write((' %s\n') % v)
396 396
397 397 @command('debugcheckstate', [], '')
398 398 def debugcheckstate(ui, repo):
399 399 """validate the correctness of the current dirstate"""
400 400 parent1, parent2 = repo.dirstate.parents()
401 401 m1 = repo[parent1].manifest()
402 402 m2 = repo[parent2].manifest()
403 403 errors = 0
404 404 for f in repo.dirstate:
405 405 state = repo.dirstate[f]
406 406 if state in "nr" and f not in m1:
407 407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
408 408 errors += 1
409 409 if state in "a" and f in m1:
410 410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
411 411 errors += 1
412 412 if state in "m" and f not in m1 and f not in m2:
413 413 ui.warn(_("%s in state %s, but not in either manifest\n") %
414 414 (f, state))
415 415 errors += 1
416 416 for f in m1:
417 417 state = repo.dirstate[f]
418 418 if state not in "nrm":
419 419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
420 420 errors += 1
421 421 if errors:
422 422 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
423 423 raise error.Abort(errstr)
424 424
425 425 @command('debugcolor',
426 426 [('', 'style', None, _('show all configured styles'))],
427 427 'hg debugcolor')
428 428 def debugcolor(ui, repo, **opts):
429 429 """show available color, effects or style"""
430 430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
431 431 if opts.get(r'style'):
432 432 return _debugdisplaystyle(ui)
433 433 else:
434 434 return _debugdisplaycolor(ui)
435 435
436 436 def _debugdisplaycolor(ui):
437 437 ui = ui.copy()
438 438 ui._styles.clear()
439 439 for effect in color._activeeffects(ui).keys():
440 440 ui._styles[effect] = effect
441 441 if ui._terminfoparams:
442 442 for k, v in ui.configitems('color'):
443 443 if k.startswith('color.'):
444 444 ui._styles[k] = k[6:]
445 445 elif k.startswith('terminfo.'):
446 446 ui._styles[k] = k[9:]
447 447 ui.write(_('available colors:\n'))
448 448 # sort label with a '_' after the other to group '_background' entry.
449 449 items = sorted(ui._styles.items(),
450 450 key=lambda i: ('_' in i[0], i[0], i[1]))
451 451 for colorname, label in items:
452 452 ui.write(('%s\n') % colorname, label=label)
453 453
454 454 def _debugdisplaystyle(ui):
455 455 ui.write(_('available style:\n'))
456 456 if not ui._styles:
457 457 return
458 458 width = max(len(s) for s in ui._styles)
459 459 for label, effects in sorted(ui._styles.items()):
460 460 ui.write('%s' % label, label=label)
461 461 if effects:
462 462 # 50
463 463 ui.write(': ')
464 464 ui.write(' ' * (max(0, width - len(label))))
465 465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
466 466 ui.write('\n')
467 467
468 468 @command('debugcreatestreamclonebundle', [], 'FILE')
469 469 def debugcreatestreamclonebundle(ui, repo, fname):
470 470 """create a stream clone bundle file
471 471
472 472 Stream bundles are special bundles that are essentially archives of
473 473 revlog files. They are commonly used for cloning very quickly.
474 474 """
475 475 # TODO we may want to turn this into an abort when this functionality
476 476 # is moved into `hg bundle`.
477 477 if phases.hassecret(repo):
478 478 ui.warn(_('(warning: stream clone bundle will contain secret '
479 479 'revisions)\n'))
480 480
481 481 requirements, gen = streamclone.generatebundlev1(repo)
482 482 changegroup.writechunks(ui, gen, fname)
483 483
484 484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
485 485
486 486 @command('debugdag',
487 487 [('t', 'tags', None, _('use tags as labels')),
488 488 ('b', 'branches', None, _('annotate with branch names')),
489 489 ('', 'dots', None, _('use dots for runs')),
490 490 ('s', 'spaces', None, _('separate elements by spaces'))],
491 491 _('[OPTION]... [FILE [REV]...]'),
492 492 optionalrepo=True)
493 493 def debugdag(ui, repo, file_=None, *revs, **opts):
494 494 """format the changelog or an index DAG as a concise textual description
495 495
496 496 If you pass a revlog index, the revlog's DAG is emitted. If you list
497 497 revision numbers, they get labeled in the output as rN.
498 498
499 499 Otherwise, the changelog DAG of the current repo is emitted.
500 500 """
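    # Illustrative examples (not from the help text): "hg debugdag -t -b"
    # prints the changelog DAG of the current repository in the same text
    # format that "hg debugbuilddag" consumes, while passing a revlog index,
    # e.g. "hg debugdag .hg/store/00manifest.i 0 2", dumps that revlog's DAG
    # with revisions 0 and 2 labeled r0 and r2 in the output.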
501 501 spaces = opts.get(r'spaces')
502 502 dots = opts.get(r'dots')
503 503 if file_:
504 504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
505 505 file_)
506 506 revs = set((int(r) for r in revs))
507 507 def events():
508 508 for r in rlog:
509 509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
510 510 if p != -1))
511 511 if r in revs:
512 512 yield 'l', (r, "r%i" % r)
513 513 elif repo:
514 514 cl = repo.changelog
515 515 tags = opts.get(r'tags')
516 516 branches = opts.get(r'branches')
517 517 if tags:
518 518 labels = {}
519 519 for l, n in repo.tags().items():
520 520 labels.setdefault(cl.rev(n), []).append(l)
521 521 def events():
522 522 b = "default"
523 523 for r in cl:
524 524 if branches:
525 525 newb = cl.read(cl.node(r))[5]['branch']
526 526 if newb != b:
527 527 yield 'a', newb
528 528 b = newb
529 529 yield 'n', (r, list(p for p in cl.parentrevs(r)
530 530 if p != -1))
531 531 if tags:
532 532 ls = labels.get(r)
533 533 if ls:
534 534 for l in ls:
535 535 yield 'l', (r, l)
536 536 else:
537 537 raise error.Abort(_('need repo for changelog dag'))
538 538
539 539 for line in dagparser.dagtextlines(events(),
540 540 addspaces=spaces,
541 541 wraplabels=True,
542 542 wrapannotations=True,
543 543 wrapnonlinear=dots,
544 544 usedots=dots,
545 545 maxlinewidth=70):
546 546 ui.write(line)
547 547 ui.write("\n")
548 548
549 549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
550 550 def debugdata(ui, repo, file_, rev=None, **opts):
551 551 """dump the contents of a data file revision"""
552 552 opts = pycompat.byteskwargs(opts)
553 553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
554 554 if rev is not None:
555 555 raise error.CommandError('debugdata', _('invalid arguments'))
556 556 file_, rev = None, file_
557 557 elif rev is None:
558 558 raise error.CommandError('debugdata', _('invalid arguments'))
559 559 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
560 560 try:
561 561 ui.write(r.revision(r.lookup(rev), raw=True))
562 562 except KeyError:
563 563 raise error.Abort(_('invalid revision identifier %s') % rev)
564 564
565 565 @command('debugdate',
566 566 [('e', 'extended', None, _('try extended date formats'))],
567 567 _('[-e] DATE [RANGE]'),
568 568 norepo=True, optionalrepo=True)
569 569 def debugdate(ui, date, range=None, **opts):
570 570 """parse and display a date"""
571 571 if opts[r"extended"]:
572 572 d = dateutil.parsedate(date, util.extendeddateformats)
573 573 else:
574 574 d = dateutil.parsedate(date)
575 575 ui.write(("internal: %d %d\n") % d)
576 576 ui.write(("standard: %s\n") % dateutil.datestr(d))
577 577 if range:
578 578 m = dateutil.matchdate(range)
579 579 ui.write(("match: %s\n") % m(d[0]))
580 580
581 581 @command('debugdeltachain',
582 582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
583 583 _('-c|-m|FILE'),
584 584 optionalrepo=True)
585 585 def debugdeltachain(ui, repo, file_=None, **opts):
586 586 """dump information about delta chains in a revlog
587 587
588 588 Output can be templatized. Available template keywords are:
589 589
590 590 :``rev``: revision number
591 591 :``chainid``: delta chain identifier (numbered by unique base)
592 592 :``chainlen``: delta chain length to this revision
593 593 :``prevrev``: previous revision in delta chain
594 594 :``deltatype``: role of delta / how it was computed
595 595 :``compsize``: compressed size of revision
596 596 :``uncompsize``: uncompressed size of revision
597 597 :``chainsize``: total size of compressed revisions in chain
598 598 :``chainratio``: total chain size divided by uncompressed revision size
599 599 (new delta chains typically start at ratio 2.00)
600 600 :``lindist``: linear distance from base revision in delta chain to end
601 601 of this revision
602 602 :``extradist``: total size of revisions not part of this delta chain from
603 603 base of delta chain to end of this revision; a measurement
604 604 of how much extra data we need to read/seek across to read
605 605 the delta chain for this revision
606 606 :``extraratio``: extradist divided by chainsize; another representation of
607 607 how much unrelated data is needed to load this delta chain
608 608
609 609 If the repository is configured to use the sparse read, additional keywords
610 610 are available:
611 611
612 612 :``readsize``: total size of data read from the disk for a revision
613 613 (sum of the sizes of all the blocks)
614 614 :``largestblock``: size of the largest block of data read from the disk
615 615 :``readdensity``: density of useful bytes in the data read from the disk
616 616 :``srchunks``: in how many data hunks the whole revision would be read
617 617
618 618 The sparse read can be enabled with experimental.sparse-read = True
619 619 """
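    # For example (illustrative, not part of the help text), the keywords
    # above can be combined with the generic formatter options, e.g. listing
    # chain length and cumulative chain size for every manifest revision:
    #
    #     hg debugdeltachain -m -T '{rev} {chainlen} {chainsize}\n'
    #
    # With experimental.sparse-read enabled, {readsize}, {largestblock},
    # {readdensity} and {srchunks} become available to the template as well.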
620 620 opts = pycompat.byteskwargs(opts)
621 621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
622 622 index = r.index
623 623 start = r.start
624 624 length = r.length
625 625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
626 626 withsparseread = getattr(r, '_withsparseread', False)
627 627
628 628 def revinfo(rev):
629 629 e = index[rev]
630 630 compsize = e[1]
631 631 uncompsize = e[2]
632 632 chainsize = 0
633 633
634 634 if generaldelta:
635 635 if e[3] == e[5]:
636 636 deltatype = 'p1'
637 637 elif e[3] == e[6]:
638 638 deltatype = 'p2'
639 639 elif e[3] == rev - 1:
640 640 deltatype = 'prev'
641 641 elif e[3] == rev:
642 642 deltatype = 'base'
643 643 else:
644 644 deltatype = 'other'
645 645 else:
646 646 if e[3] == rev:
647 647 deltatype = 'base'
648 648 else:
649 649 deltatype = 'prev'
650 650
651 651 chain = r._deltachain(rev)[0]
652 652 for iterrev in chain:
653 653 e = index[iterrev]
654 654 chainsize += e[1]
655 655
656 656 return compsize, uncompsize, deltatype, chain, chainsize
657 657
658 658 fm = ui.formatter('debugdeltachain', opts)
659 659
660 660 fm.plain(' rev chain# chainlen prev delta '
661 661 'size rawsize chainsize ratio lindist extradist '
662 662 'extraratio')
663 663 if withsparseread:
664 664 fm.plain(' readsize largestblk rddensity srchunks')
665 665 fm.plain('\n')
666 666
667 667 chainbases = {}
668 668 for rev in r:
669 669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
670 670 chainbase = chain[0]
671 671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
672 672 basestart = start(chainbase)
673 673 revstart = start(rev)
674 674 lineardist = revstart + comp - basestart
675 675 extradist = lineardist - chainsize
676 676 try:
677 677 prevrev = chain[-2]
678 678 except IndexError:
679 679 prevrev = -1
680 680
681 681 if uncomp != 0:
682 682 chainratio = float(chainsize) / float(uncomp)
683 683 else:
684 684 chainratio = chainsize
685 685
686 686 if chainsize != 0:
687 687 extraratio = float(extradist) / float(chainsize)
688 688 else:
689 689 extraratio = extradist
690 690
691 691 fm.startitem()
692 692 fm.write('rev chainid chainlen prevrev deltatype compsize '
693 693 'uncompsize chainsize chainratio lindist extradist '
694 694 'extraratio',
695 695 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
696 696 rev, chainid, len(chain), prevrev, deltatype, comp,
697 697 uncomp, chainsize, chainratio, lineardist, extradist,
698 698 extraratio,
699 699 rev=rev, chainid=chainid, chainlen=len(chain),
700 700 prevrev=prevrev, deltatype=deltatype, compsize=comp,
701 701 uncompsize=uncomp, chainsize=chainsize,
702 702 chainratio=chainratio, lindist=lineardist,
703 703 extradist=extradist, extraratio=extraratio)
704 704 if withsparseread:
705 705 readsize = 0
706 706 largestblock = 0
707 707 srchunks = 0
708 708
709 709 for revschunk in revlog._slicechunk(r, chain):
710 710 srchunks += 1
711 711 blkend = start(revschunk[-1]) + length(revschunk[-1])
712 712 blksize = blkend - start(revschunk[0])
713 713
714 714 readsize += blksize
715 715 if largestblock < blksize:
716 716 largestblock = blksize
717 717
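                # readsize may be 0 (e.g. when every revision in the chain is
                # empty); guard the density computation below so it does not
                # divide by zero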
718 if readsize:
718 719 readdensity = float(chainsize) / float(readsize)
720 else:
721 readdensity = 1
719 722
720 723 fm.write('readsize largestblock readdensity srchunks',
721 724 ' %10d %10d %9.5f %8d',
722 725 readsize, largestblock, readdensity, srchunks,
723 726 readsize=readsize, largestblock=largestblock,
724 727 readdensity=readdensity, srchunks=srchunks)
725 728
726 729 fm.plain('\n')
727 730
728 731 fm.end()
729 732
730 733 @command('debugdirstate|debugstate',
731 734 [('', 'nodates', None, _('do not display the saved mtime')),
732 735 ('', 'datesort', None, _('sort by saved mtime'))],
733 736 _('[OPTION]...'))
734 737 def debugstate(ui, repo, **opts):
735 738 """show the contents of the current dirstate"""
736 739
737 740 nodates = opts.get(r'nodates')
738 741 datesort = opts.get(r'datesort')
739 742
740 743 timestr = ""
741 744 if datesort:
742 745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
743 746 else:
744 747 keyfunc = None # sort by filename
745 748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
746 749 if ent[3] == -1:
747 750 timestr = 'unset '
748 751 elif nodates:
749 752 timestr = 'set '
750 753 else:
751 754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
752 755 time.localtime(ent[3]))
753 756 timestr = encoding.strtolocal(timestr)
754 757 if ent[1] & 0o20000:
755 758 mode = 'lnk'
756 759 else:
757 760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
758 761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
759 762 for f in repo.dirstate.copies():
760 763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
761 764
762 765 @command('debugdiscovery',
763 766 [('', 'old', None, _('use old-style discovery')),
764 767 ('', 'nonheads', None,
765 768 _('use old-style discovery with non-heads included')),
766 769 ('', 'rev', [], 'restrict discovery to this set of revs'),
767 770 ] + cmdutil.remoteopts,
768 771 _('[--rev REV] [OTHER]'))
769 772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
770 773 """runs the changeset discovery protocol in isolation"""
771 774 opts = pycompat.byteskwargs(opts)
772 775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
773 776 remote = hg.peer(repo, opts, remoteurl)
774 777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
775 778
776 779 # make sure tests are repeatable
777 780 random.seed(12323)
778 781
779 782 def doit(pushedrevs, remoteheads, remote=remote):
780 783 if opts.get('old'):
781 784 if not util.safehasattr(remote, 'branches'):
782 785 # enable in-client legacy support
783 786 remote = localrepo.locallegacypeer(remote.local())
784 787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
785 788 force=True)
786 789 common = set(common)
787 790 if not opts.get('nonheads'):
788 791 ui.write(("unpruned common: %s\n") %
789 792 " ".join(sorted(short(n) for n in common)))
790 793 dag = dagutil.revlogdag(repo.changelog)
791 794 all = dag.ancestorset(dag.internalizeall(common))
792 795 common = dag.externalizeall(dag.headsetofconnecteds(all))
793 796 else:
794 797 nodes = None
795 798 if pushedrevs:
796 799 revs = scmutil.revrange(repo, pushedrevs)
797 800 nodes = [repo[r].node() for r in revs]
798 801 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
799 802 ancestorsof=nodes)
800 803 common = set(common)
801 804 rheads = set(hds)
802 805 lheads = set(repo.heads())
803 806 ui.write(("common heads: %s\n") %
804 807 " ".join(sorted(short(n) for n in common)))
805 808 if lheads <= common:
806 809 ui.write(("local is subset\n"))
807 810 elif rheads <= common:
808 811 ui.write(("remote is subset\n"))
809 812
810 813 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
811 814 localrevs = opts['rev']
812 815 doit(localrevs, remoterevs)
813 816
814 817 _chunksize = 4 << 10
815 818
816 819 @command('debugdownload',
817 820 [
818 821 ('o', 'output', '', _('path')),
819 822 ],
820 823 optionalrepo=True)
821 824 def debugdownload(ui, repo, url, output=None, **opts):
822 825 """download a resource using Mercurial logic and config
823 826 """
824 827 fh = urlmod.open(ui, url, output)
825 828
826 829 dest = ui
827 830 if output:
828 831 dest = open(output, "wb", _chunksize)
829 832 try:
830 833 data = fh.read(_chunksize)
831 834 while data:
832 835 dest.write(data)
833 836 data = fh.read(_chunksize)
834 837 finally:
835 838 if output:
836 839 dest.close()
837 840
838 841 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
839 842 def debugextensions(ui, repo, **opts):
840 843 '''show information about active extensions'''
841 844 opts = pycompat.byteskwargs(opts)
842 845 exts = extensions.extensions(ui)
843 846 hgver = util.version()
844 847 fm = ui.formatter('debugextensions', opts)
845 848 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
846 849 isinternal = extensions.ismoduleinternal(extmod)
847 850 extsource = pycompat.fsencode(extmod.__file__)
848 851 if isinternal:
849 852 exttestedwith = [] # never expose magic string to users
850 853 else:
851 854 exttestedwith = getattr(extmod, 'testedwith', '').split()
852 855 extbuglink = getattr(extmod, 'buglink', None)
853 856
854 857 fm.startitem()
855 858
856 859 if ui.quiet or ui.verbose:
857 860 fm.write('name', '%s\n', extname)
858 861 else:
859 862 fm.write('name', '%s', extname)
860 863 if isinternal or hgver in exttestedwith:
861 864 fm.plain('\n')
862 865 elif not exttestedwith:
863 866 fm.plain(_(' (untested!)\n'))
864 867 else:
865 868 lasttestedversion = exttestedwith[-1]
866 869 fm.plain(' (%s!)\n' % lasttestedversion)
867 870
868 871 fm.condwrite(ui.verbose and extsource, 'source',
869 872 _(' location: %s\n'), extsource or "")
870 873
871 874 if ui.verbose:
872 875 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
873 876 fm.data(bundled=isinternal)
874 877
875 878 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
876 879 _(' tested with: %s\n'),
877 880 fm.formatlist(exttestedwith, name='ver'))
878 881
879 882 fm.condwrite(ui.verbose and extbuglink, 'buglink',
880 883 _(' bug reporting: %s\n'), extbuglink or "")
881 884
882 885 fm.end()
883 886
884 887 @command('debugfileset',
885 888 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
886 889 ('', 'all-files', False,
887 890 _('test files from all revisions and working directory'))],
888 891 _('[-r REV] [--all-files] FILESPEC'))
889 892 def debugfileset(ui, repo, expr, **opts):
890 893 '''parse and apply a fileset specification'''
891 894 opts = pycompat.byteskwargs(opts)
892 895 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
893 896 if ui.verbose:
894 897 tree = fileset.parse(expr)
895 898 ui.note(fileset.prettyformat(tree), "\n")
896 899
897 900 files = set()
898 901 if opts['all_files']:
899 902 for r in repo:
900 903 c = repo[r]
901 904 files.update(c.files())
902 905 files.update(c.substate)
903 906 if opts['all_files'] or ctx.rev() is None:
904 907 wctx = repo[None]
905 908 files.update(repo.dirstate.walk(scmutil.matchall(repo),
906 909 subrepos=list(wctx.substate),
907 910 unknown=True, ignored=True))
908 911 files.update(wctx.substate)
909 912 else:
910 913 files.update(ctx.files())
911 914 files.update(ctx.substate)
912 915
913 916 m = ctx.matchfileset(expr)
914 917 for f in sorted(files):
915 918 if not m(f):
916 919 continue
917 920 ui.write("%s\n" % f)
918 921
919 922 @command('debugformat',
920 923 [] + cmdutil.formatteropts,
921 924 _(''))
922 925 def debugformat(ui, repo, **opts):
923 926 """display format information about the current repository
924 927
925 928 Use --verbose to get extra information about current config value and
926 929 Mercurial default."""
927 930 opts = pycompat.byteskwargs(opts)
928 931 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
929 932 maxvariantlength = max(len('format-variant'), maxvariantlength)
930 933
931 934 def makeformatname(name):
932 935 return '%s:' + (' ' * (maxvariantlength - len(name)))
933 936
934 937 fm = ui.formatter('debugformat', opts)
935 938 if fm.isplain():
936 939 def formatvalue(value):
937 940 if util.safehasattr(value, 'startswith'):
938 941 return value
939 942 if value:
940 943 return 'yes'
941 944 else:
942 945 return 'no'
943 946 else:
944 947 formatvalue = pycompat.identity
945 948
946 949 fm.plain('format-variant')
947 950 fm.plain(' ' * (maxvariantlength - len('format-variant')))
948 951 fm.plain(' repo')
949 952 if ui.verbose:
950 953 fm.plain(' config default')
951 954 fm.plain('\n')
952 955 for fv in upgrade.allformatvariant:
953 956 fm.startitem()
954 957 repovalue = fv.fromrepo(repo)
955 958 configvalue = fv.fromconfig(repo)
956 959
957 960 if repovalue != configvalue:
958 961 namelabel = 'formatvariant.name.mismatchconfig'
959 962 repolabel = 'formatvariant.repo.mismatchconfig'
960 963 elif repovalue != fv.default:
961 964 namelabel = 'formatvariant.name.mismatchdefault'
962 965 repolabel = 'formatvariant.repo.mismatchdefault'
963 966 else:
964 967 namelabel = 'formatvariant.name.uptodate'
965 968 repolabel = 'formatvariant.repo.uptodate'
966 969
967 970 fm.write('name', makeformatname(fv.name), fv.name,
968 971 label=namelabel)
969 972 fm.write('repo', ' %3s', formatvalue(repovalue),
970 973 label=repolabel)
971 974 if fv.default != configvalue:
972 975 configlabel = 'formatvariant.config.special'
973 976 else:
974 977 configlabel = 'formatvariant.config.default'
975 978 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
976 979 label=configlabel)
977 980 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
978 981 label='formatvariant.default')
979 982 fm.plain('\n')
980 983 fm.end()
981 984
982 985 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
983 986 def debugfsinfo(ui, path="."):
984 987 """show information detected about current filesystem"""
985 988 ui.write(('path: %s\n') % path)
986 989 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
987 990 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
988 991 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
989 992 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
990 993 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
991 994 casesensitive = '(unknown)'
992 995 try:
993 996 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
994 997 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
995 998 except OSError:
996 999 pass
997 1000 ui.write(('case-sensitive: %s\n') % casesensitive)
998 1001
999 1002 @command('debuggetbundle',
1000 1003 [('H', 'head', [], _('id of head node'), _('ID')),
1001 1004 ('C', 'common', [], _('id of common node'), _('ID')),
1002 1005 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1003 1006 _('REPO FILE [-H|-C ID]...'),
1004 1007 norepo=True)
1005 1008 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1006 1009 """retrieves a bundle from a repo
1007 1010
1008 1011 Every ID must be a full-length hex node id string. Saves the bundle to the
1009 1012 given file.
1010 1013 """
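    # Illustrative invocation (the URL and node below are placeholders):
    # fetch everything reachable from one head into a bundle2 file:
    #
    #     hg debuggetbundle http://example.com/repo out.hg \
    #         -H 0123456789abcdef0123456789abcdef01234567 -t bundle2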
1011 1014 opts = pycompat.byteskwargs(opts)
1012 1015 repo = hg.peer(ui, opts, repopath)
1013 1016 if not repo.capable('getbundle'):
1014 1017 raise error.Abort("getbundle() not supported by target repository")
1015 1018 args = {}
1016 1019 if common:
1017 1020 args[r'common'] = [bin(s) for s in common]
1018 1021 if head:
1019 1022 args[r'heads'] = [bin(s) for s in head]
1020 1023 # TODO: get desired bundlecaps from command line.
1021 1024 args[r'bundlecaps'] = None
1022 1025 bundle = repo.getbundle('debug', **args)
1023 1026
1024 1027 bundletype = opts.get('type', 'bzip2').lower()
1025 1028 btypes = {'none': 'HG10UN',
1026 1029 'bzip2': 'HG10BZ',
1027 1030 'gzip': 'HG10GZ',
1028 1031 'bundle2': 'HG20'}
1029 1032 bundletype = btypes.get(bundletype)
1030 1033 if bundletype not in bundle2.bundletypes:
1031 1034 raise error.Abort(_('unknown bundle type specified with --type'))
1032 1035 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1033 1036
1034 1037 @command('debugignore', [], '[FILE]')
1035 1038 def debugignore(ui, repo, *files, **opts):
1036 1039 """display the combined ignore pattern and information about ignored files
1037 1040
1038 1041 With no argument display the combined ignore pattern.
1039 1042
1040 1043 Given space separated file names, shows if the given file is ignored and
1041 1044 if so, show the ignore rule (file and line number) that matched it.
1042 1045 """
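    # Example session (illustrative; assumes "*.pyc" is the first line of
    # .hgignore):
    #
    #     $ hg debugignore
    #     <combined ignore matcher repr>
    #     $ hg debugignore build/output.pyc
    #     build/output.pyc is ignored
    #     (ignore rule in .hgignore, line 1: '*.pyc')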
1043 1046 ignore = repo.dirstate._ignore
1044 1047 if not files:
1045 1048 # Show all the patterns
1046 1049 ui.write("%s\n" % pycompat.byterepr(ignore))
1047 1050 else:
1048 1051 m = scmutil.match(repo[None], pats=files)
1049 1052 for f in m.files():
1050 1053 nf = util.normpath(f)
1051 1054 ignored = None
1052 1055 ignoredata = None
1053 1056 if nf != '.':
1054 1057 if ignore(nf):
1055 1058 ignored = nf
1056 1059 ignoredata = repo.dirstate._ignorefileandline(nf)
1057 1060 else:
1058 1061 for p in util.finddirs(nf):
1059 1062 if ignore(p):
1060 1063 ignored = p
1061 1064 ignoredata = repo.dirstate._ignorefileandline(p)
1062 1065 break
1063 1066 if ignored:
1064 1067 if ignored == nf:
1065 1068 ui.write(_("%s is ignored\n") % m.uipath(f))
1066 1069 else:
1067 1070 ui.write(_("%s is ignored because of "
1068 1071 "containing folder %s\n")
1069 1072 % (m.uipath(f), ignored))
1070 1073 ignorefile, lineno, line = ignoredata
1071 1074 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1072 1075 % (ignorefile, lineno, line))
1073 1076 else:
1074 1077 ui.write(_("%s is not ignored\n") % m.uipath(f))
1075 1078
1076 1079 @command('debugindex', cmdutil.debugrevlogopts +
1077 1080 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1078 1081 _('[-f FORMAT] -c|-m|FILE'),
1079 1082 optionalrepo=True)
1080 1083 def debugindex(ui, repo, file_=None, **opts):
1081 1084 """dump the contents of an index file"""
1082 1085 opts = pycompat.byteskwargs(opts)
1083 1086 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1084 1087 format = opts.get('format', 0)
1085 1088 if format not in (0, 1):
1086 1089 raise error.Abort(_("unknown format %d") % format)
1087 1090
1088 1091 if ui.debugflag:
1089 1092 shortfn = hex
1090 1093 else:
1091 1094 shortfn = short
1092 1095
1093 1096 # There might not be anything in r, so have a sane default
1094 1097 idlen = 12
1095 1098 for i in r:
1096 1099 idlen = len(shortfn(r.node(i)))
1097 1100 break
1098 1101
1099 1102 if format == 0:
1100 1103 if ui.verbose:
1101 1104 ui.write((" rev offset length linkrev"
1102 1105 " %s %s p2\n") % ("nodeid".ljust(idlen),
1103 1106 "p1".ljust(idlen)))
1104 1107 else:
1105 1108 ui.write((" rev linkrev %s %s p2\n") % (
1106 1109 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1107 1110 elif format == 1:
1108 1111 if ui.verbose:
1109 1112 ui.write((" rev flag offset length size link p1"
1110 1113 " p2 %s\n") % "nodeid".rjust(idlen))
1111 1114 else:
1112 1115 ui.write((" rev flag size link p1 p2 %s\n") %
1113 1116 "nodeid".rjust(idlen))
1114 1117
1115 1118 for i in r:
1116 1119 node = r.node(i)
1117 1120 if format == 0:
1118 1121 try:
1119 1122 pp = r.parents(node)
1120 1123 except Exception:
1121 1124 pp = [nullid, nullid]
1122 1125 if ui.verbose:
1123 1126 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1124 1127 i, r.start(i), r.length(i), r.linkrev(i),
1125 1128 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1126 1129 else:
1127 1130 ui.write("% 6d % 7d %s %s %s\n" % (
1128 1131 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1129 1132 shortfn(pp[1])))
1130 1133 elif format == 1:
1131 1134 pr = r.parentrevs(i)
1132 1135 if ui.verbose:
1133 1136 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1134 1137 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1135 1138 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1136 1139 else:
1137 1140 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1138 1141 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1139 1142 shortfn(node)))
1140 1143
1141 1144 @command('debugindexdot', cmdutil.debugrevlogopts,
1142 1145 _('-c|-m|FILE'), optionalrepo=True)
1143 1146 def debugindexdot(ui, repo, file_=None, **opts):
1144 1147 """dump an index DAG as a graphviz dot file"""
1145 1148 opts = pycompat.byteskwargs(opts)
1146 1149 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1147 1150 ui.write(("digraph G {\n"))
1148 1151 for i in r:
1149 1152 node = r.node(i)
1150 1153 pp = r.parents(node)
1151 1154 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1152 1155 if pp[1] != nullid:
1153 1156 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1154 1157 ui.write("}\n")
1155 1158
1156 1159 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1157 1160 def debuginstall(ui, **opts):
1158 1161 '''test Mercurial installation
1159 1162
1160 1163 Returns 0 on success.
1161 1164 '''
1162 1165 opts = pycompat.byteskwargs(opts)
1163 1166
1164 1167 def writetemp(contents):
1165 1168 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1166 1169 f = os.fdopen(fd, r"wb")
1167 1170 f.write(contents)
1168 1171 f.close()
1169 1172 return name
1170 1173
1171 1174 problems = 0
1172 1175
1173 1176 fm = ui.formatter('debuginstall', opts)
1174 1177 fm.startitem()
1175 1178
1176 1179 # encoding
1177 1180 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1178 1181 err = None
1179 1182 try:
1180 1183 codecs.lookup(pycompat.sysstr(encoding.encoding))
1181 1184 except LookupError as inst:
1182 1185 err = stringutil.forcebytestr(inst)
1183 1186 problems += 1
1184 1187 fm.condwrite(err, 'encodingerror', _(" %s\n"
1185 1188 " (check that your locale is properly set)\n"), err)
1186 1189
1187 1190 # Python
1188 1191 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1189 1192 pycompat.sysexecutable)
1190 1193 fm.write('pythonver', _("checking Python version (%s)\n"),
1191 1194 ("%d.%d.%d" % sys.version_info[:3]))
1192 1195 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1193 1196 os.path.dirname(pycompat.fsencode(os.__file__)))
1194 1197
1195 1198 security = set(sslutil.supportedprotocols)
1196 1199 if sslutil.hassni:
1197 1200 security.add('sni')
1198 1201
1199 1202 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1200 1203 fm.formatlist(sorted(security), name='protocol',
1201 1204 fmt='%s', sep=','))
1202 1205
1203 1206 # These are warnings, not errors. So don't increment problem count. This
1204 1207 # may change in the future.
1205 1208 if 'tls1.2' not in security:
1206 1209 fm.plain(_(' TLS 1.2 not supported by Python install; '
1207 1210 'network connections lack modern security\n'))
1208 1211 if 'sni' not in security:
1209 1212 fm.plain(_(' SNI not supported by Python install; may have '
1210 1213 'connectivity issues with some servers\n'))
1211 1214
1212 1215 # TODO print CA cert info
1213 1216
1214 1217 # hg version
1215 1218 hgver = util.version()
1216 1219 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1217 1220 hgver.split('+')[0])
1218 1221 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1219 1222 '+'.join(hgver.split('+')[1:]))
1220 1223
1221 1224 # compiled modules
1222 1225 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1223 1226 policy.policy)
1224 1227 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1225 1228 os.path.dirname(pycompat.fsencode(__file__)))
1226 1229
1227 1230 if policy.policy in ('c', 'allow'):
1228 1231 err = None
1229 1232 try:
1230 1233 from .cext import (
1231 1234 base85,
1232 1235 bdiff,
1233 1236 mpatch,
1234 1237 osutil,
1235 1238 )
1236 1239 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1237 1240 except Exception as inst:
1238 1241 err = stringutil.forcebytestr(inst)
1239 1242 problems += 1
1240 1243 fm.condwrite(err, 'extensionserror', " %s\n", err)
1241 1244
1242 1245 compengines = util.compengines._engines.values()
1243 1246 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1244 1247 fm.formatlist(sorted(e.name() for e in compengines),
1245 1248 name='compengine', fmt='%s', sep=', '))
1246 1249 fm.write('compenginesavail', _('checking available compression engines '
1247 1250 '(%s)\n'),
1248 1251 fm.formatlist(sorted(e.name() for e in compengines
1249 1252 if e.available()),
1250 1253 name='compengine', fmt='%s', sep=', '))
1251 1254 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1252 1255 fm.write('compenginesserver', _('checking available compression engines '
1253 1256 'for wire protocol (%s)\n'),
1254 1257 fm.formatlist([e.name() for e in wirecompengines
1255 1258 if e.wireprotosupport()],
1256 1259 name='compengine', fmt='%s', sep=', '))
1257 1260 re2 = 'missing'
1258 1261 if util._re2:
1259 1262 re2 = 'available'
1260 1263 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1261 1264 fm.data(re2=bool(util._re2))
1262 1265
1263 1266 # templates
1264 1267 p = templater.templatepaths()
1265 1268 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1266 1269 fm.condwrite(not p, '', _(" no template directories found\n"))
1267 1270 if p:
1268 1271 m = templater.templatepath("map-cmdline.default")
1269 1272 if m:
1270 1273 # template found, check if it is working
1271 1274 err = None
1272 1275 try:
1273 1276 templater.templater.frommapfile(m)
1274 1277 except Exception as inst:
1275 1278 err = stringutil.forcebytestr(inst)
1276 1279 p = None
1277 1280 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1278 1281 else:
1279 1282 p = None
1280 1283 fm.condwrite(p, 'defaulttemplate',
1281 1284 _("checking default template (%s)\n"), m)
1282 1285 fm.condwrite(not m, 'defaulttemplatenotfound',
1283 1286 _(" template '%s' not found\n"), "default")
1284 1287 if not p:
1285 1288 problems += 1
1286 1289 fm.condwrite(not p, '',
1287 1290 _(" (templates seem to have been installed incorrectly)\n"))
1288 1291
1289 1292 # editor
1290 1293 editor = ui.geteditor()
1291 1294 editor = util.expandpath(editor)
1292 1295 editorbin = procutil.shellsplit(editor)[0]
1293 1296 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1294 1297 cmdpath = procutil.findexe(editorbin)
1295 1298 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1296 1299 _(" No commit editor set and can't find %s in PATH\n"
1297 1300 " (specify a commit editor in your configuration"
1298 1301 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1299 1302 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1300 1303 _(" Can't find editor '%s' in PATH\n"
1301 1304 " (specify a commit editor in your configuration"
1302 1305 " file)\n"), not cmdpath and editorbin)
1303 1306 if not cmdpath and editor != 'vi':
1304 1307 problems += 1
1305 1308
1306 1309 # check username
1307 1310 username = None
1308 1311 err = None
1309 1312 try:
1310 1313 username = ui.username()
1311 1314 except error.Abort as e:
1312 1315 err = stringutil.forcebytestr(e)
1313 1316 problems += 1
1314 1317
1315 1318 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1316 1319 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1317 1320 " (specify a username in your configuration file)\n"), err)
1318 1321
1319 1322 fm.condwrite(not problems, '',
1320 1323 _("no problems detected\n"))
1321 1324 if not problems:
1322 1325 fm.data(problems=problems)
1323 1326 fm.condwrite(problems, 'problems',
1324 1327 _("%d problems detected,"
1325 1328 " please check your install!\n"), problems)
1326 1329 fm.end()
1327 1330
1328 1331 return problems
1329 1332
1330 1333 @command('debugknown', [], _('REPO ID...'), norepo=True)
1331 1334 def debugknown(ui, repopath, *ids, **opts):
1332 1335 """test whether node ids are known to a repo
1333 1336
1334 1337 Every ID must be a full-length hex node id string. Returns a list of 0s
1335 1338 and 1s indicating unknown/known.
1336 1339 """
1337 1340 opts = pycompat.byteskwargs(opts)
1338 1341 repo = hg.peer(ui, opts, repopath)
1339 1342 if not repo.capable('known'):
1340 1343 raise error.Abort("known() not supported by target repository")
1341 1344 flags = repo.known([bin(s) for s in ids])
1342 1345 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1343 1346
1344 1347 @command('debuglabelcomplete', [], _('LABEL...'))
1345 1348 def debuglabelcomplete(ui, repo, *args):
1346 1349 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1347 1350 debugnamecomplete(ui, repo, *args)
1348 1351
1349 1352 @command('debuglocks',
1350 1353 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1351 1354 ('W', 'force-wlock', None,
1352 1355 _('free the working state lock (DANGEROUS)')),
1353 1356 ('s', 'set-lock', None, _('set the store lock until stopped')),
1354 1357 ('S', 'set-wlock', None,
1355 1358 _('set the working state lock until stopped'))],
1356 1359 _('[OPTION]...'))
1357 1360 def debuglocks(ui, repo, **opts):
1358 1361 """show or modify state of locks
1359 1362
1360 1363 By default, this command will show which locks are held. This
1361 1364 includes the user and process holding the lock, the amount of time
1362 1365 the lock has been held, and the machine name where the process is
1363 1366 running if it's not local.
1364 1367
1365 1368 Locks protect the integrity of Mercurial's data, so should be
1366 1369 treated with care. System crashes or other interruptions may cause
1367 1370 locks to not be properly released, though Mercurial will usually
1368 1371 detect and remove such stale locks automatically.
1369 1372
1370 1373 However, detecting stale locks may not always be possible (for
1371 1374 instance, on a shared filesystem). Removing locks may also be
1372 1375 blocked by filesystem permissions.
1373 1376
1374 1377 Setting a lock will prevent other commands from changing the data.
1375 1378 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1376 1379 The set locks are removed when the command exits.
1377 1380
1378 1381 Returns 0 if no locks are held.
1379 1382
1380 1383 """
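    # Typical invocations (illustrative): plain "hg debuglocks" reports
    # whether 'lock' and 'wlock' are free or held (and by which user and
    # process); "hg debuglocks --set-wlock" holds the working state lock
    # until the command is interrupted, which is useful for exercising how
    # other commands behave while the repository is locked.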
1381 1384
1382 1385 if opts.get(r'force_lock'):
1383 1386 repo.svfs.unlink('lock')
1384 1387 if opts.get(r'force_wlock'):
1385 1388 repo.vfs.unlink('wlock')
1386 1389 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1387 1390 return 0
1388 1391
1389 1392 locks = []
1390 1393 try:
1391 1394 if opts.get(r'set_wlock'):
1392 1395 try:
1393 1396 locks.append(repo.wlock(False))
1394 1397 except error.LockHeld:
1395 1398 raise error.Abort(_('wlock is already held'))
1396 1399 if opts.get(r'set_lock'):
1397 1400 try:
1398 1401 locks.append(repo.lock(False))
1399 1402 except error.LockHeld:
1400 1403 raise error.Abort(_('lock is already held'))
1401 1404 if len(locks):
1402 1405 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1403 1406 return 0
1404 1407 finally:
1405 1408 release(*locks)
1406 1409
1407 1410 now = time.time()
1408 1411 held = 0
1409 1412
1410 1413 def report(vfs, name, method):
1411 1414 # this causes stale locks to get reaped for more accurate reporting
1412 1415 try:
1413 1416 l = method(False)
1414 1417 except error.LockHeld:
1415 1418 l = None
1416 1419
1417 1420 if l:
1418 1421 l.release()
1419 1422 else:
1420 1423 try:
1421 1424 st = vfs.lstat(name)
1422 1425 age = now - st[stat.ST_MTIME]
1423 1426 user = util.username(st.st_uid)
1424 1427 locker = vfs.readlock(name)
1425 1428 if ":" in locker:
1426 1429 host, pid = locker.split(':')
1427 1430 if host == socket.gethostname():
1428 1431 locker = 'user %s, process %s' % (user, pid)
1429 1432 else:
1430 1433 locker = 'user %s, process %s, host %s' \
1431 1434 % (user, pid, host)
1432 1435 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1433 1436 return 1
1434 1437 except OSError as e:
1435 1438 if e.errno != errno.ENOENT:
1436 1439 raise
1437 1440
1438 1441 ui.write(("%-6s free\n") % (name + ":"))
1439 1442 return 0
1440 1443
1441 1444 held += report(repo.svfs, "lock", repo.lock)
1442 1445 held += report(repo.vfs, "wlock", repo.wlock)
1443 1446
1444 1447 return held
1445 1448
1446 1449 @command('debugmergestate', [], '')
1447 1450 def debugmergestate(ui, repo, *args):
1448 1451 """print merge state
1449 1452
1450 1453 Use --verbose to print out information about whether v1 or v2 merge state
1451 1454 was chosen."""
1452 1455 def _hashornull(h):
1453 1456 if h == nullhex:
1454 1457 return 'null'
1455 1458 else:
1456 1459 return h
1457 1460
1458 1461 def printrecords(version):
1459 1462 ui.write(('* version %d records\n') % version)
1460 1463 if version == 1:
1461 1464 records = v1records
1462 1465 else:
1463 1466 records = v2records
1464 1467
1465 1468 for rtype, record in records:
1466 1469 # pretty print some record types
1467 1470 if rtype == 'L':
1468 1471 ui.write(('local: %s\n') % record)
1469 1472 elif rtype == 'O':
1470 1473 ui.write(('other: %s\n') % record)
1471 1474 elif rtype == 'm':
1472 1475 driver, mdstate = record.split('\0', 1)
1473 1476 ui.write(('merge driver: %s (state "%s")\n')
1474 1477 % (driver, mdstate))
1475 1478 elif rtype in 'FDC':
1476 1479 r = record.split('\0')
1477 1480 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1478 1481 if version == 1:
1479 1482 onode = 'not stored in v1 format'
1480 1483 flags = r[7]
1481 1484 else:
1482 1485 onode, flags = r[7:9]
1483 1486 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1484 1487 % (f, rtype, state, _hashornull(hash)))
1485 1488 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1486 1489 ui.write((' ancestor path: %s (node %s)\n')
1487 1490 % (afile, _hashornull(anode)))
1488 1491 ui.write((' other path: %s (node %s)\n')
1489 1492 % (ofile, _hashornull(onode)))
1490 1493 elif rtype == 'f':
1491 1494 filename, rawextras = record.split('\0', 1)
1492 1495 extras = rawextras.split('\0')
1493 1496 i = 0
1494 1497 extrastrings = []
1495 1498 while i < len(extras):
1496 1499 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1497 1500 i += 2
1498 1501
1499 1502 ui.write(('file extras: %s (%s)\n')
1500 1503 % (filename, ', '.join(extrastrings)))
1501 1504 elif rtype == 'l':
1502 1505 labels = record.split('\0', 2)
1503 1506 labels = [l for l in labels if len(l) > 0]
1504 1507 ui.write(('labels:\n'))
1505 1508 ui.write((' local: %s\n' % labels[0]))
1506 1509 ui.write((' other: %s\n' % labels[1]))
1507 1510 if len(labels) > 2:
1508 1511 ui.write((' base: %s\n' % labels[2]))
1509 1512 else:
1510 1513 ui.write(('unrecognized entry: %s\t%s\n')
1511 1514 % (rtype, record.replace('\0', '\t')))
1512 1515
1513 1516 # Avoid mergestate.read() since it may raise an exception for unsupported
1514 1517 # merge state records. We shouldn't be doing this, but this is OK since this
1515 1518 # command is pretty low-level.
1516 1519 ms = mergemod.mergestate(repo)
1517 1520
1518 1521 # sort so that reasonable information is on top
1519 1522 v1records = ms._readrecordsv1()
1520 1523 v2records = ms._readrecordsv2()
1521 1524 order = 'LOml'
1522 1525 def key(r):
1523 1526 idx = order.find(r[0])
1524 1527 if idx == -1:
1525 1528 return (1, r[1])
1526 1529 else:
1527 1530 return (0, idx)
1528 1531 v1records.sort(key=key)
1529 1532 v2records.sort(key=key)
1530 1533
1531 1534 if not v1records and not v2records:
1532 1535 ui.write(('no merge state found\n'))
1533 1536 elif not v2records:
1534 1537 ui.note(('no version 2 merge state\n'))
1535 1538 printrecords(1)
1536 1539 elif ms._v1v2match(v1records, v2records):
1537 1540 ui.note(('v1 and v2 states match: using v2\n'))
1538 1541 printrecords(2)
1539 1542 else:
1540 1543 ui.note(('v1 and v2 states mismatch: using v1\n'))
1541 1544 printrecords(1)
1542 1545 if ui.verbose:
1543 1546 printrecords(2)
1544 1547
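# A rough sketch of the v2 file ('F') record layout parsed by printrecords()
# above (fields are NUL-separated; names follow the local variables):
#
#   filename \0 state \0 hash \0 local path \0 ancestor path \0 ancestor node
#     \0 other path \0 other node \0 flags
#
# v1 records carry the same leading fields but store the flags in place of
# the other node.
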
1545 1548 @command('debugnamecomplete', [], _('NAME...'))
1546 1549 def debugnamecomplete(ui, repo, *args):
1547 1550 '''complete "names" - tags, open branch names, bookmark names'''
1548 1551
1549 1552 names = set()
1550 1553 # since we previously only listed open branches, we will handle that
1551 1554 # specially (after this for loop)
1552 1555 for name, ns in repo.names.iteritems():
1553 1556 if name != 'branches':
1554 1557 names.update(ns.listnames(repo))
1555 1558 names.update(tag for (tag, heads, tip, closed)
1556 1559 in repo.branchmap().iterbranches() if not closed)
1557 1560 completions = set()
1558 1561 if not args:
1559 1562 args = ['']
1560 1563 for a in args:
1561 1564 completions.update(n for n in names if n.startswith(a))
1562 1565 ui.write('\n'.join(sorted(completions)))
1563 1566 ui.write('\n')
1564 1567
1565 1568 @command('debugobsolete',
1566 1569 [('', 'flags', 0, _('markers flag')),
1567 1570 ('', 'record-parents', False,
1568 1571 _('record parent information for the precursor')),
1569 1572 ('r', 'rev', [], _('display markers relevant to REV')),
1570 1573 ('', 'exclusive', False, _('restrict display to markers only '
1571 1574 'relevant to REV')),
1572 1575 ('', 'index', False, _('display index of the marker')),
1573 1576 ('', 'delete', [], _('delete markers specified by indices')),
1574 1577 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1575 1578 _('[OBSOLETED [REPLACEMENT ...]]'))
1576 1579 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
 1577 1580 """create an arbitrary obsolete marker
1578 1581
1579 1582 With no arguments, displays the list of obsolescence markers."""
1580 1583
1581 1584 opts = pycompat.byteskwargs(opts)
1582 1585
1583 1586 def parsenodeid(s):
1584 1587 try:
1585 1588 # We do not use revsingle/revrange functions here to accept
1586 1589 # arbitrary node identifiers, possibly not present in the
1587 1590 # local repository.
1588 1591 n = bin(s)
1589 1592 if len(n) != len(nullid):
1590 1593 raise TypeError()
1591 1594 return n
1592 1595 except TypeError:
1593 1596 raise error.Abort('changeset references must be full hexadecimal '
1594 1597 'node identifiers')
1595 1598
1596 1599 if opts.get('delete'):
1597 1600 indices = []
1598 1601 for v in opts.get('delete'):
1599 1602 try:
1600 1603 indices.append(int(v))
1601 1604 except ValueError:
1602 1605 raise error.Abort(_('invalid index value: %r') % v,
1603 1606 hint=_('use integers for indices'))
1604 1607
1605 1608 if repo.currenttransaction():
 1606 1609 raise error.Abort(_('cannot delete obsmarkers in the middle '
 1607 1610 'of a transaction.'))
1608 1611
1609 1612 with repo.lock():
1610 1613 n = repair.deleteobsmarkers(repo.obsstore, indices)
1611 1614 ui.write(_('deleted %i obsolescence markers\n') % n)
1612 1615
1613 1616 return
1614 1617
1615 1618 if precursor is not None:
1616 1619 if opts['rev']:
1617 1620 raise error.Abort('cannot select revision when creating marker')
1618 1621 metadata = {}
1619 1622 metadata['user'] = opts['user'] or ui.username()
1620 1623 succs = tuple(parsenodeid(succ) for succ in successors)
1621 1624 l = repo.lock()
1622 1625 try:
1623 1626 tr = repo.transaction('debugobsolete')
1624 1627 try:
1625 1628 date = opts.get('date')
1626 1629 if date:
1627 1630 date = dateutil.parsedate(date)
1628 1631 else:
1629 1632 date = None
1630 1633 prec = parsenodeid(precursor)
1631 1634 parents = None
1632 1635 if opts['record_parents']:
1633 1636 if prec not in repo.unfiltered():
 1634 1637 raise error.Abort('cannot use --record-parents on '
1635 1638 'unknown changesets')
1636 1639 parents = repo.unfiltered()[prec].parents()
1637 1640 parents = tuple(p.node() for p in parents)
1638 1641 repo.obsstore.create(tr, prec, succs, opts['flags'],
1639 1642 parents=parents, date=date,
1640 1643 metadata=metadata, ui=ui)
1641 1644 tr.close()
1642 1645 except ValueError as exc:
1643 1646 raise error.Abort(_('bad obsmarker input: %s') %
1644 1647 pycompat.bytestr(exc))
1645 1648 finally:
1646 1649 tr.release()
1647 1650 finally:
1648 1651 l.release()
1649 1652 else:
1650 1653 if opts['rev']:
1651 1654 revs = scmutil.revrange(repo, opts['rev'])
1652 1655 nodes = [repo[r].node() for r in revs]
1653 1656 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1654 1657 exclusive=opts['exclusive']))
1655 1658 markers.sort(key=lambda x: x._data)
1656 1659 else:
1657 1660 markers = obsutil.getmarkers(repo)
1658 1661
1659 1662 markerstoiter = markers
1660 1663 isrelevant = lambda m: True
1661 1664 if opts.get('rev') and opts.get('index'):
1662 1665 markerstoiter = obsutil.getmarkers(repo)
1663 1666 markerset = set(markers)
1664 1667 isrelevant = lambda m: m in markerset
1665 1668
1666 1669 fm = ui.formatter('debugobsolete', opts)
1667 1670 for i, m in enumerate(markerstoiter):
1668 1671 if not isrelevant(m):
1669 1672 # marker can be irrelevant when we're iterating over a set
1670 1673 # of markers (markerstoiter) which is bigger than the set
1671 1674 # of markers we want to display (markers)
1672 1675 # this can happen if both --index and --rev options are
1673 1676 # provided and thus we need to iterate over all of the markers
1674 1677 # to get the correct indices, but only display the ones that
1675 1678 # are relevant to --rev value
1676 1679 continue
1677 1680 fm.startitem()
1678 1681 ind = i if opts.get('index') else None
1679 1682 cmdutil.showmarker(fm, m, index=ind)
1680 1683 fm.end()
1681 1684
1682 1685 @command('debugpathcomplete',
1683 1686 [('f', 'full', None, _('complete an entire path')),
1684 1687 ('n', 'normal', None, _('show only normal files')),
1685 1688 ('a', 'added', None, _('show only added files')),
1686 1689 ('r', 'removed', None, _('show only removed files'))],
1687 1690 _('FILESPEC...'))
1688 1691 def debugpathcomplete(ui, repo, *specs, **opts):
1689 1692 '''complete part or all of a tracked path
1690 1693
1691 1694 This command supports shells that offer path name completion. It
1692 1695 currently completes only files already known to the dirstate.
1693 1696
1694 1697 Completion extends only to the next path segment unless
1695 1698 --full is specified, in which case entire paths are used.'''
1696 1699
1697 1700 def complete(path, acceptable):
1698 1701 dirstate = repo.dirstate
1699 1702 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1700 1703 rootdir = repo.root + pycompat.ossep
1701 1704 if spec != repo.root and not spec.startswith(rootdir):
1702 1705 return [], []
1703 1706 if os.path.isdir(spec):
1704 1707 spec += '/'
1705 1708 spec = spec[len(rootdir):]
1706 1709 fixpaths = pycompat.ossep != '/'
1707 1710 if fixpaths:
1708 1711 spec = spec.replace(pycompat.ossep, '/')
1709 1712 speclen = len(spec)
1710 1713 fullpaths = opts[r'full']
1711 1714 files, dirs = set(), set()
1712 1715 adddir, addfile = dirs.add, files.add
1713 1716 for f, st in dirstate.iteritems():
1714 1717 if f.startswith(spec) and st[0] in acceptable:
1715 1718 if fixpaths:
1716 1719 f = f.replace('/', pycompat.ossep)
1717 1720 if fullpaths:
1718 1721 addfile(f)
1719 1722 continue
1720 1723 s = f.find(pycompat.ossep, speclen)
1721 1724 if s >= 0:
1722 1725 adddir(f[:s])
1723 1726 else:
1724 1727 addfile(f)
1725 1728 return files, dirs
1726 1729
1727 1730 acceptable = ''
1728 1731 if opts[r'normal']:
1729 1732 acceptable += 'nm'
1730 1733 if opts[r'added']:
1731 1734 acceptable += 'a'
1732 1735 if opts[r'removed']:
1733 1736 acceptable += 'r'
1734 1737 cwd = repo.getcwd()
1735 1738 if not specs:
1736 1739 specs = ['.']
1737 1740
1738 1741 files, dirs = set(), set()
1739 1742 for spec in specs:
1740 1743 f, d = complete(spec, acceptable or 'nmar')
1741 1744 files.update(f)
1742 1745 dirs.update(d)
1743 1746 files.update(dirs)
1744 1747 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1745 1748 ui.write('\n')
1746 1749
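# A rough sketch of the completion behaviour above: with tracked files
# 'dir/a.txt' and 'dir/sub/b.txt', completing 'dir/' offers 'dir/a.txt' and
# the next segment 'dir/sub'; with --full it offers both full paths instead.
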
1747 1750 @command('debugpeer', [], _('PATH'), norepo=True)
1748 1751 def debugpeer(ui, path):
1749 1752 """establish a connection to a peer repository"""
1750 1753 # Always enable peer request logging. Requires --debug to display
1751 1754 # though.
1752 1755 overrides = {
1753 1756 ('devel', 'debug.peer-request'): True,
1754 1757 }
1755 1758
1756 1759 with ui.configoverride(overrides):
1757 1760 peer = hg.peer(ui, {}, path)
1758 1761
1759 1762 local = peer.local() is not None
1760 1763 canpush = peer.canpush()
1761 1764
1762 1765 ui.write(_('url: %s\n') % peer.url())
1763 1766 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1764 1767 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1765 1768
1766 1769 @command('debugpickmergetool',
1767 1770 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1768 1771 ('', 'changedelete', None, _('emulate merging change and delete')),
1769 1772 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1770 1773 _('[PATTERN]...'),
1771 1774 inferrepo=True)
1772 1775 def debugpickmergetool(ui, repo, *pats, **opts):
 1773 1776 """examine which merge tool is chosen for the specified file
1774 1777
 1775 1778 As described in :hg:`help merge-tools`, Mercurial examines the
 1776 1779 configurations below in this order to decide which merge tool is
 1777 1780 chosen for the specified file.
1778 1781
1779 1782 1. ``--tool`` option
1780 1783 2. ``HGMERGE`` environment variable
1781 1784 3. configurations in ``merge-patterns`` section
1782 1785 4. configuration of ``ui.merge``
1783 1786 5. configurations in ``merge-tools`` section
 1784 1787 6. ``hgmerge`` tool (for historical reasons only)
1785 1788 7. default tool for fallback (``:merge`` or ``:prompt``)
1786 1789
 1787 1790 This command writes out the examination result in the style below::
1788 1791
1789 1792 FILE = MERGETOOL
1790 1793
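 For instance, output for two hypothetical files might look like::

   foo.c = kdiff3
   data.bin = :prompt
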
1791 1794 By default, all files known in the first parent context of the
1792 1795 working directory are examined. Use file patterns and/or -I/-X
1793 1796 options to limit target files. -r/--rev is also useful to examine
 1794 1797 files in another context without actually updating to it.
1795 1798
 1796 1799 With --debug, this command also shows warning messages emitted
 1797 1800 while matching against ``merge-patterns`` and so on. It is
 1798 1801 recommended to use this option with explicit file patterns and/or
 1799 1802 -I/-X options, because this option increases the amount of output
 1800 1803 per file according to configurations in hgrc.
1801 1804
 1802 1805 With -v/--verbose, this command first shows the configurations
 1803 1806 below (only if they are specified).
1804 1807
1805 1808 - ``--tool`` option
1806 1809 - ``HGMERGE`` environment variable
1807 1810 - configuration of ``ui.merge``
1808 1811
 1809 1812 If a merge tool is chosen before matching against
 1810 1813 ``merge-patterns``, this command can't show any helpful
 1811 1814 information, even with --debug. In such cases, the information
 1812 1815 above is useful for understanding why a merge tool was chosen.
1813 1816 """
1814 1817 opts = pycompat.byteskwargs(opts)
1815 1818 overrides = {}
1816 1819 if opts['tool']:
1817 1820 overrides[('ui', 'forcemerge')] = opts['tool']
1818 1821 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1819 1822
1820 1823 with ui.configoverride(overrides, 'debugmergepatterns'):
1821 1824 hgmerge = encoding.environ.get("HGMERGE")
1822 1825 if hgmerge is not None:
1823 1826 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1824 1827 uimerge = ui.config("ui", "merge")
1825 1828 if uimerge:
1826 1829 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1827 1830
1828 1831 ctx = scmutil.revsingle(repo, opts.get('rev'))
1829 1832 m = scmutil.match(ctx, pats, opts)
1830 1833 changedelete = opts['changedelete']
1831 1834 for path in ctx.walk(m):
1832 1835 fctx = ctx[path]
1833 1836 try:
1834 1837 if not ui.debugflag:
1835 1838 ui.pushbuffer(error=True)
1836 1839 tool, toolpath = filemerge._picktool(repo, ui, path,
1837 1840 fctx.isbinary(),
1838 1841 'l' in fctx.flags(),
1839 1842 changedelete)
1840 1843 finally:
1841 1844 if not ui.debugflag:
1842 1845 ui.popbuffer()
1843 1846 ui.write(('%s = %s\n') % (path, tool))
1844 1847
1845 1848 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1846 1849 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1847 1850 '''access the pushkey key/value protocol
1848 1851
1849 1852 With two args, list the keys in the given namespace.
1850 1853
1851 1854 With five args, set a key to new if it currently is set to old.
1852 1855 Reports success or failure.
1853 1856 '''
1854 1857
1855 1858 target = hg.peer(ui, {}, repopath)
1856 1859 if keyinfo:
1857 1860 key, old, new = keyinfo
1858 1861 with target.commandexecutor() as e:
1859 1862 r = e.callcommand('pushkey', {
1860 1863 'namespace': namespace,
1861 1864 'key': key,
1862 1865 'old': old,
1863 1866 'new': new,
1864 1867 }).result()
1865 1868
1866 1869 ui.status(pycompat.bytestr(r) + '\n')
1867 1870 return not r
1868 1871 else:
1869 1872 for k, v in sorted(target.listkeys(namespace).iteritems()):
1870 1873 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1871 1874 stringutil.escapestr(v)))
1872 1875
1873 1876 @command('debugpvec', [], _('A B'))
1874 1877 def debugpvec(ui, repo, a, b=None):
1875 1878 ca = scmutil.revsingle(repo, a)
1876 1879 cb = scmutil.revsingle(repo, b)
1877 1880 pa = pvec.ctxpvec(ca)
1878 1881 pb = pvec.ctxpvec(cb)
1879 1882 if pa == pb:
1880 1883 rel = "="
1881 1884 elif pa > pb:
1882 1885 rel = ">"
1883 1886 elif pa < pb:
1884 1887 rel = "<"
1885 1888 elif pa | pb:
1886 1889 rel = "|"
1887 1890 ui.write(_("a: %s\n") % pa)
1888 1891 ui.write(_("b: %s\n") % pb)
1889 1892 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1890 1893 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1891 1894 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1892 1895 pa.distance(pb), rel))
1893 1896
1894 1897 @command('debugrebuilddirstate|debugrebuildstate',
1895 1898 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1896 1899 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1897 1900 'the working copy parent')),
1898 1901 ],
1899 1902 _('[-r REV]'))
1900 1903 def debugrebuilddirstate(ui, repo, rev, **opts):
1901 1904 """rebuild the dirstate as it would look like for the given revision
1902 1905
 1903 1906 If no revision is specified, the first current parent will be used.
1904 1907
1905 1908 The dirstate will be set to the files of the given revision.
1906 1909 The actual working directory content or existing dirstate
1907 1910 information such as adds or removes is not considered.
1908 1911
1909 1912 ``minimal`` will only rebuild the dirstate status for files that claim to be
1910 1913 tracked but are not in the parent manifest, or that exist in the parent
1911 1914 manifest but are not in the dirstate. It will not change adds, removes, or
1912 1915 modified files that are in the working copy parent.
1913 1916
1914 1917 One use of this command is to make the next :hg:`status` invocation
1915 1918 check the actual file content.
1916 1919 """
1917 1920 ctx = scmutil.revsingle(repo, rev)
1918 1921 with repo.wlock():
1919 1922 dirstate = repo.dirstate
1920 1923 changedfiles = None
1921 1924 # See command doc for what minimal does.
1922 1925 if opts.get(r'minimal'):
1923 1926 manifestfiles = set(ctx.manifest().keys())
1924 1927 dirstatefiles = set(dirstate)
1925 1928 manifestonly = manifestfiles - dirstatefiles
1926 1929 dsonly = dirstatefiles - manifestfiles
1927 1930 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1928 1931 changedfiles = manifestonly | dsnotadded
1929 1932
1930 1933 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1931 1934
1932 1935 @command('debugrebuildfncache', [], '')
1933 1936 def debugrebuildfncache(ui, repo):
1934 1937 """rebuild the fncache file"""
1935 1938 repair.rebuildfncache(ui, repo)
1936 1939
1937 1940 @command('debugrename',
1938 1941 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1939 1942 _('[-r REV] FILE'))
1940 1943 def debugrename(ui, repo, file1, *pats, **opts):
1941 1944 """dump rename information"""
1942 1945
1943 1946 opts = pycompat.byteskwargs(opts)
1944 1947 ctx = scmutil.revsingle(repo, opts.get('rev'))
1945 1948 m = scmutil.match(ctx, (file1,) + pats, opts)
1946 1949 for abs in ctx.walk(m):
1947 1950 fctx = ctx[abs]
1948 1951 o = fctx.filelog().renamed(fctx.filenode())
1949 1952 rel = m.rel(abs)
1950 1953 if o:
1951 1954 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1952 1955 else:
1953 1956 ui.write(_("%s not renamed\n") % rel)
1954 1957
1955 1958 @command('debugrevlog', cmdutil.debugrevlogopts +
1956 1959 [('d', 'dump', False, _('dump index data'))],
1957 1960 _('-c|-m|FILE'),
1958 1961 optionalrepo=True)
1959 1962 def debugrevlog(ui, repo, file_=None, **opts):
1960 1963 """show data and statistics about a revlog"""
1961 1964 opts = pycompat.byteskwargs(opts)
1962 1965 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1963 1966
1964 1967 if opts.get("dump"):
1965 1968 numrevs = len(r)
1966 1969 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1967 1970 " rawsize totalsize compression heads chainlen\n"))
1968 1971 ts = 0
1969 1972 heads = set()
1970 1973
1971 1974 for rev in xrange(numrevs):
1972 1975 dbase = r.deltaparent(rev)
1973 1976 if dbase == -1:
1974 1977 dbase = rev
1975 1978 cbase = r.chainbase(rev)
1976 1979 clen = r.chainlen(rev)
1977 1980 p1, p2 = r.parentrevs(rev)
1978 1981 rs = r.rawsize(rev)
1979 1982 ts = ts + rs
1980 1983 heads -= set(r.parentrevs(rev))
1981 1984 heads.add(rev)
1982 1985 try:
1983 1986 compression = ts / r.end(rev)
1984 1987 except ZeroDivisionError:
1985 1988 compression = 0
1986 1989 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1987 1990 "%11d %5d %8d\n" %
1988 1991 (rev, p1, p2, r.start(rev), r.end(rev),
1989 1992 r.start(dbase), r.start(cbase),
1990 1993 r.start(p1), r.start(p2),
1991 1994 rs, ts, compression, len(heads), clen))
1992 1995 return 0
1993 1996
1994 1997 v = r.version
1995 1998 format = v & 0xFFFF
1996 1999 flags = []
1997 2000 gdelta = False
1998 2001 if v & revlog.FLAG_INLINE_DATA:
1999 2002 flags.append('inline')
2000 2003 if v & revlog.FLAG_GENERALDELTA:
2001 2004 gdelta = True
2002 2005 flags.append('generaldelta')
2003 2006 if not flags:
2004 2007 flags = ['(none)']
2005 2008
2006 2009 nummerges = 0
2007 2010 numfull = 0
2008 2011 numprev = 0
2009 2012 nump1 = 0
2010 2013 nump2 = 0
2011 2014 numother = 0
2012 2015 nump1prev = 0
2013 2016 nump2prev = 0
2014 2017 chainlengths = []
2015 2018 chainbases = []
2016 2019 chainspans = []
2017 2020
2018 2021 datasize = [None, 0, 0]
2019 2022 fullsize = [None, 0, 0]
2020 2023 deltasize = [None, 0, 0]
2021 2024 chunktypecounts = {}
2022 2025 chunktypesizes = {}
2023 2026
2024 2027 def addsize(size, l):
2025 2028 if l[0] is None or size < l[0]:
2026 2029 l[0] = size
2027 2030 if size > l[1]:
2028 2031 l[1] = size
2029 2032 l[2] += size
2030 2033
2031 2034 numrevs = len(r)
2032 2035 for rev in xrange(numrevs):
2033 2036 p1, p2 = r.parentrevs(rev)
2034 2037 delta = r.deltaparent(rev)
2035 2038 if format > 0:
2036 2039 addsize(r.rawsize(rev), datasize)
2037 2040 if p2 != nullrev:
2038 2041 nummerges += 1
2039 2042 size = r.length(rev)
2040 2043 if delta == nullrev:
2041 2044 chainlengths.append(0)
2042 2045 chainbases.append(r.start(rev))
2043 2046 chainspans.append(size)
2044 2047 numfull += 1
2045 2048 addsize(size, fullsize)
2046 2049 else:
2047 2050 chainlengths.append(chainlengths[delta] + 1)
2048 2051 baseaddr = chainbases[delta]
2049 2052 revaddr = r.start(rev)
2050 2053 chainbases.append(baseaddr)
2051 2054 chainspans.append((revaddr - baseaddr) + size)
2052 2055 addsize(size, deltasize)
2053 2056 if delta == rev - 1:
2054 2057 numprev += 1
2055 2058 if delta == p1:
2056 2059 nump1prev += 1
2057 2060 elif delta == p2:
2058 2061 nump2prev += 1
2059 2062 elif delta == p1:
2060 2063 nump1 += 1
2061 2064 elif delta == p2:
2062 2065 nump2 += 1
2063 2066 elif delta != nullrev:
2064 2067 numother += 1
2065 2068
2066 2069 # Obtain data on the raw chunks in the revlog.
2067 2070 segment = r._getsegmentforrevs(rev, rev)[1]
2068 2071 if segment:
2069 2072 chunktype = bytes(segment[0:1])
2070 2073 else:
2071 2074 chunktype = 'empty'
2072 2075
2073 2076 if chunktype not in chunktypecounts:
2074 2077 chunktypecounts[chunktype] = 0
2075 2078 chunktypesizes[chunktype] = 0
2076 2079
2077 2080 chunktypecounts[chunktype] += 1
2078 2081 chunktypesizes[chunktype] += size
2079 2082
2080 2083 # Adjust size min value for empty cases
2081 2084 for size in (datasize, fullsize, deltasize):
2082 2085 if size[0] is None:
2083 2086 size[0] = 0
2084 2087
2085 2088 numdeltas = numrevs - numfull
2086 2089 numoprev = numprev - nump1prev - nump2prev
2087 2090 totalrawsize = datasize[2]
2088 2091 datasize[2] /= numrevs
2089 2092 fulltotal = fullsize[2]
2090 2093 fullsize[2] /= numfull
2091 2094 deltatotal = deltasize[2]
2092 2095 if numrevs - numfull > 0:
2093 2096 deltasize[2] /= numrevs - numfull
2094 2097 totalsize = fulltotal + deltatotal
2095 2098 avgchainlen = sum(chainlengths) / numrevs
2096 2099 maxchainlen = max(chainlengths)
2097 2100 maxchainspan = max(chainspans)
2098 2101 compratio = 1
2099 2102 if totalsize:
2100 2103 compratio = totalrawsize / totalsize
2101 2104
2102 2105 basedfmtstr = '%%%dd\n'
2103 2106 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2104 2107
2105 2108 def dfmtstr(max):
2106 2109 return basedfmtstr % len(str(max))
2107 2110 def pcfmtstr(max, padding=0):
2108 2111 return basepcfmtstr % (len(str(max)), ' ' * padding)
2109 2112
2110 2113 def pcfmt(value, total):
2111 2114 if total:
2112 2115 return (value, 100 * float(value) / total)
2113 2116 else:
2114 2117 return value, 100.0
2115 2118
2116 2119 ui.write(('format : %d\n') % format)
2117 2120 ui.write(('flags : %s\n') % ', '.join(flags))
2118 2121
2119 2122 ui.write('\n')
2120 2123 fmt = pcfmtstr(totalsize)
2121 2124 fmt2 = dfmtstr(totalsize)
2122 2125 ui.write(('revisions : ') + fmt2 % numrevs)
2123 2126 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2124 2127 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2125 2128 ui.write(('revisions : ') + fmt2 % numrevs)
2126 2129 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2127 2130 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2128 2131 ui.write(('revision size : ') + fmt2 % totalsize)
2129 2132 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2130 2133 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2131 2134
2132 2135 def fmtchunktype(chunktype):
2133 2136 if chunktype == 'empty':
2134 2137 return ' %s : ' % chunktype
2135 2138 elif chunktype in pycompat.bytestr(string.ascii_letters):
2136 2139 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2137 2140 else:
2138 2141 return ' 0x%s : ' % hex(chunktype)
2139 2142
2140 2143 ui.write('\n')
2141 2144 ui.write(('chunks : ') + fmt2 % numrevs)
2142 2145 for chunktype in sorted(chunktypecounts):
2143 2146 ui.write(fmtchunktype(chunktype))
2144 2147 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2145 2148 ui.write(('chunks size : ') + fmt2 % totalsize)
2146 2149 for chunktype in sorted(chunktypecounts):
2147 2150 ui.write(fmtchunktype(chunktype))
2148 2151 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2149 2152
2150 2153 ui.write('\n')
2151 2154 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2152 2155 ui.write(('avg chain length : ') + fmt % avgchainlen)
2153 2156 ui.write(('max chain length : ') + fmt % maxchainlen)
2154 2157 ui.write(('max chain reach : ') + fmt % maxchainspan)
2155 2158 ui.write(('compression ratio : ') + fmt % compratio)
2156 2159
2157 2160 if format > 0:
2158 2161 ui.write('\n')
2159 2162 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2160 2163 % tuple(datasize))
2161 2164 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2162 2165 % tuple(fullsize))
2163 2166 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2164 2167 % tuple(deltasize))
2165 2168
2166 2169 if numdeltas > 0:
2167 2170 ui.write('\n')
2168 2171 fmt = pcfmtstr(numdeltas)
2169 2172 fmt2 = pcfmtstr(numdeltas, 4)
2170 2173 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2171 2174 if numprev > 0:
2172 2175 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2173 2176 numprev))
2174 2177 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2175 2178 numprev))
2176 2179 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2177 2180 numprev))
2178 2181 if gdelta:
2179 2182 ui.write(('deltas against p1 : ')
2180 2183 + fmt % pcfmt(nump1, numdeltas))
2181 2184 ui.write(('deltas against p2 : ')
2182 2185 + fmt % pcfmt(nump2, numdeltas))
2183 2186 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2184 2187 numdeltas))
2185 2188
2186 2189 @command('debugrevspec',
2187 2190 [('', 'optimize', None,
2188 2191 _('print parsed tree after optimizing (DEPRECATED)')),
2189 2192 ('', 'show-revs', True, _('print list of result revisions (default)')),
2190 2193 ('s', 'show-set', None, _('print internal representation of result set')),
2191 2194 ('p', 'show-stage', [],
2192 2195 _('print parsed tree at the given stage'), _('NAME')),
2193 2196 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2194 2197 ('', 'verify-optimized', False, _('verify optimized result')),
2195 2198 ],
2196 2199 ('REVSPEC'))
2197 2200 def debugrevspec(ui, repo, expr, **opts):
2198 2201 """parse and apply a revision specification
2199 2202
 2200 2203 Use the -p/--show-stage option to print the parsed tree at the given stages.
 2201 2204 Use -p all to print the tree at every stage.
2202 2205
 2203 2206 Use the --no-show-revs option with -s or -p to print only the set
 2204 2207 representation or the parsed tree, respectively.
2205 2208
2206 2209 Use --verify-optimized to compare the optimized result with the unoptimized
2207 2210 one. Returns 1 if the optimized result differs.
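
 For example, to print the parsed tree at every stage for an
 illustrative expression::

   hg debugrevspec -p all '::tip and not public()'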
2208 2211 """
2209 2212 opts = pycompat.byteskwargs(opts)
2210 2213 aliases = ui.configitems('revsetalias')
2211 2214 stages = [
2212 2215 ('parsed', lambda tree: tree),
2213 2216 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2214 2217 ui.warn)),
2215 2218 ('concatenated', revsetlang.foldconcat),
2216 2219 ('analyzed', revsetlang.analyze),
2217 2220 ('optimized', revsetlang.optimize),
2218 2221 ]
2219 2222 if opts['no_optimized']:
2220 2223 stages = stages[:-1]
2221 2224 if opts['verify_optimized'] and opts['no_optimized']:
2222 2225 raise error.Abort(_('cannot use --verify-optimized with '
2223 2226 '--no-optimized'))
2224 2227 stagenames = set(n for n, f in stages)
2225 2228
2226 2229 showalways = set()
2227 2230 showchanged = set()
2228 2231 if ui.verbose and not opts['show_stage']:
2229 2232 # show parsed tree by --verbose (deprecated)
2230 2233 showalways.add('parsed')
2231 2234 showchanged.update(['expanded', 'concatenated'])
2232 2235 if opts['optimize']:
2233 2236 showalways.add('optimized')
2234 2237 if opts['show_stage'] and opts['optimize']:
2235 2238 raise error.Abort(_('cannot use --optimize with --show-stage'))
2236 2239 if opts['show_stage'] == ['all']:
2237 2240 showalways.update(stagenames)
2238 2241 else:
2239 2242 for n in opts['show_stage']:
2240 2243 if n not in stagenames:
2241 2244 raise error.Abort(_('invalid stage name: %s') % n)
2242 2245 showalways.update(opts['show_stage'])
2243 2246
2244 2247 treebystage = {}
2245 2248 printedtree = None
2246 2249 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2247 2250 for n, f in stages:
2248 2251 treebystage[n] = tree = f(tree)
2249 2252 if n in showalways or (n in showchanged and tree != printedtree):
2250 2253 if opts['show_stage'] or n != 'parsed':
2251 2254 ui.write(("* %s:\n") % n)
2252 2255 ui.write(revsetlang.prettyformat(tree), "\n")
2253 2256 printedtree = tree
2254 2257
2255 2258 if opts['verify_optimized']:
2256 2259 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2257 2260 brevs = revset.makematcher(treebystage['optimized'])(repo)
2258 2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2259 2262 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2260 2263 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2261 2264 arevs = list(arevs)
2262 2265 brevs = list(brevs)
2263 2266 if arevs == brevs:
2264 2267 return 0
2265 2268 ui.write(('--- analyzed\n'), label='diff.file_a')
2266 2269 ui.write(('+++ optimized\n'), label='diff.file_b')
2267 2270 sm = difflib.SequenceMatcher(None, arevs, brevs)
2268 2271 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2269 2272 if tag in ('delete', 'replace'):
2270 2273 for c in arevs[alo:ahi]:
2271 2274 ui.write('-%s\n' % c, label='diff.deleted')
2272 2275 if tag in ('insert', 'replace'):
2273 2276 for c in brevs[blo:bhi]:
2274 2277 ui.write('+%s\n' % c, label='diff.inserted')
2275 2278 if tag == 'equal':
2276 2279 for c in arevs[alo:ahi]:
2277 2280 ui.write(' %s\n' % c)
2278 2281 return 1
2279 2282
2280 2283 func = revset.makematcher(tree)
2281 2284 revs = func(repo)
2282 2285 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2283 2286 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2284 2287 if not opts['show_revs']:
2285 2288 return
2286 2289 for c in revs:
2287 2290 ui.write("%d\n" % c)
2288 2291
2289 2292 @command('debugserve', [
2290 2293 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2291 2294 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2292 2295 ('', 'logiofile', '', _('file to log server I/O to')),
2293 2296 ], '')
2294 2297 def debugserve(ui, repo, **opts):
2295 2298 """run a server with advanced settings
2296 2299
2297 2300 This command is similar to :hg:`serve`. It exists partially as a
2298 2301 workaround to the fact that ``hg serve --stdio`` must have specific
2299 2302 arguments for security reasons.
2300 2303 """
2301 2304 opts = pycompat.byteskwargs(opts)
2302 2305
2303 2306 if not opts['sshstdio']:
2304 2307 raise error.Abort(_('only --sshstdio is currently supported'))
2305 2308
2306 2309 logfh = None
2307 2310
2308 2311 if opts['logiofd'] and opts['logiofile']:
2309 2312 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2310 2313
2311 2314 if opts['logiofd']:
2312 2315 # Line buffered because output is line based.
2313 2316 try:
2314 2317 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2315 2318 except OSError as e:
2316 2319 if e.errno != errno.ESPIPE:
2317 2320 raise
2318 2321 # can't seek a pipe, so `ab` mode fails on py3
2319 2322 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2320 2323 elif opts['logiofile']:
2321 2324 logfh = open(opts['logiofile'], 'ab', 1)
2322 2325
2323 2326 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2324 2327 s.serve_forever()
2325 2328
2326 2329 @command('debugsetparents', [], _('REV1 [REV2]'))
2327 2330 def debugsetparents(ui, repo, rev1, rev2=None):
2328 2331 """manually set the parents of the current working directory
2329 2332
2330 2333 This is useful for writing repository conversion tools, but should
2331 2334 be used with care. For example, neither the working directory nor the
2332 2335 dirstate is updated, so file status may be incorrect after running this
2333 2336 command.
2334 2337
2335 2338 Returns 0 on success.
2336 2339 """
2337 2340
2338 2341 node1 = scmutil.revsingle(repo, rev1).node()
2339 2342 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2340 2343
2341 2344 with repo.wlock():
2342 2345 repo.setparents(node1, node2)
2343 2346
2344 2347 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2345 2348 def debugssl(ui, repo, source=None, **opts):
2346 2349 '''test a secure connection to a server
2347 2350
2348 2351 This builds the certificate chain for the server on Windows, installing the
2349 2352 missing intermediates and trusted root via Windows Update if necessary. It
2350 2353 does nothing on other platforms.
2351 2354
2352 2355 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2353 2356 that server is used. See :hg:`help urls` for more information.
2354 2357
2355 2358 If the update succeeds, retry the original operation. Otherwise, the cause
2356 2359 of the SSL error is likely another issue.
2357 2360 '''
2358 2361 if not pycompat.iswindows:
2359 2362 raise error.Abort(_('certificate chain building is only possible on '
2360 2363 'Windows'))
2361 2364
2362 2365 if not source:
2363 2366 if not repo:
2364 2367 raise error.Abort(_("there is no Mercurial repository here, and no "
2365 2368 "server specified"))
2366 2369 source = "default"
2367 2370
2368 2371 source, branches = hg.parseurl(ui.expandpath(source))
2369 2372 url = util.url(source)
2370 2373 addr = None
2371 2374
2372 2375 defaultport = {'https': 443, 'ssh': 22}
2373 2376 if url.scheme in defaultport:
2374 2377 try:
2375 2378 addr = (url.host, int(url.port or defaultport[url.scheme]))
2376 2379 except ValueError:
2377 2380 raise error.Abort(_("malformed port number in URL"))
2378 2381 else:
2379 2382 raise error.Abort(_("only https and ssh connections are supported"))
2380 2383
2381 2384 from . import win32
2382 2385
2383 2386 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2384 2387 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2385 2388
2386 2389 try:
2387 2390 s.connect(addr)
2388 2391 cert = s.getpeercert(True)
2389 2392
2390 2393 ui.status(_('checking the certificate chain for %s\n') % url.host)
2391 2394
2392 2395 complete = win32.checkcertificatechain(cert, build=False)
2393 2396
2394 2397 if not complete:
2395 2398 ui.status(_('certificate chain is incomplete, updating... '))
2396 2399
2397 2400 if not win32.checkcertificatechain(cert):
2398 2401 ui.status(_('failed.\n'))
2399 2402 else:
2400 2403 ui.status(_('done.\n'))
2401 2404 else:
2402 2405 ui.status(_('full certificate chain is available\n'))
2403 2406 finally:
2404 2407 s.close()
2405 2408
2406 2409 @command('debugsub',
2407 2410 [('r', 'rev', '',
2408 2411 _('revision to check'), _('REV'))],
2409 2412 _('[-r REV] [REV]'))
2410 2413 def debugsub(ui, repo, rev=None):
2411 2414 ctx = scmutil.revsingle(repo, rev, None)
2412 2415 for k, v in sorted(ctx.substate.items()):
2413 2416 ui.write(('path %s\n') % k)
2414 2417 ui.write((' source %s\n') % v[0])
2415 2418 ui.write((' revision %s\n') % v[1])
2416 2419
2417 2420 @command('debugsuccessorssets',
2418 2421 [('', 'closest', False, _('return closest successors sets only'))],
2419 2422 _('[REV]'))
2420 2423 def debugsuccessorssets(ui, repo, *revs, **opts):
2421 2424 """show set of successors for revision
2422 2425
2423 2426 A successors set of changeset A is a consistent group of revisions that
 2424 2427 succeed A. It contains non-obsolete changesets only unless the
 2425 2428 closest successors sets are requested (see --closest).
2426 2429
2427 2430 In most cases a changeset A has a single successors set containing a single
2428 2431 successor (changeset A replaced by A').
2429 2432
 2430 2433 A changeset that is made obsolete with no successors is called "pruned".
2431 2434 Such changesets have no successors sets at all.
2432 2435
2433 2436 A changeset that has been "split" will have a successors set containing
2434 2437 more than one successor.
2435 2438
2436 2439 A changeset that has been rewritten in multiple different ways is called
2437 2440 "divergent". Such changesets have multiple successor sets (each of which
2438 2441 may also be split, i.e. have multiple successors).
2439 2442
2440 2443 Results are displayed as follows::
2441 2444
2442 2445 <rev1>
2443 2446 <successors-1A>
2444 2447 <rev2>
2445 2448 <successors-2A>
2446 2449 <successors-2B1> <successors-2B2> <successors-2B3>
2447 2450
2448 2451 Here rev2 has two possible (i.e. divergent) successors sets. The first
2449 2452 holds one element, whereas the second holds three (i.e. the changeset has
2450 2453 been split).
2451 2454 """
2452 2455 # passed to successorssets caching computation from one call to another
2453 2456 cache = {}
2454 2457 ctx2str = bytes
2455 2458 node2str = short
2456 2459 for rev in scmutil.revrange(repo, revs):
2457 2460 ctx = repo[rev]
2458 2461 ui.write('%s\n'% ctx2str(ctx))
2459 2462 for succsset in obsutil.successorssets(repo, ctx.node(),
2460 2463 closest=opts[r'closest'],
2461 2464 cache=cache):
2462 2465 if succsset:
2463 2466 ui.write(' ')
2464 2467 ui.write(node2str(succsset[0]))
2465 2468 for node in succsset[1:]:
2466 2469 ui.write(' ')
2467 2470 ui.write(node2str(node))
2468 2471 ui.write('\n')
2469 2472
2470 2473 @command('debugtemplate',
2471 2474 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2472 2475 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2473 2476 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2474 2477 optionalrepo=True)
2475 2478 def debugtemplate(ui, repo, tmpl, **opts):
2476 2479 """parse and apply a template
2477 2480
2478 2481 If -r/--rev is given, the template is processed as a log template and
2479 2482 applied to the given changesets. Otherwise, it is processed as a generic
2480 2483 template.
2481 2484
2482 2485 Use --verbose to print the parsed tree.
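
 A minimal example (with an illustrative template)::

   hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'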
2483 2486 """
2484 2487 revs = None
2485 2488 if opts[r'rev']:
2486 2489 if repo is None:
2487 2490 raise error.RepoError(_('there is no Mercurial repository here '
2488 2491 '(.hg not found)'))
2489 2492 revs = scmutil.revrange(repo, opts[r'rev'])
2490 2493
2491 2494 props = {}
2492 2495 for d in opts[r'define']:
2493 2496 try:
2494 2497 k, v = (e.strip() for e in d.split('=', 1))
2495 2498 if not k or k == 'ui':
2496 2499 raise ValueError
2497 2500 props[k] = v
2498 2501 except ValueError:
2499 2502 raise error.Abort(_('malformed keyword definition: %s') % d)
2500 2503
2501 2504 if ui.verbose:
2502 2505 aliases = ui.configitems('templatealias')
2503 2506 tree = templater.parse(tmpl)
2504 2507 ui.note(templater.prettyformat(tree), '\n')
2505 2508 newtree = templater.expandaliases(tree, aliases)
2506 2509 if newtree != tree:
2507 2510 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2508 2511
2509 2512 if revs is None:
2510 2513 tres = formatter.templateresources(ui, repo)
2511 2514 t = formatter.maketemplater(ui, tmpl, resources=tres)
2512 2515 if ui.verbose:
2513 2516 kwds, funcs = t.symbolsuseddefault()
2514 2517 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2515 2518 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2516 2519 ui.write(t.renderdefault(props))
2517 2520 else:
2518 2521 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2519 2522 if ui.verbose:
2520 2523 kwds, funcs = displayer.t.symbolsuseddefault()
2521 2524 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2522 2525 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2523 2526 for r in revs:
2524 2527 displayer.show(repo[r], **pycompat.strkwargs(props))
2525 2528 displayer.close()
2526 2529
2527 2530 @command('debuguigetpass', [
2528 2531 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2529 2532 ], _('[-p TEXT]'), norepo=True)
2530 2533 def debuguigetpass(ui, prompt=''):
2531 2534 """show prompt to type password"""
2532 2535 r = ui.getpass(prompt)
 2533 2536 ui.write(('response: %s\n') % r)
2534 2537
2535 2538 @command('debuguiprompt', [
2536 2539 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2537 2540 ], _('[-p TEXT]'), norepo=True)
2538 2541 def debuguiprompt(ui, prompt=''):
2539 2542 """show plain prompt"""
2540 2543 r = ui.prompt(prompt)
2541 2544 ui.write(('response: %s\n') % r)
2542 2545
2543 2546 @command('debugupdatecaches', [])
2544 2547 def debugupdatecaches(ui, repo, *pats, **opts):
2545 2548 """warm all known caches in the repository"""
2546 2549 with repo.wlock(), repo.lock():
2547 2550 repo.updatecaches(full=True)
2548 2551
2549 2552 @command('debugupgraderepo', [
2550 2553 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2551 2554 ('', 'run', False, _('performs an upgrade')),
2552 2555 ])
2553 2556 def debugupgraderepo(ui, repo, run=False, optimize=None):
2554 2557 """upgrade a repository to use different features
2555 2558
2556 2559 If no arguments are specified, the repository is evaluated for upgrade
2557 2560 and a list of problems and potential optimizations is printed.
2558 2561
2559 2562 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2560 2563 can be influenced via additional arguments. More details will be provided
2561 2564 by the command output when run without ``--run``.
2562 2565
2563 2566 During the upgrade, the repository will be locked and no writes will be
2564 2567 allowed.
2565 2568
2566 2569 At the end of the upgrade, the repository may not be readable while new
2567 2570 repository data is swapped in. This window will be as long as it takes to
2568 2571 rename some directories inside the ``.hg`` directory. On most machines, this
2569 2572 should complete almost instantaneously and the chances of a consumer being
2570 2573 unable to access the repository should be low.
2571 2574 """
2572 2575 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2573 2576
2574 2577 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2575 2578 inferrepo=True)
2576 2579 def debugwalk(ui, repo, *pats, **opts):
2577 2580 """show how files match on given patterns"""
2578 2581 opts = pycompat.byteskwargs(opts)
2579 2582 m = scmutil.match(repo[None], pats, opts)
2580 2583 if ui.verbose:
2581 2584 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2582 2585 items = list(repo[None].walk(m))
2583 2586 if not items:
2584 2587 return
2585 2588 f = lambda fn: fn
2586 2589 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2587 2590 f = lambda fn: util.normpath(fn)
2588 2591 fmt = 'f %%-%ds %%-%ds %%s' % (
2589 2592 max([len(abs) for abs in items]),
2590 2593 max([len(m.rel(abs)) for abs in items]))
2591 2594 for abs in items:
2592 2595 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2593 2596 ui.write("%s\n" % line.rstrip())
2594 2597
2595 2598 @command('debugwhyunstable', [], _('REV'))
2596 2599 def debugwhyunstable(ui, repo, rev):
2597 2600 """explain instabilities of a changeset"""
2598 2601 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2599 2602 dnodes = ''
2600 2603 if entry.get('divergentnodes'):
2601 2604 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2602 2605 for ctx in entry['divergentnodes']) + ' '
2603 2606 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2604 2607 entry['reason'], entry['node']))
2605 2608
2606 2609 @command('debugwireargs',
2607 2610 [('', 'three', '', 'three'),
2608 2611 ('', 'four', '', 'four'),
2609 2612 ('', 'five', '', 'five'),
2610 2613 ] + cmdutil.remoteopts,
2611 2614 _('REPO [OPTIONS]... [ONE [TWO]]'),
2612 2615 norepo=True)
2613 2616 def debugwireargs(ui, repopath, *vals, **opts):
2614 2617 opts = pycompat.byteskwargs(opts)
2615 2618 repo = hg.peer(ui, opts, repopath)
2616 2619 for opt in cmdutil.remoteopts:
2617 2620 del opts[opt[1]]
2618 2621 args = {}
2619 2622 for k, v in opts.iteritems():
2620 2623 if v:
2621 2624 args[k] = v
2622 2625 args = pycompat.strkwargs(args)
2623 2626 # run twice to check that we don't mess up the stream for the next command
2624 2627 res1 = repo.debugwireargs(*vals, **args)
2625 2628 res2 = repo.debugwireargs(*vals, **args)
2626 2629 ui.write("%s\n" % res1)
2627 2630 if res1 != res2:
2628 2631 ui.warn("%s\n" % res2)
2629 2632
2630 2633 def _parsewirelangblocks(fh):
2631 2634 activeaction = None
2632 2635 blocklines = []
2633 2636
2634 2637 for line in fh:
2635 2638 line = line.rstrip()
2636 2639 if not line:
2637 2640 continue
2638 2641
2639 2642 if line.startswith(b'#'):
2640 2643 continue
2641 2644
2642 2645 if not line.startswith(' '):
2643 2646 # New block. Flush previous one.
2644 2647 if activeaction:
2645 2648 yield activeaction, blocklines
2646 2649
2647 2650 activeaction = line
2648 2651 blocklines = []
2649 2652 continue
2650 2653
2651 2654 # Else we start with an indent.
2652 2655
2653 2656 if not activeaction:
2654 2657 raise error.Abort(_('indented line outside of block'))
2655 2658
2656 2659 blocklines.append(line)
2657 2660
2658 2661 # Flush last block.
2659 2662 if activeaction:
2660 2663 yield activeaction, blocklines
2661 2664
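# A rough sketch of the block parsing above: input such as
#
#   command listkeys
#       namespace bookmarks
#   raw
#       0000
#
# yields ('command listkeys', [<indented argument line>]) and then
# ('raw', [<indented payload line>]). Indented lines keep their leading
# whitespace (only trailing whitespace is stripped); blank lines and lines
# starting with '#' are skipped.
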
2662 2665 @command('debugwireproto',
2663 2666 [
2664 2667 ('', 'localssh', False, _('start an SSH server for this repo')),
2665 2668 ('', 'peer', '', _('construct a specific version of the peer')),
2666 2669 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2667 2670 ('', 'nologhandshake', False,
2668 2671 _('do not log I/O related to the peer handshake')),
2669 2672 ] + cmdutil.remoteopts,
2670 2673 _('[PATH]'),
2671 2674 optionalrepo=True)
2672 2675 def debugwireproto(ui, repo, path=None, **opts):
2673 2676 """send wire protocol commands to a server
2674 2677
2675 2678 This command can be used to issue wire protocol commands to remote
2676 2679 peers and to debug the raw data being exchanged.
2677 2680
2678 2681 ``--localssh`` will start an SSH server against the current repository
2679 2682 and connect to that. By default, the connection will perform a handshake
2680 2683 and establish an appropriate peer instance.
2681 2684
2682 2685 ``--peer`` can be used to bypass the handshake protocol and construct a
2683 2686 peer instance using the specified class type. Valid values are ``raw``,
2684 2687 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2685 2688 raw data payloads and don't support higher-level command actions.
2686 2689
2687 2690 ``--noreadstderr`` can be used to disable automatic reading from stderr
2688 2691 of the peer (for SSH connections only). Disabling automatic reading of
2689 2692 stderr is useful for making output more deterministic.
2690 2693
2691 2694 Commands are issued via a mini language which is specified via stdin.
2692 2695 The language consists of individual actions to perform. An action is
2693 2696 defined by a block. A block is defined as a line with no leading
2694 2697 space followed by 0 or more lines with leading space. Blocks are
2695 2698 effectively a high-level command with additional metadata.
2696 2699
2697 2700 Lines beginning with ``#`` are ignored.
2698 2701
2699 2702 The following sections denote available actions.
2700 2703
2701 2704 raw
2702 2705 ---
2703 2706
2704 2707 Send raw data to the server.
2705 2708
2706 2709 The block payload contains the raw data to send as one atomic send
2707 2710 operation. The data may not actually be delivered in a single system
2708 2711 call: it depends on the abilities of the transport being used.
2709 2712
2710 2713 Each line in the block is de-indented and concatenated. Then, that
2711 2714 value is evaluated as a Python b'' literal. This allows the use of
2712 2715 backslash escaping, etc.
2713 2716
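 For example, the following block (illustrative) sends the five bytes
 ``hello`` followed by a newline::

   raw
       hello\n
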
2714 2717 raw+
2715 2718 ----
2716 2719
2717 2720 Behaves like ``raw`` except flushes output afterwards.
2718 2721
2719 2722 command <X>
2720 2723 -----------
2721 2724
2722 2725 Send a request to run a named command, whose name follows the ``command``
2723 2726 string.
2724 2727
2725 2728 Arguments to the command are defined as lines in this block. The format of
2726 2729 each line is ``<key> <value>``. e.g.::
2727 2730
2728 2731 command listkeys
2729 2732 namespace bookmarks
2730 2733
2731 2734 If the value begins with ``eval:``, it will be interpreted as a Python
2732 2735 literal expression. Otherwise values are interpreted as Python b'' literals.
2733 2736 This allows sending complex types and encoding special byte sequences via
2734 2737 backslash escaping.
2735 2738
2736 2739 The following arguments have special meaning:
2737 2740
2738 2741 ``PUSHFILE``
2739 2742 When defined, the *push* mechanism of the peer will be used instead
2740 2743 of the static request-response mechanism and the content of the
2741 2744 file specified in the value of this argument will be sent as the
2742 2745 command payload.
2743 2746
2744 2747 This can be used to submit a local bundle file to the remote.
2745 2748
2746 2749 batchbegin
2747 2750 ----------
2748 2751
2749 2752 Instruct the peer to begin a batched send.
2750 2753
2751 2754 All ``command`` blocks are queued for execution until the next
2752 2755 ``batchsubmit`` block.
2753 2756
2754 2757 batchsubmit
2755 2758 -----------
2756 2759
2757 2760 Submit previously queued ``command`` blocks as a batch request.
2758 2761
2759 2762 This action MUST be paired with a ``batchbegin`` action.
2760 2763
2761 2764 httprequest <method> <path>
2762 2765 ---------------------------
2763 2766
2764 2767 (HTTP peer only)
2765 2768
2766 2769 Send an HTTP request to the peer.
2767 2770
2768 2771 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2769 2772
2770 2773 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2771 2774 headers to add to the request. e.g. ``Accept: foo``.
2772 2775
2773 2776 The following arguments are special:
2774 2777
2775 2778 ``BODYFILE``
2776 2779 The content of the file defined as the value to this argument will be
2777 2780 transferred verbatim as the HTTP request body.
2778 2781
2779 2782 ``frame <type> <flags> <payload>``
2780 2783 Send a unified protocol frame as part of the request body.
2781 2784
2782 2785 All frames will be collected and sent as the body to the HTTP
2783 2786 request.
2784 2787
2785 2788 close
2786 2789 -----
2787 2790
2788 2791 Close the connection to the server.
2789 2792
2790 2793 flush
2791 2794 -----
2792 2795
2793 2796 Flush data written to the server.
2794 2797
2795 2798 readavailable
2796 2799 -------------
2797 2800
2798 2801 Close the write end of the connection and read all available data from
2799 2802 the server.
2800 2803
2801 2804 If the connection to the server encompasses multiple pipes, we poll both
2802 2805 pipes and read available data.
2803 2806
2804 2807 readline
2805 2808 --------
2806 2809
2807 2810 Read a line of output from the server. If there are multiple output
2808 2811 pipes, reads only the main pipe.
2809 2812
2810 2813 ereadline
2811 2814 ---------
2812 2815
2813 2816 Like ``readline``, but read from the stderr pipe, if available.
2814 2817
2815 2818 read <X>
2816 2819 --------
2817 2820
2818 2821 ``read()`` N bytes from the server's main output pipe.
2819 2822
2820 2823 eread <X>
2821 2824 ---------
2822 2825
2823 2826 ``read()`` N bytes from the server's stderr pipe, if available.
2824 2827
2825 2828 Specifying Unified Frame-Based Protocol Frames
2826 2829 ----------------------------------------------
2827 2830
2828 2831 It is possible to emit a *Unified Frame-Based Protocol* by using special
2829 2832 syntax.
2830 2833
 2831 2834 A frame is composed of a type, flags, and a payload. These can be parsed
2832 2835 from a string of the form:
2833 2836
2834 2837 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2835 2838
2836 2839 ``request-id`` and ``stream-id`` are integers defining the request and
2837 2840 stream identifiers.
2838 2841
2839 2842 ``type`` can be an integer value for the frame type or the string name
2840 2843 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2841 2844 ``command-name``.
2842 2845
2843 2846 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2844 2847 components. Each component (and there can be just one) can be an integer
2845 2848 or a flag name for stream flags or frame flags, respectively. Values are
2846 2849 resolved to integers and then bitwise OR'd together.
2847 2850
2848 2851 ``payload`` represents the raw frame payload. If it begins with
2849 2852 ``cbor:``, the following string is evaluated as Python code and the
2850 2853 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2851 2854 as a Python byte string literal.
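
 An illustrative frame specification following that form (the type and
 flag names below are placeholders; the real names are resolved from
 ``wireprotoframing.py``)::

   1 1 stream-begin command-request new cbor:{b'name': b'heads'}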
2852 2855 """
2853 2856 opts = pycompat.byteskwargs(opts)
2854 2857
2855 2858 if opts['localssh'] and not repo:
2856 2859 raise error.Abort(_('--localssh requires a repository'))
2857 2860
2858 2861 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2859 2862 raise error.Abort(_('invalid value for --peer'),
 2860 2863 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2861 2864
2862 2865 if path and opts['localssh']:
2863 2866 raise error.Abort(_('cannot specify --localssh with an explicit '
2864 2867 'path'))
2865 2868
2866 2869 if ui.interactive():
2867 2870 ui.write(_('(waiting for commands on stdin)\n'))
2868 2871
2869 2872 blocks = list(_parsewirelangblocks(ui.fin))
2870 2873
2871 2874 proc = None
2872 2875 stdin = None
2873 2876 stdout = None
2874 2877 stderr = None
2875 2878 opener = None
2876 2879
2877 2880 if opts['localssh']:
2878 2881 # We start the SSH server in its own process so there is process
2879 2882 # separation. This prevents a whole class of potential bugs around
2880 2883 # shared state from interfering with server operation.
2881 2884 args = procutil.hgcmd() + [
2882 2885 '-R', repo.root,
2883 2886 'debugserve', '--sshstdio',
2884 2887 ]
2885 2888 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2886 2889 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2887 2890 bufsize=0)
2888 2891
2889 2892 stdin = proc.stdin
2890 2893 stdout = proc.stdout
2891 2894 stderr = proc.stderr
2892 2895
2893 2896 # We turn the pipes into observers so we can log I/O.
2894 2897 if ui.verbose or opts['peer'] == 'raw':
2895 2898 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2896 2899 logdata=True)
2897 2900 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2898 2901 logdata=True)
2899 2902 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2900 2903 logdata=True)
2901 2904
2902 2905 # --localssh also implies the peer connection settings.
2903 2906
2904 2907 url = 'ssh://localserver'
2905 2908 autoreadstderr = not opts['noreadstderr']
2906 2909
2907 2910 if opts['peer'] == 'ssh1':
2908 2911 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2909 2912 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2910 2913 None, autoreadstderr=autoreadstderr)
2911 2914 elif opts['peer'] == 'ssh2':
2912 2915 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2913 2916 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2914 2917 None, autoreadstderr=autoreadstderr)
2915 2918 elif opts['peer'] == 'raw':
2916 2919 ui.write(_('using raw connection to peer\n'))
2917 2920 peer = None
2918 2921 else:
2919 2922 ui.write(_('creating ssh peer from handshake results\n'))
2920 2923 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2921 2924 autoreadstderr=autoreadstderr)
2922 2925
2923 2926 elif path:
2924 2927 # We bypass hg.peer() so we can proxy the sockets.
2925 2928 # TODO consider not doing this because we skip
2926 2929 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2927 2930 u = util.url(path)
2928 2931 if u.scheme != 'http':
2929 2932 raise error.Abort(_('only http:// paths are currently supported'))
2930 2933
2931 2934 url, authinfo = u.authinfo()
2932 2935 openerargs = {
2933 2936 r'useragent': b'Mercurial debugwireproto',
2934 2937 }
2935 2938
2936 2939 # Turn pipes/sockets into observers so we can log I/O.
2937 2940 if ui.verbose:
2938 2941 openerargs.update({
2939 2942 r'loggingfh': ui,
2940 2943 r'loggingname': b's',
2941 2944 r'loggingopts': {
2942 2945 r'logdata': True,
2943 2946 r'logdataapis': False,
2944 2947 },
2945 2948 })
2946 2949
2947 2950 if ui.debugflag:
2948 2951 openerargs[r'loggingopts'][r'logdataapis'] = True
2949 2952
2950 2953 # Don't send default headers when in raw mode. This allows us to
2951 2954 # bypass most of the behavior of our URL handling code so we can
2952 2955 # have near complete control over what's sent on the wire.
2953 2956 if opts['peer'] == 'raw':
2954 2957 openerargs[r'sendaccept'] = False
2955 2958
2956 2959 opener = urlmod.opener(ui, authinfo, **openerargs)
2957 2960
2958 2961 if opts['peer'] == 'http2':
2959 2962 ui.write(_('creating http peer for wire protocol version 2\n'))
2960 2963 # We go through makepeer() because we need an API descriptor for
2961 2964 # the peer instance to be useful.
2962 2965 with ui.configoverride({
2963 2966 ('experimental', 'httppeer.advertise-v2'): True}):
2964 2967 if opts['nologhandshake']:
2965 2968 ui.pushbuffer()
2966 2969
2967 2970 peer = httppeer.makepeer(ui, path, opener=opener)
2968 2971
2969 2972 if opts['nologhandshake']:
2970 2973 ui.popbuffer()
2971 2974
2972 2975 if not isinstance(peer, httppeer.httpv2peer):
2973 2976 raise error.Abort(_('could not instantiate HTTP peer for '
2974 2977 'wire protocol version 2'),
2975 2978 hint=_('the server may not have the feature '
2976 2979 'enabled or is not allowing this '
2977 2980 'client version'))
2978 2981
2979 2982 elif opts['peer'] == 'raw':
2980 2983 ui.write(_('using raw connection to peer\n'))
2981 2984 peer = None
2982 2985 elif opts['peer']:
2983 2986 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2984 2987 opts['peer'])
2985 2988 else:
2986 2989 peer = httppeer.makepeer(ui, path, opener=opener)
2987 2990
2988 2991 # We /could/ populate stdin/stdout with sock.makefile()...
2989 2992 else:
2990 2993 raise error.Abort(_('unsupported connection configuration'))
2991 2994
2992 2995 batchedcommands = None
2993 2996
2994 2997 # Now perform actions based on the parsed wire language instructions.
2995 2998 for action, lines in blocks:
2996 2999 if action in ('raw', 'raw+'):
2997 3000 if not stdin:
2998 3001 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2999 3002
3000 3003 # Concatenate the data together.
3001 3004 data = ''.join(l.lstrip() for l in lines)
3002 3005 data = stringutil.unescapestr(data)
3003 3006 stdin.write(data)
3004 3007
3005 3008 if action == 'raw+':
3006 3009 stdin.flush()
3007 3010 elif action == 'flush':
3008 3011 if not stdin:
3009 3012 raise error.Abort(_('cannot call flush on this peer'))
3010 3013 stdin.flush()
3011 3014 elif action.startswith('command'):
3012 3015 if not peer:
3013 3016 raise error.Abort(_('cannot send commands unless peer instance '
3014 3017 'is available'))
3015 3018
3016 3019 command = action.split(' ', 1)[1]
3017 3020
3018 3021 args = {}
3019 3022 for line in lines:
3020 3023 # We need to allow empty values.
3021 3024 fields = line.lstrip().split(' ', 1)
3022 3025 if len(fields) == 1:
3023 3026 key = fields[0]
3024 3027 value = ''
3025 3028 else:
3026 3029 key, value = fields
3027 3030
3028 3031 if value.startswith('eval:'):
3029 3032 value = stringutil.evalpythonliteral(value[5:])
3030 3033 else:
3031 3034 value = stringutil.unescapestr(value)
3032 3035
3033 3036 args[key] = value
3034 3037
3035 3038 if batchedcommands is not None:
3036 3039 batchedcommands.append((command, args))
3037 3040 continue
3038 3041
3039 3042 ui.status(_('sending %s command\n') % command)
3040 3043
3041 3044 if 'PUSHFILE' in args:
3042 3045 with open(args['PUSHFILE'], r'rb') as fh:
3043 3046 del args['PUSHFILE']
3044 3047 res, output = peer._callpush(command, fh,
3045 3048 **pycompat.strkwargs(args))
3046 3049 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3047 3050 ui.status(_('remote output: %s\n') %
3048 3051 stringutil.escapestr(output))
3049 3052 else:
3050 3053 with peer.commandexecutor() as e:
3051 3054 res = e.callcommand(command, args).result()
3052 3055
3053 3056 if isinstance(res, wireprotov2peer.commandresponse):
3054 3057 val = list(res.cborobjects())
3055 3058 ui.status(_('response: %s\n') %
3056 3059 stringutil.pprint(val, bprefix=True))
3057 3060
3058 3061 else:
3059 3062 ui.status(_('response: %s\n') %
3060 3063 stringutil.pprint(res, bprefix=True))
3061 3064
3062 3065 elif action == 'batchbegin':
3063 3066 if batchedcommands is not None:
3064 3067 raise error.Abort(_('nested batchbegin not allowed'))
3065 3068
3066 3069 batchedcommands = []
3067 3070 elif action == 'batchsubmit':
3068 3071 # There is a batching API we could go through. But it would be
3069 3072 # difficult to normalize requests into function calls. It is easier
3070 3073 # to bypass this layer and normalize to commands + args.
3071 3074 ui.status(_('sending batch with %d sub-commands\n') %
3072 3075 len(batchedcommands))
3073 3076 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3074 3077 ui.status(_('response #%d: %s\n') %
3075 3078 (i, stringutil.escapestr(chunk)))
3076 3079
3077 3080 batchedcommands = None
3078 3081
3079 3082 elif action.startswith('httprequest '):
3080 3083 if not opener:
3081 3084 raise error.Abort(_('cannot use httprequest without an HTTP '
3082 3085 'peer'))
3083 3086
3084 3087 request = action.split(' ', 2)
3085 3088 if len(request) != 3:
3086 3089 raise error.Abort(_('invalid httprequest: expected format is '
3087 3090 '"httprequest <method> <path>'))
3088 3091
3089 3092 method, httppath = request[1:]
3090 3093 headers = {}
3091 3094 body = None
3092 3095 frames = []
3093 3096 for line in lines:
3094 3097 line = line.lstrip()
3095 3098 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3096 3099 if m:
3097 3100 headers[m.group(1)] = m.group(2)
3098 3101 continue
3099 3102
3100 3103 if line.startswith(b'BODYFILE '):
3101 3104 with open(line.split(b' ', 1)[1], 'rb') as fh:
3102 3105 body = fh.read()
3103 3106 elif line.startswith(b'frame '):
3104 3107 frame = wireprotoframing.makeframefromhumanstring(
3105 3108 line[len(b'frame '):])
3106 3109
3107 3110 frames.append(frame)
3108 3111 else:
3109 3112 raise error.Abort(_('unknown argument to httprequest: %s') %
3110 3113 line)
3111 3114
3112 3115 url = path + httppath
3113 3116
3114 3117 if frames:
3115 3118 body = b''.join(bytes(f) for f in frames)
3116 3119
3117 3120 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3118 3121
3119 3122 # urllib.Request insists on using has_data() as a proxy for
3120 3123 # determining the request method. Override that to use our
3121 3124 # explicitly requested method.
3122 3125 req.get_method = lambda: method
3123 3126
3124 3127 try:
3125 3128 res = opener.open(req)
3126 3129 body = res.read()
3127 3130 except util.urlerr.urlerror as e:
3128 3131 e.read()
3129 3132 continue
3130 3133
3131 3134 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3132 3135 ui.write(_('cbor> %s\n') %
3133 3136 stringutil.pprint(cbor.loads(body), bprefix=True))
3134 3137
3135 3138 elif action == 'close':
3136 3139 peer.close()
3137 3140 elif action == 'readavailable':
3138 3141 if not stdout or not stderr:
3139 3142 raise error.Abort(_('readavailable not available on this peer'))
3140 3143
3141 3144 stdin.close()
3142 3145 stdout.read()
3143 3146 stderr.read()
3144 3147
3145 3148 elif action == 'readline':
3146 3149 if not stdout:
3147 3150 raise error.Abort(_('readline not available on this peer'))
3148 3151 stdout.readline()
3149 3152 elif action == 'ereadline':
3150 3153 if not stderr:
3151 3154 raise error.Abort(_('ereadline not available on this peer'))
3152 3155 stderr.readline()
3153 3156 elif action.startswith('read '):
3154 3157 count = int(action.split(' ', 1)[1])
3155 3158 if not stdout:
3156 3159 raise error.Abort(_('read not available on this peer'))
3157 3160 stdout.read(count)
3158 3161 elif action.startswith('eread '):
3159 3162 count = int(action.split(' ', 1)[1])
3160 3163 if not stderr:
3161 3164 raise error.Abort(_('eread not available on this peer'))
3162 3165 stderr.read(count)
3163 3166 else:
3164 3167 raise error.Abort(_('unknown action: %s') % action)
3165 3168
3166 3169 if batchedcommands is not None:
3167 3170 raise error.Abort(_('unclosed "batchbegin" request'))
3168 3171
3169 3172 if peer:
3170 3173 peer.close()
3171 3174
3172 3175 if proc:
3173 3176 proc.kill()
@@ -1,439 +1,519 @@
1 1 $ cat << EOF >> $HGRCPATH
2 2 > [ui]
3 3 > interactive=yes
4 4 > EOF
5 5
6 6 $ hg init debugrevlog
7 7 $ cd debugrevlog
8 8 $ echo a > a
9 9 $ hg ci -Am adda
10 10 adding a
11 $ hg rm .
12 removing a
13 $ hg ci -Am make-it-empty
14 $ hg revert --all -r 0
15 adding a
16 $ hg ci -Am make-it-full
11 17 #if reporevlogstore
12 18 $ hg debugrevlog -m
13 19 format : 1
14 20 flags : inline, generaldelta
15 21
16 revisions : 1
22 revisions : 3
17 23 merges : 0 ( 0.00%)
18 normal : 1 (100.00%)
19 revisions : 1
20 full : 1 (100.00%)
24 normal : 3 (100.00%)
25 revisions : 3
26 full : 3 (100.00%)
21 27 deltas : 0 ( 0.00%)
22 revision size : 44
23 full : 44 (100.00%)
28 revision size : 88
29 full : 88 (100.00%)
24 30 deltas : 0 ( 0.00%)
25 31
26 chunks : 1
27 0x75 (u) : 1 (100.00%)
28 chunks size : 44
29 0x75 (u) : 44 (100.00%)
32 chunks : 3
33 empty : 1 (33.33%)
34 0x75 (u) : 2 (66.67%)
35 chunks size : 88
36 empty : 0 ( 0.00%)
37 0x75 (u) : 88 (100.00%)
30 38
31 39 avg chain length : 0
32 40 max chain length : 0
33 41 max chain reach : 44
34 42 compression ratio : 0
35 43
36 uncompressed data size (min/max/avg) : 43 / 43 / 43
37 full revision size (min/max/avg) : 44 / 44 / 44
44 uncompressed data size (min/max/avg) : 0 / 43 / 28
45 full revision size (min/max/avg) : 0 / 44 / 29
38 46 delta size (min/max/avg) : 0 / 0 / 0
39 47 #endif
40 48
41 49 Test debugindex, with and without the --verbose/--debug flag
42 50 $ hg debugindex a
43 51 rev linkrev nodeid p1 p2
44 52 0 0 b789fdd96dc2 000000000000 000000000000
45 53
46 54 #if no-reposimplestore
47 55 $ hg --verbose debugindex a
48 56 rev offset length linkrev nodeid p1 p2
49 57 0 0 3 0 b789fdd96dc2 000000000000 000000000000
50 58
51 59 $ hg --debug debugindex a
52 60 rev offset length linkrev nodeid p1 p2
53 61 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
54 62 #endif
55 63
56 64 $ hg debugindex -f 1 a
57 65 rev flag size link p1 p2 nodeid
58 66 0 0000 2 0 -1 -1 b789fdd96dc2
59 67
60 68 #if no-reposimplestore
61 69 $ hg --verbose debugindex -f 1 a
62 70 rev flag offset length size link p1 p2 nodeid
63 71 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
64 72
65 73 $ hg --debug debugindex -f 1 a
66 74 rev flag offset length size link p1 p2 nodeid
67 75 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
68 76 #endif
69 77
70 78 debugdelta chain basic output
71 79
72 80 #if reporevlogstore
73 81 $ hg debugdeltachain -m
74 82 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
75 83 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
84 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000
85 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000
76 86
77 87 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
78 88 0 1 1
89 1 2 1
90 2 3 1
79 91
80 92 $ hg debugdeltachain -m -Tjson
81 93 [
82 94 {
83 95 "chainid": 1,
84 96 "chainlen": 1,
85 97 "chainratio": 1.02325581395,
86 98 "chainsize": 44,
87 99 "compsize": 44,
88 100 "deltatype": "base",
89 101 "extradist": 0,
90 102 "extraratio": 0.0,
91 103 "lindist": 44,
92 104 "prevrev": -1,
93 105 "rev": 0,
94 106 "uncompsize": 43
107 },
108 {
109 "chainid": 2,
110 "chainlen": 1,
111 "chainratio": 0,
112 "chainsize": 0,
113 "compsize": 0,
114 "deltatype": "base",
115 "extradist": 0,
116 "extraratio": 0,
117 "lindist": 0,
118 "prevrev": -1,
119 "rev": 1,
120 "uncompsize": 0
121 },
122 {
123 "chainid": 3,
124 "chainlen": 1,
125 "chainratio": 1.02325581395,
126 "chainsize": 44,
127 "compsize": 44,
128 "deltatype": "base",
129 "extradist": 0,
130 "extraratio": 0.0,
131 "lindist": 44,
132 "prevrev": -1,
133 "rev": 2,
134 "uncompsize": 43
95 135 }
96 136 ]
97 137
98 138 debugdelta chain with sparse read enabled
99 139
100 140 $ cat >> $HGRCPATH <<EOF
101 141 > [experimental]
102 142 > sparse-read = True
103 143 > EOF
104 144 $ hg debugdeltachain -m
105 145 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
106 146 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
147 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
148 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
107 149
108 150 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
109 151 0 1 1 44 44 1.0
152 1 2 1 0 0 1
153 2 3 1 44 44 1.0
110 154
111 155 $ hg debugdeltachain -m -Tjson
112 156 [
113 157 {
114 158 "chainid": 1,
115 159 "chainlen": 1,
116 160 "chainratio": 1.02325581395,
117 161 "chainsize": 44,
118 162 "compsize": 44,
119 163 "deltatype": "base",
120 164 "extradist": 0,
121 165 "extraratio": 0.0,
122 166 "largestblock": 44,
123 167 "lindist": 44,
124 168 "prevrev": -1,
125 169 "readdensity": 1.0,
126 170 "readsize": 44,
127 171 "rev": 0,
128 172 "srchunks": 1,
129 173 "uncompsize": 43
174 },
175 {
176 "chainid": 2,
177 "chainlen": 1,
178 "chainratio": 0,
179 "chainsize": 0,
180 "compsize": 0,
181 "deltatype": "base",
182 "extradist": 0,
183 "extraratio": 0,
184 "largestblock": 0,
185 "lindist": 0,
186 "prevrev": -1,
187 "readdensity": 1,
188 "readsize": 0,
189 "rev": 1,
190 "srchunks": 1,
191 "uncompsize": 0
192 },
193 {
194 "chainid": 3,
195 "chainlen": 1,
196 "chainratio": 1.02325581395,
197 "chainsize": 44,
198 "compsize": 44,
199 "deltatype": "base",
200 "extradist": 0,
201 "extraratio": 0.0,
202 "largestblock": 44,
203 "lindist": 44,
204 "prevrev": -1,
205 "readdensity": 1.0,
206 "readsize": 44,
207 "rev": 2,
208 "srchunks": 1,
209 "uncompsize": 43
130 210 }
131 211 ]
132 212
133 213 $ printf "This test checks things.\n" >> a
134 214 $ hg ci -m a
135 215 $ hg branch other
136 216 marked working directory as branch other
137 217 (branches are permanent and global, did you want a bookmark?)
138 218 $ for i in `$TESTDIR/seq.py 5`; do
139 219 > printf "shorter ${i}" >> a
140 220 > hg ci -m "a other:$i"
141 221 > hg up -q default
142 222 > printf "for the branch default we want longer chains: ${i}" >> a
143 223 > hg ci -m "a default:$i"
144 224 > hg up -q other
145 225 > done
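
The loop above interleaves commits from two branches so that the revisions of one delta chain are no longer adjacent in the revlog data file. With the density threshold lowered, the debugdeltachain call below is expected to split the read for rev 10 into two chunks (srchunks = 2). A rough sketch of density-based read slicing, with assumed names and simplified logic rather than the actual revlog algorithm:

    def sliceread(chunks, densitythreshold=0.5, mingapsize=0):
        # chunks: list of (offset, size) pairs for a chain's revisions,
        # sorted by offset; returns groups that can each be read in one sweep
        start = chunks[0][0]
        end = chunks[-1][0] + chunks[-1][1]
        datasize = sum(size for _, size in chunks)
        if end == start or float(datasize) / (end - start) >= densitythreshold:
            return [chunks]  # dense enough to read in a single sweep
        # otherwise split at the largest gap between consecutive revisions
        gaps = []
        for i in range(1, len(chunks)):
            prevend = chunks[i - 1][0] + chunks[i - 1][1]
            gap = chunks[i][0] - prevend
            if gap >= mingapsize:
                gaps.append((gap, i))
        if not gaps:
            return [chunks]
        _, i = max(gaps)
        return (sliceread(chunks[:i], densitythreshold, mingapsize) +
                sliceread(chunks[i:], densitythreshold, mingapsize))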
146 226 $ hg debugdeltachain a -T '{rev} {srchunks}\n' \
147 227 > --config experimental.sparse-read.density-threshold=0.50 \
148 228 > --config experimental.sparse-read.min-gap-size=0
149 229 0 1
150 230 1 1
151 231 2 1
152 232 3 1
153 233 4 1
154 234 5 1
155 235 6 1
156 236 7 1
157 237 8 1
158 238 9 1
159 239 10 2
160 240 11 1
161 241 $ hg --config extensions.strip= strip --no-backup -r 1
162 242 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
163 243
164 244 Test max chain len
165 245 $ cat >> $HGRCPATH << EOF
166 246 > [format]
167 247 > maxchainlen=4
168 248 > EOF
169 249
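
With format.maxchainlen=4, a revision may be stored as a delta only while its chain stays at or below four links; the next revision is then stored as a full text and starts a new chain, which is what the chainlen column of debugrevlog -d shows further below (0 through 4, then resetting to 0). A simplified sketch of that decision, with assumed names:

    def storageform(prevchainlen, maxchainlen):
        # decide whether the next revision may extend the existing delta
        # chain or must be stored as a full snapshot starting a new chain
        if maxchainlen is not None and prevchainlen + 1 > maxchainlen:
            return 'fulltext'  # the chain would grow past the cap
        return 'delta'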
170 250 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
171 251 $ hg ci -m a
172 252 $ printf "b\n" >> a
173 253 $ hg ci -m a
174 254 $ printf "c\n" >> a
175 255 $ hg ci -m a
176 256 $ printf "d\n" >> a
177 257 $ hg ci -m a
178 258 $ printf "e\n" >> a
179 259 $ hg ci -m a
180 260 $ printf "f\n" >> a
181 261 $ hg ci -m a
182 262 $ printf 'g\n' >> a
183 263 $ hg ci -m a
184 264 $ printf 'h\n' >> a
185 265 $ hg ci -m a
186 266
187 267 $ hg debugrevlog -d a
188 268 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
189 269 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
190 270 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
191 271 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
192 272 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
193 273 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
194 274 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
195 275 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
196 276 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
197 277 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
198 278 #endif
199 279
200 280 Test debuglocks command:
201 281
202 282 $ hg debuglocks
203 283 lock: free
204 284 wlock: free
205 285
206 286 * Test setting the lock
207 287
208 288 waitlock <file> will wait for the file to be created. If it isn't created in a
209 289 reasonable amount of time, it displays an error message and returns 1
210 290 $ waitlock() {
211 291 > start=`date +%s`
212 292 > timeout=5
213 293 > while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
214 294 > now=`date +%s`
215 295 > if [ "`expr $now - $start`" -gt $timeout ]; then
216 296 > echo "timeout: $1 was not created in $timeout seconds"
217 297 > return 1
218 298 > fi
219 299 > sleep 0.1
220 300 > done
221 301 > }
222 302 $ dolock() {
223 303 > {
224 304 > waitlock .hg/unlock
225 305 > rm -f .hg/unlock
226 306 > echo y
227 307 > } | hg debuglocks "$@" > /dev/null
228 308 > }
229 309 $ dolock -s &
230 310 $ waitlock .hg/store/lock
231 311
232 312 $ hg debuglocks
233 313 lock: user *, process * (*s) (glob)
234 314 wlock: free
235 315 [1]
236 316 $ touch .hg/unlock
237 317 $ wait
238 318 $ [ -f .hg/store/lock ] || echo "There is no lock"
239 319 There is no lock
240 320
241 321 * Test setting the wlock
242 322
243 323 $ dolock -S &
244 324 $ waitlock .hg/wlock
245 325
246 326 $ hg debuglocks
247 327 lock: free
248 328 wlock: user *, process * (*s) (glob)
249 329 [1]
250 330 $ touch .hg/unlock
251 331 $ wait
252 332 $ [ -f .hg/wlock ] || echo "There is no wlock"
253 333 There is no wlock
254 334
255 335 * Test setting both locks
256 336
257 337 $ dolock -Ss &
258 338 $ waitlock .hg/wlock && waitlock .hg/store/lock
259 339
260 340 $ hg debuglocks
261 341 lock: user *, process * (*s) (glob)
262 342 wlock: user *, process * (*s) (glob)
263 343 [2]
264 344
265 345 * Test failing to set a lock
266 346
267 347 $ hg debuglocks -s
268 348 abort: lock is already held
269 349 [255]
270 350
271 351 $ hg debuglocks -S
272 352 abort: wlock is already held
273 353 [255]
274 354
275 355 $ touch .hg/unlock
276 356 $ wait
277 357
278 358 $ hg debuglocks
279 359 lock: free
280 360 wlock: free
281 361
282 362 * Test forcing the lock
283 363
284 364 $ dolock -s &
285 365 $ waitlock .hg/store/lock
286 366
287 367 $ hg debuglocks
288 368 lock: user *, process * (*s) (glob)
289 369 wlock: free
290 370 [1]
291 371
292 372 $ hg debuglocks -L
293 373
294 374 $ hg debuglocks
295 375 lock: free
296 376 wlock: free
297 377
298 378 $ touch .hg/unlock
299 379 $ wait
300 380
301 381 * Test forcing the wlock
302 382
303 383 $ dolock -S &
304 384 $ waitlock .hg/wlock
305 385
306 386 $ hg debuglocks
307 387 lock: free
308 388 wlock: user *, process * (*s) (glob)
309 389 [1]
310 390
311 391 $ hg debuglocks -W
312 392
313 393 $ hg debuglocks
314 394 lock: free
315 395 wlock: free
316 396
317 397 $ touch .hg/unlock
318 398 $ wait
319 399
320 400 Test WdirUnsupported exception
321 401
322 402 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
323 403 abort: working directory revision cannot be specified
324 404 [255]
325 405
326 406 Test cache warming command
327 407
328 408 $ rm -rf .hg/cache/
329 409 $ hg debugupdatecaches --debug
330 410 updating the branch cache
331 411 $ ls -r .hg/cache/*
332 412 .hg/cache/rbc-revs-v1
333 413 .hg/cache/rbc-names-v1
334 414 .hg/cache/branch2-served
335 415
336 416 Test debugcolor
337 417
338 418 #if no-windows
339 419 $ hg debugcolor --style --color always | egrep 'mode|style|log\.'
340 420 color mode: 'ansi'
341 421 available style:
342 422 \x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
343 423 #endif
344 424
345 425 $ hg debugcolor --style --color never
346 426 color mode: None
347 427 available style:
348 428
349 429 $ cd ..
350 430
351 431 Test internal debugstacktrace command
352 432
353 433 $ cat > debugstacktrace.py << EOF
354 434 > from __future__ import absolute_import
355 435 > import sys
356 436 > from mercurial import util
357 437 > def f():
358 438 > util.debugstacktrace(f=sys.stdout)
359 439 > g()
360 440 > def g():
361 441 > util.dst('hello from g\\n', skip=1)
362 442 > h()
363 443 > def h():
364 444 > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
365 445 > f()
366 446 > EOF
367 447 $ $PYTHON debugstacktrace.py
368 448 stacktrace at:
369 449 debugstacktrace.py:12 in * (glob)
370 450 debugstacktrace.py:5 in f
371 451 hello from g at:
372 452 debugstacktrace.py:12 in * (glob)
373 453 debugstacktrace.py:6 in f
374 454 hi ...
375 455 from h hidden in g at:
376 456 debugstacktrace.py:6 in f
377 457 debugstacktrace.py:9 in g
378 458
379 459 Test debugcapabilities command:
380 460
381 461 $ hg debugcapabilities ./debugrevlog/
382 462 Main capabilities:
383 463 branchmap
384 464 $USUAL_BUNDLE2_CAPS$
385 465 getbundle
386 466 known
387 467 lookup
388 468 pushkey
389 469 unbundle
390 470 Bundle2 capabilities:
391 471 HG20
392 472 bookmarks
393 473 changegroup
394 474 01
395 475 02
396 476 digests
397 477 md5
398 478 sha1
399 479 sha512
400 480 error
401 481 abort
402 482 unsupportedcontent
403 483 pushraced
404 484 pushkey
405 485 hgtagsfnodes
406 486 listkeys
407 487 phases
408 488 heads
409 489 pushkey
410 490 remote-changegroup
411 491 http
412 492 https
413 493 rev-branch-cache
414 494 stream
415 495 v2
416 496
417 497 Test debugpeer
418 498
419 499 $ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" debugpeer ssh://user@dummy/debugrevlog
420 500 url: ssh://user@dummy/debugrevlog
421 501 local: no
422 502 pushable: yes
423 503
424 504 $ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" --debug debugpeer ssh://user@dummy/debugrevlog
425 505 running "*" "*/tests/dummyssh" 'user@dummy' 'hg -R debugrevlog serve --stdio' (glob) (no-windows !)
426 506 running "*" "*\tests/dummyssh" "user@dummy" "hg -R debugrevlog serve --stdio" (glob) (windows !)
427 507 devel-peer-request: hello+between
428 508 devel-peer-request: pairs: 81 bytes
429 509 sending hello command
430 510 sending between command
431 511 remote: 413
432 512 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS_SERVER$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
433 513 remote: 1
434 514 devel-peer-request: protocaps
435 515 devel-peer-request: caps: * bytes (glob)
436 516 sending protocaps command
437 517 url: ssh://user@dummy/debugrevlog
438 518 local: no
439 519 pushable: yes