##// END OF EJS Templates
fileset: add stub for weight-based optimization...
Yuya Nishihara -
r38865:7e7e2b2f default
parent child Browse files
Show More
@@ -1,3252 +1,3253
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .thirdparty import (
36 36 cbor,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 changegroup,
41 41 cmdutil,
42 42 color,
43 43 context,
44 44 dagparser,
45 45 dagutil,
46 46 encoding,
47 47 error,
48 48 exchange,
49 49 extensions,
50 50 filemerge,
51 51 filesetlang,
52 52 formatter,
53 53 hg,
54 54 httppeer,
55 55 localrepo,
56 56 lock as lockmod,
57 57 logcmdutil,
58 58 merge as mergemod,
59 59 obsolete,
60 60 obsutil,
61 61 phases,
62 62 policy,
63 63 pvec,
64 64 pycompat,
65 65 registrar,
66 66 repair,
67 67 revlog,
68 68 revset,
69 69 revsetlang,
70 70 scmutil,
71 71 setdiscovery,
72 72 simplemerge,
73 73 sshpeer,
74 74 sslutil,
75 75 streamclone,
76 76 templater,
77 77 treediscovery,
78 78 upgrade,
79 79 url as urlmod,
80 80 util,
81 81 vfs as vfsmod,
82 82 wireprotoframing,
83 83 wireprotoserver,
84 84 wireprotov2peer,
85 85 )
86 86 from .utils import (
87 87 dateutil,
88 88 procutil,
89 89 stringutil,
90 90 )
91 91
92 92 release = lockmod.release
93 93
94 94 command = registrar.command()
95 95
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions: (index-file, rev1, rev2) works on an
    # arbitrary revlog index; (rev1, rev2) uses the local changelog.
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_('either two or three arguments required'))
    if nargs == 3:
        # explicit revlog index given; open it directly from the cwd
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    else:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    ancnode = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(ancnode), hex(ancnode)))
114 114
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (local file or URL), parse its header, and
    # replay its contents into the current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
121 121
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # only allowed on a completely empty repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first pass, for the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        # running state while replaying the DAG: last committed id,
        # current named branch, and the nodes created so far (indexed
        # by DAG id so backrefs can be resolved).
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second pass: actually create a commit for each 'n' event
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" carries content designed to merge cleanly via
                    # a three-way text merge at merge revisions
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # one brand-new file per revision; on merges, carry
                    # over the "nf*" files from the second parent too
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # map DAG parent ids to previously committed nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag for the preceding node
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # switch named branch for subsequent commits
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))
269 269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of a changegroup unbundler 'gen'.

    With all=True, every delta of the changelog, manifest and each
    filelog is listed; otherwise only the changelog nodes are shown.
    'indent' prefixes each output line (used when nested in bundle2
    output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # dump one revlog section's deltas under a header line
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # an empty header dict marks the end of the filelog sections
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
298 298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and obsolescence markers contained in bundle2 'part'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report, but do not abort, on marker formats we cannot decode
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output is templatable
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
321 321
def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data' (binary phase-heads part)"""
    # NOTE: the original docstring said "version and markers" — a
    # copy-paste from _debugobsmarkers; this function prints phase heads.
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
330 330
def _quasirepr(thing):
    """Return a stable, repr-like byte string for *thing*.

    Mapping types are rendered with keys in sorted order so the output
    is deterministic; everything else falls back to repr().
    """
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mappingtypes):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return '{%s}' % b', '.join(pairs)
336 336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type restricts output to the named part types, if given
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # decode well-known part payloads unless --quiet
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
359 359
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only print the bundlespec string and stop
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        # dispatch on the detected bundle format
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
378 378
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # Wire protocol v1 capabilities first.
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    # Then the bundle2 capability tree, if the peer advertises one.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for capname, capvalues in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % capname)
            for value in capvalues:
                ui.write((' %s\n') % value)
397 397
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and the reverse direction: manifest entries missing from dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Use a name other than 'error' for the message: binding the
        # message to a local called 'error' shadowed the error module,
        # making the error.Abort lookup below crash with an
        # AttributeError on a byte string instead of aborting cleanly.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
425 425
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # Always report the active color mode, then dispatch on --style.
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    showstyles = opts.get(r'style')
    if showstyles:
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
436 436
def _debugdisplaycolor(ui):
    """Print every known color/effect name, rendered in its own style."""
    # work on a copy so the caller's style table is not clobbered
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, also surface user-defined color/terminfo keys
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
454 454
def _debugdisplaystyle(ui):
    """List each configured style label with its effects, column-aligned."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists start in the same column
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            padding = max(0, width - len(label))
            ui.write(' ' * padding)
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(rendered)
        ui.write('\n')
468 468
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    # generate the v1 stream and write its chunks straight to 'fname'
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486 486
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit revlog index file: label only the revs listed on the
        # command line
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # changelog of the local repo; optionally label with tags and
        # annotate branch changes
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an annotation event whenever the branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
549 549
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # with -c/-m/--dir the single positional argument is the
        # revision, not a file path
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit stored data without flag processing
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
565 565
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended additionally tries the more permissive format list
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
581 581
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # classify the delta base and compute per-rev chain statistics;
        # index entry fields used: e[1]=compressed size, e[2]=raw size,
        # e[3]=delta base rev, e[5]/e[6]=parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta, the base is either the rev itself
            # (full snapshot) or the previous rev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # chain bases are numbered in order of first appearance
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against zero-size revisions/chains
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # simulate a sparse read of the chain to measure I/O cost
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
733 733
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # dirstate entries are tuples of (state, mode, size, mtime)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit in the stored mode
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765 765
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            # legacy (pre-setdiscovery) tree-walking protocol
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                # reduce the common set to its heads for display
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            # modern sampling-based discovery, optionally limited to --rev
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
817 817
# copy buffer size for debugdownload (4 KiB)
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    # default to streaming to the ui; -o redirects to a file
    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # only close what we opened ourselves (never the ui)
        if output:
            dest.close()
841 841
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate the name with a compatibility note when the
            # extension is third-party and untested with this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
887 887
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
    ('', 'all-files', False,
     _('test files from all revisions and working directory')),
    ('s', 'show-matcher', None,
     _('print internal representation of matcher')),
    ('p', 'show-stage', [],
     _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # transformation pipeline applied to the parsed tree, in order;
    # 'optimized' runs the (stub) weight-based optimizer
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
951 952
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: widest variant name, but never narrower than the header.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # Left-aligned "name:" padded to the common column width.
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # Strings pass through; booleans render as yes/no in plain mode.
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # Structured formatters (json, template) keep the raw value.
        formatvalue = pycompat.identity

    # Header row; config/default columns only appear with --verbose.
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so the UI can color mismatches between what the repo
        # uses, what the config requests, and the Mercurial default.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1013 1014
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a boolean probe the way this command always has
        return flag and 'yes' or 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway file; an unwritable or missing
    # path leaves the answer unknown rather than aborting the command.
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1030 1031
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # Build keyword arguments for the wire-protocol getbundle() call; keys
    # use r'' (native str) because they are passed as **kwargs.
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # Map the user-facing compression name to the on-disk bundle type.
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1065 1066
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    # The file itself matches an ignore pattern.
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # Otherwise check each ancestor directory: a file is
                    # ignored if any containing folder is ignored.
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1107 1108
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    # Two output layouts are supported: 0 (legacy) and 1 (flag-aware).
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # --debug prints full 40-char hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Only the first revision is needed to learn the rendered id width.
        idlen = len(shortfn(r.node(i)))
        break

    # Header row; columns vary with both the format and --verbose.
    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Defensive: a broken entry still gets a row, with null
                # parents, rather than aborting the whole dump.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1172 1173
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        parents = rlog.parents(rlog.node(rev))
        # one edge per real parent; a null second parent is not drawn
        edges = [parents[0]]
        if parents[1] != nullid:
            edges.append(parents[1])
        for p in edges:
            ui.write("\t%d -> %d\n" % (rlog.rev(p), rev))
    ui.write("}\n")
1187 1188
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Runs a battery of sanity checks (encoding, Python, compiled modules,
    compression engines, templates, editor, username) and reports each one
    through a formatter.

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # NOTE(review): writetemp is not referenced anywhere in this function;
    # it looks like a leftover from an earlier editor check — confirm before
    # removing.
    def writetemp(contents):
        # Write contents to a fresh temp file and return its path.
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # Count of hard failures; warnings do not increment this.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # Try importing the C extensions; failure counts as a problem only
        # when the policy expects (or allows) compiled modules.
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused below as "templates are usable" flag
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # Missing 'vi' (the fallback default) is only a warning; a missing
    # explicitly-configured editor is a problem.
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1361 1362
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # render one '1'/'0' digit per queried node, in input order
    digits = ["1" if known else "0" for known in peer.known(nodes)]
    ui.write("%s\n" % "".join(digits))
1375 1376
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; all the work happens in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1380 1381
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced release: unlink the lock files directly and stop. DANGEROUS,
    # per the option help — no check that the holder is actually dead.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        # wlock is acquired before lock, matching Mercurial's usual lock
        # ordering; both are non-blocking (False) so we fail fast if held.
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers (or interrupts).
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Print the state of one lock file; returns 1 if held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We managed to take the lock, so it was free; release it again.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    # Only mention the host when the lock is held remotely.
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # ENOENT: lock file vanished between probe and stat - free.

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1477 1478
@command('debugmanifestfulltextcache', [
    ('', 'clear', False, _('clear the cache')),
    ('a', 'add', '', _('add the given manifest node to the cache'),
     _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache

    With no options, lists the cached manifest entries from most to least
    recently used, together with their sizes. --clear empties the cache;
    --add NODE reads the given manifest, which stores it in the cache.
    """
    with repo.lock():
        r = repo.manifestlog._revlog
        try:
            cache = r._fulltextcache
        except AttributeError:
            # Alternate revlog implementations may not carry the cache.
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read()  # stores revision in cache too

        if not len(cache):
            # bug fix: terminate the message with a newline like every other
            # message this command emits
            ui.write(_('Cache empty\n'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1524 1525
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null'.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump one version of the merge state records (closes over
        # v1records/v2records, which are read below).
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: driver string + its state, \0-separated
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record; fields are \0-separated
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    # v1 records carry flags but not the 'other' node
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: filename then alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # merge labels: local, other, and optionally base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                # Unknown record type: dump it raw with \0 made visible.
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known header record types first, in 'LOml' order; the rest sorted
        # by record payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # Mismatching states: prefer v1, but show v2 too under --verbose.
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1623 1624
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # the 'branches' namespace is handled separately below so that only
    # open branches are offered, preserving the command's historical output
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    # no argument means "complete everything"
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        for name in candidates:
            if name.startswith(prefix):
                matches.add(name)
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1643 1644
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id without requiring it to exist locally.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # --delete mode: remove markers by index and return early.
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker precursor -> successors.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction lifecycle (nested try/finally) so the
        # transaction is released before the lock, in that order.
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # Parents can only be recorded for changesets we have.
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, possibly filtered by --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1760 1761
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Resolve the spec to a path relative to the repository root,
        # bailing out if it points outside the repository.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate stores '/'-separated paths; translate when the OS
        # separator differs.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        for f, st in dirstate.iteritems():
            # Only consider entries under the spec whose dirstate status
            # is in the acceptable set.
            if not (f.startswith(spec) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            # Without --full, stop at the next path separator and offer
            # the intermediate directory instead.
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the given options;
    # no option at all means "any state".
    acceptable = ''
    for opt, states in ((r'normal', 'nm'), (r'added', 'a'), (r'removed', 'r')):
        if opts[opt]:
            acceptable += states
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    matches = set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        matches.update(f)
        matches.update(d)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(matches)))
    ui.write('\n')
1825 1826
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Turn peer request logging on unconditionally; the output only
    # shows up when --debug is in effect.
    with ui.configoverride({('devel', 'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

        yesno = lambda v: _('yes') if v else _('no')

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % yesno(peer.local() is not None))
        ui.write(_('pushable: %s\n') % yesno(peer.canpush()))
1844 1845
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is emulated by temporarily forcing ui.forcemerge, the same
    # knob the real merge machinery consults.
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v, echo the tool-selection inputs that take precedence
        # over merge-patterns (see the ordering in the docstring above).
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug was given, capture (and discard) any
                # warning output _picktool produces while matching.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1923 1924
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
        return

    # Update mode: atomically move the key from old to new.
    key, old, new = keyinfo
    args = {
        'namespace': namespace,
        'key': key,
        'old': old,
        'new': new,
    }
    with target.commandexecutor() as e:
        r = e.callcommand('pushkey', args).result()

    ui.status(pycompat.bytestr(r) + '\n')
    return not r
1951 1952
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors of two revisions and report their
    # relationship plus distance metrics.
    ctxa = scmutil.revsingle(repo, a)
    ctxb = scmutil.revsingle(repo, b)
    veca = pvec.ctxpvec(ctxa)
    vecb = pvec.ctxpvec(ctxb)
    # Exactly one of these four relations is expected to hold; the
    # order of the tests matters (equality first).
    if veca == vecb:
        rel = "="
    elif veca > vecb:
        rel = ">"
    elif veca < vecb:
        rel = "<"
    elif veca | vecb:
        rel = "|"
    ui.write(_("a: %s\n") % veca)
    ui.write(_("b: %s\n") % vecb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (veca._depth, vecb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(veca._depth - vecb._depth),
              pvec._hamming(veca._vec, vecb._vec),
              veca.distance(vecb), rel))
1972 1973
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            # Files the manifest tracks but the dirstate does not...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only files that are not marked as added.
            dsonly = indirstate - inmanifest
            dsnotadded = {f for f in dsonly if ds[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2010 2011
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual work happens in repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2015 2016
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(path)
        if renamed:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
2033 2034
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump: emit one raw index row per revision and return early.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            # ts: running total of raw (uncompressed) sizes so far.
            ts = ts + rs
            # Maintain the current set of DAG heads incrementally.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of raw data to on-disk data consumed so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode the revlog version word: low 16 bits are the format number,
    # the rest are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for the statistics pass below.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # Each of these is [min, max, total]; min starts as None so the
    # first sample initializes it.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into an in-place [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot: starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # Delta revision: extend the parent chain's length and span,
            # and classify what the delta is computed against.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Turn the running totals into averages in place (slot 2 of each
    # accumulator). NOTE(review): an empty revlog (numrevs == 0, hence
    # numfull == 0) would raise ZeroDivisionError here — confirm callers
    # never hit that.
    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates: the field width is derived from the widest
    # value to be printed, so columns line up.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format sized to fit `max`.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format sized to fit `max`.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the pcfmtstr templates.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Label for a chunk-type row; printable single letters get shown
        # both as hex and as themselves.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
2264 2265
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # The parsing pipeline: each stage takes the previous stage's tree.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # Which stage trees to print: 'showalways' unconditionally,
    # 'showchanged' only when the tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping every intermediate tree for possible
    # --verify-optimized comparison below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision sequences; exit status 1 on mismatch.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the result.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2367 2368
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are alternative destinations for the
    # server I/O log; at most one may be given.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    # Serve the SSH wire protocol over this process's stdio; this call
    # does not return until the server shuts down.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2404 2405
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # The second parent defaults to the null revision when omitted.
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(p1, p2)
2422 2423
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Only https and ssh URLs make sense here; fill in the standard port
    # when the URL does not carry one.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # Deferred import: the win32 module is only usable on Windows, which
    # the guard at the top of this function has already established.
    from . import win32

    # Certificate verification is deliberately disabled: we only want the
    # peer's certificate bytes, not a validated connection.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass only checks the chain (build=False); the second call
        # below presumably lets Windows build/install what is missing —
        # see win32.checkcertificatechain for the exact semantics.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2484 2485
@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump each subrepository entry of the changeset's substate.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2495 2496
@command('debugsuccessorssets',
         [('', 'closest', False, _('return closest successors sets only'))],
         _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # One cache dict is shared across all successorssets() calls so the
    # computation done for one revision benefits the next.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                # Indent the set, then print its members space-separated.
                ui.write(' ')
                ui.write(' '.join(short(n) for n in succsset))
            ui.write('\n')
2548 2549
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    # --rev needs a repository to resolve the revisions against; without
    # --rev the command works on a bare template and no repo is required.
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts[r'define']:
        try:
            # Unpacking the 2-element generator raises ValueError when the
            # definition has no '=', which funnels into the error below.
            k, v = (e.strip() for e in d.split('=', 1))
            # 'ui' is reserved: it would shadow the template resource.
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the raw parse tree, and the alias-expanded tree if different.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once against the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2605 2606
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Echo the value read so tests can verify password-prompt handling.
    # Fixed typo: the label used to read 'respose'; 'response' matches the
    # sibling debuguiprompt command.
    ui.write(('response: %s\n') % r)
2613 2614
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the answer so prompt behavior can be exercised by tests.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2621 2622
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Working-copy lock is taken before the store lock, then every cache
    # the repository knows about is regenerated from scratch.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2627 2628
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin entry point: analysis, plan printing, and the actual upgrade all
    # live in the upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2652 2653
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize path separators for display (Windows + ui.slash).
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        fmtpath = util.normpath
    else:
        fmtpath = lambda fn: fn
    # Column widths sized to the longest absolute and relative paths.
    # (Renamed the loop variable from 'abs', which shadowed the builtin.)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(absf) for absf in items),
        max(len(m.rel(absf)) for absf in items))
    for absf in items:
        line = fmt % (absf, fmtpath(m.rel(absf)),
                      'exact' if m.exact(absf) else '')
        ui.write("%s\n" % line.rstrip())
2673 2674
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divnodes = entry.get('divergentnodes')
        if divnodes:
            # Show each divergent changeset with its phase, space-joined,
            # with a trailing separator before the reason.
            formatted = ['%s (%s)' % (c.hex(), c.phasestr()) for c in divnodes]
            dnodes = ' '.join(formatted) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2684 2685
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire command against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Connection-related options are consumed by hg.peer(); strip them so
    # only real command arguments remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only options that were actually set.
    args = pycompat.strkwargs({k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2708 2709
def _parsewirelangblocks(fh):
    """Split a wire-language program into (action, blocklines) pairs.

    An action is an unindented line; the lines of its block are the
    following indented lines (kept with their indentation).
    """
    activeaction = None
    blocklines = []

    for rawline in fh:
        rawline = rawline.rstrip()
        # Skip blanks and comment lines.
        if not rawline or rawline.startswith(b'#'):
            continue

        if rawline.startswith(' '):
            # Indented: payload belonging to the currently open block.
            if not activeaction:
                raise error.Abort(_('indented line outside of block'))
            blocklines.append(rawline)
            continue

        # Unindented: a new block starts; flush the previous one first.
        if activeaction:
            yield activeaction, blocklines
        activeaction = rawline
        blocklines = []

    # Flush the final block, if any.
    if activeaction:
        yield activeaction, blocklines
2740 2741
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    # Validate option combinations before doing any connection work.
    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        # Hint now lists "http2", which the check above accepts but the old
        # hint omitted.
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    # Parse the whole program up front so syntax errors surface before we
    # establish any connection.
    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

                if not isinstance(peer, httppeer.httpv2peer):
                    raise error.Abort(_('could not instantiate HTTP peer for '
                                        'wire protocol version 2'),
                                      hint=_('the server may not have the '
                                             'feature enabled or is not '
                                             'allowing this client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                # Closing quote added to match the opening one in the hint.
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    # BUG FIX: the split() result is a [keyword, path] list;
                    # the previous code passed the whole list to open(),
                    # which raised TypeError. Open the path element instead.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: method

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # Drain the error body so the connection can be reused.
                e.read()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    # Tear down peer and local SSH server process, if any.
    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,556 +1,557
1 1 # fileset.py - file set queries for mercurial
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import re
12 12
13 13 from .i18n import _
14 14 from . import (
15 15 error,
16 16 filesetlang,
17 17 match as matchmod,
18 18 merge,
19 19 pycompat,
20 20 registrar,
21 21 scmutil,
22 22 util,
23 23 )
24 24 from .utils import (
25 25 stringutil,
26 26 )
27 27
28 28 # helpers for processing parsed tree
29 29 getsymbol = filesetlang.getsymbol
30 30 getstring = filesetlang.getstring
31 31 _getkindpat = filesetlang.getkindpat
32 32 getpattern = filesetlang.getpattern
33 33 getargs = filesetlang.getargs
34 34
def getmatch(mctx, x):
    """Evaluate a parsed tree node and return the resulting matcher."""
    if not x:
        raise error.ParseError(_("missing argument"))
    op = x[0]
    return methods[op](mctx, *x[1:])

def stringmatch(mctx, x):
    """Build a matcher from a single bare pattern string."""
    return mctx.matcher([x])

def kindpatmatch(mctx, x, y):
    """Build a matcher from a 'kind:pattern' pair."""
    pat = _getkindpat(x, y, matchmod.allpatternkinds,
                      _("pattern must be a string"))
    return stringmatch(mctx, pat)

def andmatch(mctx, x, y):
    """Intersection of two sub-expressions."""
    return matchmod.intersectmatchers(getmatch(mctx, x), getmatch(mctx, y))

def ormatch(mctx, *xs):
    """Union of any number of sub-expressions."""
    matchers = []
    for tree in xs:
        matchers.append(getmatch(mctx, tree))
    return matchmod.unionmatcher(matchers)

def notmatch(mctx, x):
    """Complement of a sub-expression."""
    m = getmatch(mctx, x)
    def inverted(f):
        return not m(f)
    return mctx.predicate(inverted, predrepr=('<not %r>', m))

def minusmatch(mctx, x, y):
    """Files matched by x but not by y."""
    return matchmod.differencematcher(getmatch(mctx, x), getmatch(mctx, y))

def listmatch(mctx, *xs):
    """Reject a bare list; lists are only valid as function arguments."""
    raise error.ParseError(_("can't use a list in this context"),
                           hint=_('see \'hg help "filesets.x or y"\''))
68 68
def func(mctx, a, b):
    """Evaluate a registered predicate function call."""
    funcname = getsymbol(a)
    if funcname in symbols:
        return symbols[funcname](mctx, b)

    # unknown function: suggest only the documented (non-internal) predicates
    syms = [s for (s, fn) in symbols.items()
            if getattr(fn, '__doc__', None) is not None]
    raise error.UnknownIdentifier(funcname, syms)
78 78
# symbols are callable like:
#  fun(mctx, x)
# with:
#  mctx - current matchctx instance
#  x - argument in tree form
symbols = filesetlang.symbols

# filesets using matchctx.status()
_statuscallers = set()

# decorator used below to register the built-in predicates into `symbols`
predicate = registrar.filesetpredicate()
90 90
# The eight predicates below all select one list out of the precomputed
# status tuple (see _buildstatus()); callstatus=True records them in
# _statuscallers so that status information is gathered when needed.

@predicate('modified()', callstatus=True)
def modified(mctx, x):
    """File that is modified according to :hg:`status`.
    """
    # i18n: "modified" is a keyword
    getargs(x, 0, 0, _("modified takes no arguments"))
    s = set(mctx.status().modified)
    return mctx.predicate(s.__contains__, predrepr='modified')

@predicate('added()', callstatus=True)
def added(mctx, x):
    """File that is added according to :hg:`status`.
    """
    # i18n: "added" is a keyword
    getargs(x, 0, 0, _("added takes no arguments"))
    s = set(mctx.status().added)
    return mctx.predicate(s.__contains__, predrepr='added')

@predicate('removed()', callstatus=True)
def removed(mctx, x):
    """File that is removed according to :hg:`status`.
    """
    # i18n: "removed" is a keyword
    getargs(x, 0, 0, _("removed takes no arguments"))
    s = set(mctx.status().removed)
    return mctx.predicate(s.__contains__, predrepr='removed')

@predicate('deleted()', callstatus=True)
def deleted(mctx, x):
    """Alias for ``missing()``.
    """
    # i18n: "deleted" is a keyword
    getargs(x, 0, 0, _("deleted takes no arguments"))
    s = set(mctx.status().deleted)
    return mctx.predicate(s.__contains__, predrepr='deleted')

@predicate('missing()', callstatus=True)
def missing(mctx, x):
    """File that is missing according to :hg:`status`.
    """
    # i18n: "missing" is a keyword
    getargs(x, 0, 0, _("missing takes no arguments"))
    # status has no separate "missing" list; "deleted" is the same set
    s = set(mctx.status().deleted)
    # fix: report this predicate under its own name in debug output
    # (previously predrepr='deleted', which was misleading for missing())
    return mctx.predicate(s.__contains__, predrepr='missing')

@predicate('unknown()', callstatus=True)
def unknown(mctx, x):
    """File that is unknown according to :hg:`status`."""
    # i18n: "unknown" is a keyword
    getargs(x, 0, 0, _("unknown takes no arguments"))
    s = set(mctx.status().unknown)
    return mctx.predicate(s.__contains__, predrepr='unknown')

@predicate('ignored()', callstatus=True)
def ignored(mctx, x):
    """File that is ignored according to :hg:`status`."""
    # i18n: "ignored" is a keyword
    getargs(x, 0, 0, _("ignored takes no arguments"))
    s = set(mctx.status().ignored)
    return mctx.predicate(s.__contains__, predrepr='ignored')

@predicate('clean()', callstatus=True)
def clean(mctx, x):
    """File that is clean according to :hg:`status`.
    """
    # i18n: "clean" is a keyword
    getargs(x, 0, 0, _("clean takes no arguments"))
    s = set(mctx.status().clean)
    return mctx.predicate(s.__contains__, predrepr='clean')
160 160
@predicate('tracked()')
def tracked(mctx, x):
    """File that is under Mercurial control."""
    # i18n: "tracked" is a keyword
    getargs(x, 0, 0, _("tracked takes no arguments"))
    ctx = mctx.ctx
    # membership in the changectx is exactly "tracked at this revision"
    return mctx.predicate(ctx.__contains__, predrepr='tracked')

@predicate('binary()')
def binary(mctx, x):
    """File that appears to be binary (contains NUL bytes).
    """
    # i18n: "binary" is a keyword
    getargs(x, 0, 0, _("binary takes no arguments"))
    def isbinary(fctx):
        return fctx.isbinary()
    return mctx.fpredicate(isbinary, predrepr='binary', cache=True)

@predicate('exec()')
def exec_(mctx, x):
    """File that is marked as executable.
    """
    # i18n: "exec" is a keyword
    getargs(x, 0, 0, _("exec takes no arguments"))
    ctx = mctx.ctx
    def isexec(f):
        return ctx.flags(f) == 'x'
    return mctx.predicate(isexec, predrepr='exec')

@predicate('symlink()')
def symlink(mctx, x):
    """File that is marked as a symlink.
    """
    # i18n: "symlink" is a keyword
    getargs(x, 0, 0, _("symlink takes no arguments"))
    ctx = mctx.ctx
    def islink(f):
        return ctx.flags(f) == 'l'
    return mctx.predicate(islink, predrepr='symlink')
194 194
def _mergestatematch(mctx, code, name):
    """Common implementation of resolved()/unresolved(): match files whose
    merge-state record equals *code* ('r' or 'u').

    Merge state only exists for the working directory, so any committed
    revision matches nothing."""
    if mctx.ctx.rev() is not None:
        return mctx.never()
    ms = merge.mergestate.read(mctx.ctx.repo())
    return mctx.predicate(lambda f: f in ms and ms[f] == code,
                          predrepr=name)

@predicate('resolved()')
def resolved(mctx, x):
    """File that is marked resolved according to :hg:`resolve -l`.
    """
    # i18n: "resolved" is a keyword
    getargs(x, 0, 0, _("resolved takes no arguments"))
    return _mergestatematch(mctx, 'r', 'resolved')

@predicate('unresolved()')
def unresolved(mctx, x):
    """File that is marked unresolved according to :hg:`resolve -l`.
    """
    # i18n: "unresolved" is a keyword
    getargs(x, 0, 0, _("unresolved takes no arguments"))
    return _mergestatematch(mctx, 'u', 'unresolved')
218 218
@predicate('hgignore()')
def hgignore(mctx, x):
    """File that matches the active .hgignore pattern.
    """
    # i18n: "hgignore" is a keyword
    getargs(x, 0, 0, _("hgignore takes no arguments"))
    # the dirstate ignore matcher is itself a callable matcher
    repo = mctx.ctx.repo()
    return repo.dirstate._ignore

@predicate('portable()')
def portable(mctx, x):
    """File that has a portable name. (This doesn't include filenames with case
    collisions.)
    """
    # i18n: "portable" is a keyword
    getargs(x, 0, 0, _("portable takes no arguments"))
    def isportable(f):
        # checkwinfilename() returns None when the name is acceptable
        return util.checkwinfilename(f) is None
    return mctx.predicate(isportable, predrepr='portable')

@predicate('grep(regex)')
def grep(mctx, x):
    """File contains the given regular expression.
    """
    try:
        # i18n: "grep" is a keyword
        r = re.compile(getstring(x, _("grep requires a pattern")))
    except re.error as e:
        raise error.ParseError(_('invalid match pattern: %s') %
                               stringutil.forcebytestr(e))
    def grepmatches(fctx):
        return r.search(fctx.data())
    return mctx.fpredicate(grepmatches, predrepr=('grep(%r)', r.pattern),
                           cache=True)
249 249
def _sizetomax(s):
    """Convert a size spec like '4k' to the largest byte count it denotes

    Used by sizematcher() to turn a bare value into an inclusive upper
    bound: e.g. size('1k') should match anything up to (but not including)
    2k, and size('4.5k') anything below 4.6k.
    """
    try:
        s = s.strip().lower()
        # util._sizeunits pairs a suffix with its multiplier; assumed to be
        # ordered so longer suffixes match first -- TODO confirm in util
        for k, v in util._sizeunits:
            if s.endswith(k):
                # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
                n = s[:-len(k)]
                inc = 1.0
                # with a fractional value, bump only the last decimal place
                if "." in n:
                    inc /= 10 ** len(n.split(".")[1])
                return int((float(n) + inc) * v) - 1
        # no extension, this is a precise value
        return int(s)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
265 265
def sizematcher(expr):
    """Return a function(size) -> bool from the ``size()`` expression"""
    expr = expr.strip()
    if '-' in expr: # do we have a range?
        lopart, hipart = expr.split('-', 1)
        lo = util.sizetoint(lopart)
        hi = util.sizetoint(hipart)
        return lambda x: lo <= x <= hi
    # two-character operators must be tried before their one-character
    # prefixes
    for prefix in ("<=", "<", ">=", ">"):
        if expr.startswith(prefix):
            bound = util.sizetoint(expr[len(prefix):])
            if prefix == "<=":
                return lambda x: x <= bound
            if prefix == "<":
                return lambda x: x < bound
            if prefix == ">=":
                return lambda x: x >= bound
            return lambda x: x > bound
    # bare value: an implicit inclusive range, e.g. '1k' is [1k, 2k)
    lo = util.sizetoint(expr)
    hi = _sizetomax(expr)
    return lambda x: lo <= x <= hi

@predicate('size(expression)')
def size(mctx, x):
    """File size matches the given expression. Examples:

    - size('1k') - files from 1024 to 2047 bytes
    - size('< 20k') - files less than 20480 bytes
    - size('>= .5MB') - files at least 524288 bytes
    - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
    """
    # i18n: "size" is a keyword
    expr = getstring(x, _("size requires an expression"))
    sizep = sizematcher(expr)
    def sizematches(fctx):
        return sizep(fctx.size())
    return mctx.fpredicate(sizematches, predrepr=('size(%r)', expr),
                           cache=True)
305 305
@predicate('encoding(name)')
def encoding(mctx, x):
    """File can be successfully decoded with the given character
    encoding. May not be useful for encodings other than ASCII and
    UTF-8.
    """

    # i18n: "encoding" is a keyword
    enc = getstring(x, _("encoding requires an encoding name"))

    def encp(fctx):
        data = fctx.data()
        try:
            data.decode(pycompat.sysstr(enc))
        except LookupError:
            # the codec itself doesn't exist: a user error, not a mismatch
            raise error.Abort(_("unknown encoding '%s'") % enc)
        except UnicodeDecodeError:
            return False
        return True

    return mctx.fpredicate(encp, predrepr=('encoding(%r)', enc), cache=True)

@predicate('eol(style)')
def eol(mctx, x):
    """File contains newlines of the given style (dos, unix, mac). Binary
    files are excluded, files with mixed line endings match multiple
    styles.
    """

    # i18n: "eol" is a keyword
    enc = getstring(x, _("eol requires a style name"))

    def eolp(fctx):
        if fctx.isbinary():
            return False
        d = fctx.data()
        if enc in ('dos', 'win') and '\r\n' in d:
            return True
        if enc == 'unix' and re.search('(?<!\r)\n', d):
            return True
        if enc == 'mac' and re.search('\r(?!\n)', d):
            return True
        return False
    return mctx.fpredicate(eolp, predrepr=('eol(%r)', enc), cache=True)

@predicate('copied()')
def copied(mctx, x):
    """File that is recorded as being copied.
    """
    # i18n: "copied" is a keyword
    getargs(x, 0, 0, _("copied takes no arguments"))
    def copiedp(fctx):
        # a copy source shows up as a parent with a different path
        parents = fctx.parents()
        return parents and parents[0].path() != fctx.path()
    return mctx.fpredicate(copiedp, predrepr='copied', cache=True)
361 361
@predicate('revs(revs, pattern)')
def revs(mctx, x):
    """Evaluate set in the specified revisions. If the revset match multiple
    revs, this will return file matching pattern in any of the revision.
    """
    # i18n: "revs" is a keyword
    r, x = getargs(x, 2, 2, _("revs takes two arguments"))
    # i18n: "revs" is a keyword
    revspec = getstring(r, _("first argument to revs must be a revision"))
    repo = mctx.ctx.repo()
    revlist = scmutil.revrange(repo, [revspec])

    matchers = []
    for rev in revlist:
        ctx = repo[rev]
        # each revision gets its own context (and status, if needed)
        st = _buildstatus(ctx, x)
        matchers.append(getmatch(mctx.switch(ctx, st), x))
    if not matchers:
        return mctx.never()
    if len(matchers) == 1:
        return matchers[0]
    return matchmod.unionmatcher(matchers)

@predicate('status(base, rev, pattern)')
def status(mctx, x):
    """Evaluate predicate using status change between ``base`` and
    ``rev``. Examples:

    - ``status(3, 7, added())`` - matches files added from "3" to "7"
    """
    repo = mctx.ctx.repo()
    # i18n: "status" is a keyword
    b, r, x = getargs(x, 3, 3, _("status takes three arguments"))

    def getrevspec(tree, err):
        # an empty revision spec is rejected just like a missing one
        spec = getstring(tree, err)
        if not spec:
            raise error.ParseError(err)
        return spec

    # i18n: "status" is a keyword
    baserevspec = getrevspec(b, _("first argument to status must be a revision"))
    revspec = getrevspec(r, _("second argument to status must be a revision"))
    basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
    return getmatch(mctx.switch(ctx, _buildstatus(ctx, x, basectx=basectx)), x)
405 405
@predicate('subrepo([pattern])')
def subrepo(mctx, x):
    """Subrepositories whose paths match the given pattern.
    """
    # i18n: "subrepo" is a keyword
    getargs(x, 0, 1, _("subrepo takes at most one argument"))
    ctx = mctx.ctx
    sstate = ctx.substate
    if not x:
        # no pattern: every subrepo path matches
        return mctx.predicate(sstate.__contains__, predrepr='subrepo')
    pat = getpattern(x, matchmod.allpatternkinds,
                     # i18n: "subrepo" is a keyword
                     _("subrepo requires a pattern or no arguments"))
    if matchmod.patkind(pat):
        m = matchmod.match(ctx.repo().root, '', [pat], ctx=ctx)
    else:
        # a literal path: plain string comparison is enough
        def m(s):
            return s == pat
    return mctx.predicate(lambda f: f in sstate and m(f),
                          predrepr=('subrepo(%r)', pat))
428 428
# mapping of parse-tree operator name to its evaluation function; each
# entry is called as fn(mctx, *operands) by getmatch() and returns a matcher
methods = {
    'string': stringmatch,
    'symbol': stringmatch,
    'kindpat': kindpatmatch,
    'and': andmatch,
    'or': ormatch,
    'minus': minusmatch,
    'list': listmatch,
    'not': notmatch,
    'func': func,
}
440 440
class matchctx(object):
    """Evaluation context for a fileset expression

    Bundles the changectx being queried, an optional precomputed status
    tuple, and a badfn callback, and provides factories for the matcher
    objects that predicates return.
    """
    def __init__(self, ctx, status=None, badfn=None):
        self.ctx = ctx
        self._status = status
        self._badfn = badfn

    def status(self):
        # status tuple computed by _buildstatus(), or None if the
        # expression needs no status information
        return self._status

    def matcher(self, patterns):
        """Create a matcher from a list of plain pattern strings"""
        return self.ctx.match(patterns, badfn=self._badfn)

    def predicate(self, predfn, predrepr=None, cache=False):
        """Create a matcher to select files by predfn(filename)"""
        if cache:
            predfn = util.cachefunc(predfn)
        repo = self.ctx.repo()
        return matchmod.predicatematcher(repo.root, repo.getcwd(), predfn,
                                         predrepr=predrepr, badfn=self._badfn)

    def fpredicate(self, predfn, predrepr=None, cache=False):
        """Create a matcher to select files by predfn(fctx) at the current
        revision

        Missing files are ignored.
        """
        ctx = self.ctx
        if ctx.rev() is None:
            # working directory: files may vanish or be unreadable under
            # our feet, so treat lookup/audit/IO failures as "no match"
            def fctxpredfn(f):
                try:
                    fctx = ctx[f]
                except error.LookupError:
                    return False
                try:
                    fctx.audit()
                except error.Abort:
                    return False
                try:
                    return predfn(fctx)
                except (IOError, OSError) as e:
                    # open()-ing a directory fails with EACCES on Windows
                    if e.errno in (errno.ENOENT, errno.EACCES, errno.ENOTDIR,
                                   errno.EISDIR):
                        return False
                    raise
        else:
            # committed revision: content is immutable, only guard against
            # files absent from the manifest
            def fctxpredfn(f):
                try:
                    fctx = ctx[f]
                except error.LookupError:
                    return False
                return predfn(fctx)
        return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)

    def never(self):
        """Create a matcher to select nothing"""
        repo = self.ctx.repo()
        return matchmod.nevermatcher(repo.root, repo.getcwd(),
                                     badfn=self._badfn)

    def switch(self, ctx, status=None):
        # derive a context for another revision, keeping the badfn
        return matchctx(ctx, status, self._badfn)
503 503
# filesets using matchctx.switch()
_switchcallers = [
    'revs',
    'status',
]

def _intree(funcs, tree):
    """Report whether any function named in *funcs* is called in *tree*

    Subtrees passed to a switch-caller ('revs'/'status') are not searched,
    since they will be evaluated against a different context.
    """
    if isinstance(tree, tuple):
        if tree[0] == 'func' and tree[1][0] == 'symbol':
            if tree[1][1] in funcs:
                return True
            if tree[1][1] in _switchcallers:
                # arguments won't be evaluated in the current context
                return False
        for s in tree[1:]:
            if _intree(funcs, s):
                return True
    return False
522 522
def match(ctx, expr, badfn=None):
    """Create a matcher for a single fileset expression"""
    # parse -> analyze -> optimize, then evaluate against ctx
    tree = filesetlang.optimize(filesetlang.analyze(filesetlang.parse(expr)))
    mctx = matchctx(ctx, _buildstatus(ctx, tree), badfn=badfn)
    return getmatch(mctx, tree)
529 530
def _buildstatus(ctx, tree, basectx=None):
    """Compute the status between basectx (default: first parent) and ctx
    if the expression tree requires it; return None otherwise"""
    if not _intree(_statuscallers, tree):
        # no status-based predicate in the tree; skip the expensive call
        return None
    listunknown = _intree(['unknown'], tree)
    listignored = _intree(['ignored'], tree)
    if basectx is None:
        basectx = ctx.p1()
    return basectx.status(ctx, listunknown=listunknown,
                          listignored=listignored, listclean=True)
543 544
def loadpredicate(ui, extname, registrarobj):
    """Load fileset predicates from specified registrarobj

    Each predicate is added to the global symbols table; those registered
    with callstatus=True are also recorded in _statuscallers so that
    _buildstatus() knows when status information must be gathered.
    """
    for name, func in registrarobj._table.iteritems():
        symbols[name] = func
        if func._callstatus:
            _statuscallers.add(name)

# load built-in predicates explicitly to setup _statuscallers
loadpredicate(None, None, predicate)

# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
@@ -1,175 +1,213
1 1 # filesetlang.py - parser, tokenizer and utility for file set language
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from . import (
12 12 error,
13 13 parser,
14 14 pycompat,
15 15 )
16 16
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
    ":": (15, None, None, ("kindpat", 15), None),
    "-": (5, None, ("negate", 19), ("minus", 5), None),
    "not": (10, None, ("not", 10), None, None),
    "!": (10, None, ("not", 10), None, None),
    "and": (5, None, None, ("and", 5), None),
    "&": (5, None, None, ("and", 5), None),
    "or": (4, None, None, ("or", 4), None),
    "|": (4, None, None, ("or", 4), None),
    "+": (4, None, None, ("or", 4), None),
    ",": (2, None, None, ("list", 2), None),
    ")": (0, None, None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "end": (0, None, None, None, None),
}

# words that are operators rather than bare symbols
keywords = {'and', 'or', 'not'}

# predicate-name -> function table, populated by fileset.loadpredicate()
symbols = {}

# characters allowed in an unquoted symbol besides alphanumerics
globchars = ".*{}[]?/\\_"
41 41
def tokenize(program):
    """Tokenize a fileset expression, yielding (type, value, pos) tuples

    Token types are the operator names from ``elements`` plus 'string',
    'symbol' and a final 'end'.  ``pos`` is the byte offset of the token
    within ``program``.  Raises ParseError on malformed input.
    """
    pos, l = 0, len(program)
    program = pycompat.bytestr(program)
    while pos < l:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c in "(),-:|&+!": # handle simple operators
            yield (c, None, pos)
        elif (c in '"\'' or c == 'r' and
              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
            if c == 'r':
                # raw string: no escape processing
                pos += 1
                c = program[pos]
                decode = lambda x: x
            else:
                decode = parser.unescapestr
            pos += 1
            s = pos
            while pos < l: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', decode(program[s:pos]), s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isalnum() or c in globchars or ord(c) > 127:
            # gather up a symbol/keyword
            s = pos
            pos += 1
            while pos < l: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d in globchars or ord(d) > 127):
                    break
                pos += 1
            sym = program[s:pos]
            if sym in keywords: # operator keywords
                yield (sym, None, s)
            else:
                yield ('symbol', sym, s)
            pos -= 1 # compensate for the unconditional advance below
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    yield ('end', None, pos)
91 91
def parse(expr):
    """Parse a fileset expression string into a tree for analyze()"""
    p = parser.parser(elements)
    tree, pos = p.parse(tokenize(expr))
    if pos != len(expr):
        # trailing garbage after a syntactically complete expression
        raise error.ParseError(_("invalid token"), pos)
    # flatten nested 'list'/'or' chains into n-ary nodes
    simplified = parser.simplifyinfixops(tree, {'list', 'or'})
    return simplified
98 98
def getsymbol(x):
    """Extract the name of a 'symbol' node, or raise ParseError"""
    if x and x[0] == 'symbol':
        return x[1]
    raise error.ParseError(_('not a symbol'))

def getstring(x, err):
    """Extract the value of a 'string' or 'symbol' node, or raise err"""
    if not x:
        raise error.ParseError(err)
    if x[0] in ('string', 'symbol'):
        return x[1]
    raise error.ParseError(err)

def getkindpat(x, y, allkinds, err):
    """Join a kind node and a pattern node into a 'kind:pat' string"""
    kind = getsymbol(x)
    pat = getstring(y, err)
    if kind not in allkinds:
        raise error.ParseError(_("invalid pattern kind: %s") % kind)
    return '%s:%s' % (kind, pat)

def getpattern(x, allkinds, err):
    """Extract a pattern string, resolving an optional kind prefix"""
    if x and x[0] == 'kindpat':
        return getkindpat(x[1], x[2], allkinds, err)
    return getstring(x, err)

def getlist(x):
    """Flatten a 'list' node into a Python list of argument nodes"""
    if not x:
        return []
    if x[0] == 'list':
        return list(x[1:])
    return [x]

def getargs(x, min, max, err):
    """Return the argument list of x if its length is within [min, max]"""
    l = getlist(x)
    if not (min <= len(l) <= max):
        raise error.ParseError(err)
    return l
133 133
def _analyze(x):
    """Recursively validate the parsed tree and strip pseudo operations

    'group' nodes are unwrapped, unary 'negate' is rejected (filesets have
    no numeric negation), and kind/function names are checked to be
    symbols.  Raises ParseError on invalid input and ProgrammingError on
    an operator the parser should never have produced.
    """
    if x is None:
        return x

    op = x[0]
    if op in {'string', 'symbol'}:
        return x
    if op == 'kindpat':
        getsymbol(x[1]) # kind must be a symbol
        t = _analyze(x[2])
        return (op, x[1], t)
    if op == 'group':
        # parentheses only affect parsing; drop the wrapper node
        return _analyze(x[1])
    if op == 'negate':
        raise error.ParseError(_("can't use negate operator in this context"))
    if op == 'not':
        t = _analyze(x[1])
        return (op, t)
    if op in {'and', 'minus'}:
        ta = _analyze(x[1])
        tb = _analyze(x[2])
        return (op, ta, tb)
    if op in {'list', 'or'}:
        ts = tuple(_analyze(y) for y in x[1:])
        return (op,) + ts
    if op == 'func':
        getsymbol(x[1]) # function name must be a symbol
        ta = _analyze(x[2])
        return (op, x[1], ta)
    raise error.ProgrammingError('invalid operator %r' % op)

def analyze(x):
    """Transform raw parsed tree to evaluatable tree which can be fed to
    optimize() or getmatch()

    All pseudo operations should be mapped to real operations or functions
    defined in methods or symbols table respectively.
    """
    return _analyze(x)
173 173
def _optimize(x):
    """Return a (weight, tree) pair for the given subtree

    The weight is a rough relative cost estimate of evaluating the
    subtree: bare patterns are cheap (0.5), and a function's cost comes
    from an optional ``_weight`` attribute on its registered predicate
    (default 1).  Note this is currently a stub: optimize() discards the
    weight instead of reordering subexpressions with it.
    """
    if x is None:
        return 0, x

    op = x[0]
    if op in {'string', 'symbol'}:
        # plain pattern match: cheaper than any predicate call
        return 0.5, x
    if op == 'kindpat':
        w, t = _optimize(x[2])
        return w, (op, x[1], t)
    if op == 'not':
        w, t = _optimize(x[1])
        return w, (op, t)
    if op in {'and', 'minus'}:
        # both operands may be evaluated, so cost is the dearer one
        wa, ta = _optimize(x[1])
        wb, tb = _optimize(x[2])
        return max(wa, wb), (op, ta, tb)
    if op == 'or':
        ws, ts = zip(*(_optimize(y) for y in x[1:]))
        return max(ws), (op,) + ts
    if op == 'list':
        # function arguments: all of them are evaluated
        ws, ts = zip(*(_optimize(y) for y in x[1:]))
        return sum(ws), (op,) + ts
    if op == 'func':
        f = getsymbol(x[1])
        # registered predicates may declare their cost via _weight;
        # unknown functions default to 1
        w = getattr(symbols.get(f), '_weight', 1)
        wa, ta = _optimize(x[2])
        return w + wa, (op, x[1], ta)
    raise error.ProgrammingError('invalid operator %r' % op)

def optimize(x):
    """Reorder/rewrite evaluatable tree for optimization

    All pseudo operations should be transformed beforehand.
    """
    _w, t = _optimize(x)
    return t
211
def prettyformat(tree):
    """Render a parsed tree as an indented string for debug output"""
    leafnodes = ('string', 'symbol')
    return parser.prettyformat(tree, leafnodes)
@@ -1,89 +1,90
1 1 # minifileset.py - a simple language to select files
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from . import (
12 12 error,
13 13 fileset,
14 14 filesetlang,
15 15 pycompat,
16 16 )
17 17
def _sizep(x):
    """Build a size-matching function from a ``size()`` argument node"""
    # i18n: "size" is a keyword
    spec = filesetlang.getstring(x, _("size requires an expression"))
    return fileset.sizematcher(spec)
22 22
def _compile(tree):
    """Compile a parsed minifileset tree into a function (name, size) -> bool

    Raises ParseError for constructs the mini language does not support,
    and ProgrammingError for tree nodes analyze() should have removed.
    """
    if not tree:
        raise error.ParseError(_("missing argument"))
    op = tree[0]
    if op in {'symbol', 'string', 'kindpat'}:
        name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern'))
        if name.startswith('**'): # file extension test, ex. "**.tar.gz"
            ext = name[2:]
            for c in pycompat.bytestr(ext):
                if c in '*{}[]?/\\':
                    raise error.ParseError(_('reserved character: %s') % c)
            return lambda n, s: n.endswith(ext)
        elif name.startswith('path:'): # directory or full path test
            p = name[5:] # prefix
            pl = len(p)
            f = lambda n, s: n.startswith(p) and (len(n) == pl
                                                  or n[pl:pl + 1] == '/')
            return f
        raise error.ParseError(_("unsupported file pattern: %s") % name,
                               hint=_('paths must be prefixed with "path:"'))
    elif op == 'or':
        funcs = [_compile(x) for x in tree[1:]]
        return lambda n, s: any(f(n, s) for f in funcs)
    elif op == 'and':
        func1 = _compile(tree[1])
        func2 = _compile(tree[2])
        return lambda n, s: func1(n, s) and func2(n, s)
    elif op == 'not':
        # fix: compile the operand once here instead of re-compiling the
        # whole subtree on every (name, size) evaluation
        func1 = _compile(tree[1])
        return lambda n, s: not func1(n, s)
    elif op == 'func':
        symbols = {
            'all': lambda n, s: True,
            'none': lambda n, s: False,
            'size': lambda n, s: _sizep(tree[2])(s),
        }

        name = filesetlang.getsymbol(tree[1])
        if name in symbols:
            return symbols[name]

        raise error.UnknownIdentifier(name, symbols.keys())
    elif op == 'minus': # equivalent to 'x and not y'
        func1 = _compile(tree[1])
        func2 = _compile(tree[2])
        return lambda n, s: func1(n, s) and not func2(n, s)
    elif op == 'list':
        raise error.ParseError(_("can't use a list in this context"),
                               hint=_('see \'hg help "filesets.x or y"\''))
    raise error.ProgrammingError('illegal tree: %r' % (tree,))
72 72
def compile(text):
    """generate a function (path, size) -> bool from filter specification.

    "text" could contain the operators defined by the fileset language for
    common logic operations, and parenthesis for grouping. The supported path
    tests are '**.extname' for file extension test, and '"path:dir/subdir"'
    for prefix test. The ``size()`` predicate is borrowed from filesets to test
    file size. The predicates ``all()`` and ``none()`` are also supported.

    '(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for
    example, will catch all php files whose size is greater than 10 MB, all
    files whose name ends with ".zip", and all files under "bin" in the repo
    root except for "bin/README".
    """
    # same pipeline as fileset.match(): parse, analyze, optimize, compile
    parsed = filesetlang.parse(text)
    analyzed = filesetlang.analyze(parsed)
    optimized = filesetlang.optimize(analyzed)
    return _compile(optimized)
@@ -1,439 +1,443
1 1 # registrar.py - utilities to register function for specific purpose
2 2 #
3 3 # Copyright FUJIWARA Katsunori <foozy@lares.dti.ne.jp> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from . import (
11 11 configitems,
12 12 error,
13 13 pycompat,
14 14 util,
15 15 )
16 16
17 17 # unlike the other registered items, config options are neither functions or
18 18 # classes. Registering the option is just small function call.
19 19 #
20 20 # We still add the official API to the registrar module for consistency with
 21 21 # the other items extensions might want to register.
22 22 configitem = configitems.getitemregister
23 23
24 24 class _funcregistrarbase(object):
25 25 """Base of decorator to register a function for specific purpose
26 26
27 27 This decorator stores decorated functions into own dict 'table'.
28 28
29 29 The least derived class can be defined by overriding 'formatdoc',
30 30 for example::
31 31
32 32 class keyword(_funcregistrarbase):
33 33 _docformat = ":%s: %s"
34 34
35 35 This should be used as below:
36 36
37 37 keyword = registrar.keyword()
38 38
39 39 @keyword('bar')
40 40 def barfunc(*args, **kwargs):
41 41 '''Explanation of bar keyword ....
42 42 '''
43 43 pass
44 44
45 45 In this case:
46 46
47 47 - 'barfunc' is stored as 'bar' in '_table' of an instance 'keyword' above
48 48 - 'barfunc.__doc__' becomes ":bar: Explanation of bar keyword"
49 49 """
50 50 def __init__(self, table=None):
51 51 if table is None:
52 52 self._table = {}
53 53 else:
54 54 self._table = table
55 55
56 56 def __call__(self, decl, *args, **kwargs):
57 57 return lambda func: self._doregister(func, decl, *args, **kwargs)
58 58
59 59 def _doregister(self, func, decl, *args, **kwargs):
60 60 name = self._getname(decl)
61 61
62 62 if name in self._table:
63 63 msg = 'duplicate registration for name: "%s"' % name
64 64 raise error.ProgrammingError(msg)
65 65
66 66 if func.__doc__ and not util.safehasattr(func, '_origdoc'):
67 67 doc = pycompat.sysbytes(func.__doc__).strip()
68 68 func._origdoc = doc
69 69 func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
70 70
71 71 self._table[name] = func
72 72 self._extrasetup(name, func, *args, **kwargs)
73 73
74 74 return func
75 75
76 76 def _parsefuncdecl(self, decl):
77 77 """Parse function declaration and return the name of function in it
78 78 """
79 79 i = decl.find('(')
80 80 if i >= 0:
81 81 return decl[:i]
82 82 else:
83 83 return decl
84 84
85 85 def _getname(self, decl):
86 86 """Return the name of the registered function from decl
87 87
88 88 Derived class should override this, if it allows more
89 89 descriptive 'decl' string than just a name.
90 90 """
91 91 return decl
92 92
93 93 _docformat = None
94 94
95 95 def _formatdoc(self, decl, doc):
96 96 """Return formatted document of the registered function for help
97 97
98 98 'doc' is '__doc__.strip()' of the registered function.
99 99 """
100 100 return self._docformat % (decl, doc)
101 101
102 102 def _extrasetup(self, name, func):
 103 103 """Execute extra setup for registered function, if needed
104 104 """
105 105
106 106 class command(_funcregistrarbase):
107 107 """Decorator to register a command function to table
108 108
109 109 This class receives a command table as its argument. The table should
110 110 be a dict.
111 111
112 112 The created object can be used as a decorator for adding commands to
113 113 that command table. This accepts multiple arguments to define a command.
114 114
115 115 The first argument is the command name (as bytes).
116 116
117 117 The `options` keyword argument is an iterable of tuples defining command
118 118 arguments. See ``mercurial.fancyopts.fancyopts()`` for the format of each
119 119 tuple.
120 120
121 121 The `synopsis` argument defines a short, one line summary of how to use the
122 122 command. This shows up in the help output.
123 123
124 124 There are three arguments that control what repository (if any) is found
125 125 and passed to the decorated function: `norepo`, `optionalrepo`, and
126 126 `inferrepo`.
127 127
128 128 The `norepo` argument defines whether the command does not require a
129 129 local repository. Most commands operate against a repository, thus the
130 130 default is False. When True, no repository will be passed.
131 131
132 132 The `optionalrepo` argument defines whether the command optionally requires
133 133 a local repository. If no repository can be found, None will be passed
134 134 to the decorated function.
135 135
136 136 The `inferrepo` argument defines whether to try to find a repository from
137 137 the command line arguments. If True, arguments will be examined for
138 138 potential repository locations. See ``findrepo()``. If a repository is
139 139 found, it will be used and passed to the decorated function.
140 140
141 141 The `intents` argument defines a set of intended actions or capabilities
142 142 the command is taking. These intents can be used to affect the construction
143 143 of the repository object passed to the command. For example, commands
144 144 declaring that they are read-only could receive a repository that doesn't
145 145 have any methods allowing repository mutation. Other intents could be used
146 146 to prevent the command from running if the requested intent could not be
147 147 fulfilled.
148 148
149 149 The following intents are defined:
150 150
151 151 readonly
152 152 The command is read-only
153 153
154 154 The signature of the decorated function looks like this:
155 155 def cmd(ui[, repo] [, <args>] [, <options>])
156 156
157 157 `repo` is required if `norepo` is False.
158 158 `<args>` are positional args (or `*args`) arguments, of non-option
159 159 arguments from the command line.
160 160 `<options>` are keyword arguments (or `**options`) of option arguments
161 161 from the command line.
162 162
163 163 See the WritingExtensions and MercurialApi documentation for more exhaustive
164 164 descriptions and examples.
165 165 """
166 166
167 167 def _doregister(self, func, name, options=(), synopsis=None,
168 168 norepo=False, optionalrepo=False, inferrepo=False,
169 169 intents=None):
170 170
171 171 func.norepo = norepo
172 172 func.optionalrepo = optionalrepo
173 173 func.inferrepo = inferrepo
174 174 func.intents = intents or set()
175 175 if synopsis:
176 176 self._table[name] = func, list(options), synopsis
177 177 else:
178 178 self._table[name] = func, list(options)
179 179 return func
180 180
181 181 INTENT_READONLY = b'readonly'
182 182
183 183 class revsetpredicate(_funcregistrarbase):
184 184 """Decorator to register revset predicate
185 185
186 186 Usage::
187 187
188 188 revsetpredicate = registrar.revsetpredicate()
189 189
190 190 @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
191 191 def mypredicatefunc(repo, subset, x):
192 192 '''Explanation of this revset predicate ....
193 193 '''
194 194 pass
195 195
196 196 The first string argument is used also in online help.
197 197
198 198 Optional argument 'safe' indicates whether a predicate is safe for
199 199 DoS attack (False by default).
200 200
201 201 Optional argument 'takeorder' indicates whether a predicate function
202 202 takes ordering policy as the last argument.
203 203
204 204 Optional argument 'weight' indicates the estimated run-time cost, useful
205 205 for static optimization, default is 1. Higher weight means more expensive.
 206 206 Usually, revsets that are fast and return only one revision have a weight of
207 207 0.5 (ex. a symbol); revsets with O(changelog) complexity and read only the
208 208 changelog have weight 10 (ex. author); revsets reading manifest deltas have
 209 209 weight 30 (ex. adds); revsets reading manifest contents have weight 100
210 210 (ex. contains). Note: those values are flexible. If the revset has a
211 211 same big-O time complexity as 'contains', but with a smaller constant, it
212 212 might have a weight of 90.
213 213
214 214 'revsetpredicate' instance in example above can be used to
215 215 decorate multiple functions.
216 216
217 217 Decorated functions are registered automatically at loading
218 218 extension, if an instance named as 'revsetpredicate' is used for
219 219 decorating in extension.
220 220
221 221 Otherwise, explicit 'revset.loadpredicate()' is needed.
222 222 """
223 223 _getname = _funcregistrarbase._parsefuncdecl
224 224 _docformat = "``%s``\n %s"
225 225
226 226 def _extrasetup(self, name, func, safe=False, takeorder=False, weight=1):
227 227 func._safe = safe
228 228 func._takeorder = takeorder
229 229 func._weight = weight
230 230
231 231 class filesetpredicate(_funcregistrarbase):
232 232 """Decorator to register fileset predicate
233 233
234 234 Usage::
235 235
236 236 filesetpredicate = registrar.filesetpredicate()
237 237
238 238 @filesetpredicate('mypredicate()')
239 239 def mypredicatefunc(mctx, x):
240 240 '''Explanation of this fileset predicate ....
241 241 '''
242 242 pass
243 243
244 244 The first string argument is used also in online help.
245 245
246 246 Optional argument 'callstatus' indicates whether a predicate
247 247 implies 'matchctx.status()' at runtime or not (False, by
248 248 default).
249 249
250 Optional argument 'weight' indicates the estimated run-time cost, useful
251 for static optimization, default is 1. Higher weight means more expensive.
252
250 253 'filesetpredicate' instance in example above can be used to
251 254 decorate multiple functions.
252 255
253 256 Decorated functions are registered automatically at loading
254 257 extension, if an instance named as 'filesetpredicate' is used for
255 258 decorating in extension.
256 259
257 260 Otherwise, explicit 'fileset.loadpredicate()' is needed.
258 261 """
259 262 _getname = _funcregistrarbase._parsefuncdecl
260 263 _docformat = "``%s``\n %s"
261 264
262 def _extrasetup(self, name, func, callstatus=False):
265 def _extrasetup(self, name, func, callstatus=False, weight=1):
263 266 func._callstatus = callstatus
267 func._weight = weight
264 268
265 269 class _templateregistrarbase(_funcregistrarbase):
266 270 """Base of decorator to register functions as template specific one
267 271 """
268 272 _docformat = ":%s: %s"
269 273
270 274 class templatekeyword(_templateregistrarbase):
271 275 """Decorator to register template keyword
272 276
273 277 Usage::
274 278
275 279 templatekeyword = registrar.templatekeyword()
276 280
277 281 # new API (since Mercurial 4.6)
278 282 @templatekeyword('mykeyword', requires={'repo', 'ctx'})
279 283 def mykeywordfunc(context, mapping):
280 284 '''Explanation of this template keyword ....
281 285 '''
282 286 pass
283 287
284 288 # old API
285 289 @templatekeyword('mykeyword')
286 290 def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
287 291 '''Explanation of this template keyword ....
288 292 '''
289 293 pass
290 294
291 295 The first string argument is used also in online help.
292 296
293 297 Optional argument 'requires' should be a collection of resource names
294 298 which the template keyword depends on. This also serves as a flag to
295 299 switch to the new API. If 'requires' is unspecified, all template
296 300 keywords and resources are expanded to the function arguments.
297 301
298 302 'templatekeyword' instance in example above can be used to
299 303 decorate multiple functions.
300 304
301 305 Decorated functions are registered automatically at loading
302 306 extension, if an instance named as 'templatekeyword' is used for
303 307 decorating in extension.
304 308
305 309 Otherwise, explicit 'templatekw.loadkeyword()' is needed.
306 310 """
307 311
308 312 def _extrasetup(self, name, func, requires=None):
309 313 func._requires = requires
310 314
311 315 class templatefilter(_templateregistrarbase):
 312 316 """Decorator to register template filter
313 317
314 318 Usage::
315 319
316 320 templatefilter = registrar.templatefilter()
317 321
318 322 @templatefilter('myfilter', intype=bytes)
319 323 def myfilterfunc(text):
320 324 '''Explanation of this template filter ....
321 325 '''
322 326 pass
323 327
324 328 The first string argument is used also in online help.
325 329
326 330 Optional argument 'intype' defines the type of the input argument,
327 331 which should be (bytes, int, templateutil.date, or None for any.)
328 332
329 333 'templatefilter' instance in example above can be used to
330 334 decorate multiple functions.
331 335
332 336 Decorated functions are registered automatically at loading
333 337 extension, if an instance named as 'templatefilter' is used for
334 338 decorating in extension.
335 339
336 340 Otherwise, explicit 'templatefilters.loadkeyword()' is needed.
337 341 """
338 342
339 343 def _extrasetup(self, name, func, intype=None):
340 344 func._intype = intype
341 345
342 346 class templatefunc(_templateregistrarbase):
343 347 """Decorator to register template function
344 348
345 349 Usage::
346 350
347 351 templatefunc = registrar.templatefunc()
348 352
349 353 @templatefunc('myfunc(arg1, arg2[, arg3])', argspec='arg1 arg2 arg3',
350 354 requires={'ctx'})
351 355 def myfuncfunc(context, mapping, args):
352 356 '''Explanation of this template function ....
353 357 '''
354 358 pass
355 359
356 360 The first string argument is used also in online help.
357 361
358 362 If optional 'argspec' is defined, the function will receive 'args' as
359 363 a dict of named arguments. Otherwise 'args' is a list of positional
360 364 arguments.
361 365
362 366 Optional argument 'requires' should be a collection of resource names
363 367 which the template function depends on.
364 368
365 369 'templatefunc' instance in example above can be used to
366 370 decorate multiple functions.
367 371
368 372 Decorated functions are registered automatically at loading
369 373 extension, if an instance named as 'templatefunc' is used for
370 374 decorating in extension.
371 375
372 376 Otherwise, explicit 'templatefuncs.loadfunction()' is needed.
373 377 """
374 378 _getname = _funcregistrarbase._parsefuncdecl
375 379
376 380 def _extrasetup(self, name, func, argspec=None, requires=()):
377 381 func._argspec = argspec
378 382 func._requires = requires
379 383
380 384 class internalmerge(_funcregistrarbase):
381 385 """Decorator to register in-process merge tool
382 386
383 387 Usage::
384 388
385 389 internalmerge = registrar.internalmerge()
386 390
387 391 @internalmerge('mymerge', internalmerge.mergeonly,
388 392 onfailure=None, precheck=None):
389 393 def mymergefunc(repo, mynode, orig, fcd, fco, fca,
390 394 toolconf, files, labels=None):
391 395 '''Explanation of this internal merge tool ....
392 396 '''
393 397 return 1, False # means "conflicted", "no deletion needed"
394 398
395 399 The first string argument is used to compose actual merge tool name,
396 400 ":name" and "internal:name" (the latter is historical one).
397 401
398 402 The second argument is one of merge types below:
399 403
400 404 ========== ======== ======== =========
401 405 merge type precheck premerge fullmerge
402 406 ========== ======== ======== =========
403 407 nomerge x x x
404 408 mergeonly o x o
405 409 fullmerge o o o
406 410 ========== ======== ======== =========
407 411
408 412 Optional argument 'onfailure' is the format of warning message
409 413 to be used at failure of merging (target filename is specified
410 414 at formatting). Or, None or so, if warning message should be
411 415 suppressed.
412 416
413 417 Optional argument 'precheck' is the function to be used
414 418 before actual invocation of internal merge tool itself.
415 419 It takes as same arguments as internal merge tool does, other than
416 420 'files' and 'labels'. If it returns false value, merging is aborted
417 421 immediately (and file is marked as "unresolved").
418 422
419 423 'internalmerge' instance in example above can be used to
420 424 decorate multiple functions.
421 425
422 426 Decorated functions are registered automatically at loading
423 427 extension, if an instance named as 'internalmerge' is used for
424 428 decorating in extension.
425 429
426 430 Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
427 431 """
428 432 _docformat = "``:%s``\n %s"
429 433
430 434 # merge type definitions:
431 435 nomerge = None
432 436 mergeonly = 'mergeonly' # just the full merge, no premerge
433 437 fullmerge = 'fullmerge' # both premerge and merge
434 438
435 439 def _extrasetup(self, name, func, mergetype,
436 440 onfailure=None, precheck=None):
437 441 func.mergetype = mergetype
438 442 func.onfailure = onfailure
439 443 func.precheck = precheck
@@ -1,754 +1,765
1 1 $ fileset() {
2 2 > hg debugfileset --all-files "$@"
3 3 > }
4 4
5 5 $ hg init repo
6 6 $ cd repo
7 7 $ echo a > a1
8 8 $ echo a > a2
9 9 $ echo b > b1
10 10 $ echo b > b2
11 11 $ hg ci -Am addfiles
12 12 adding a1
13 13 adding a2
14 14 adding b1
15 15 adding b2
16 16
17 17 Test operators and basic patterns
18 18
19 19 $ fileset -v a1
20 20 (symbol 'a1')
21 21 * matcher:
22 22 <patternmatcher patterns='(?:a1$)'>
23 23 a1
24 24 $ fileset -v 'a*'
25 25 (symbol 'a*')
26 26 * matcher:
27 27 <patternmatcher patterns='(?:a[^/]*$)'>
28 28 a1
29 29 a2
30 30 $ fileset -v '"re:a\d"'
31 31 (string 're:a\\d')
32 32 * matcher:
33 33 <patternmatcher patterns='(?:a\\d)'>
34 34 a1
35 35 a2
36 36 $ fileset -v '!re:"a\d"'
37 37 (not
38 38 (kindpat
39 39 (symbol 're')
40 40 (string 'a\\d')))
41 41 * matcher:
42 42 <predicatenmatcher
43 43 pred=<not
44 44 <patternmatcher patterns='(?:a\\d)'>>>
45 45 b1
46 46 b2
47 47 $ fileset -v 'path:a1 or glob:b?'
48 48 (or
49 49 (kindpat
50 50 (symbol 'path')
51 51 (symbol 'a1'))
52 52 (kindpat
53 53 (symbol 'glob')
54 54 (symbol 'b?')))
55 55 * matcher:
56 56 <unionmatcher matchers=[
57 57 <patternmatcher patterns='(?:a1(?:/|$))'>,
58 58 <patternmatcher patterns='(?:b.$)'>]>
59 59 a1
60 60 b1
61 61 b2
62 62 $ fileset -v --no-show-matcher 'a1 or a2'
63 63 (or
64 64 (symbol 'a1')
65 65 (symbol 'a2'))
66 66 a1
67 67 a2
68 68 $ fileset 'a1 | a2'
69 69 a1
70 70 a2
71 71 $ fileset 'a* and "*1"'
72 72 a1
73 73 $ fileset 'a* & "*1"'
74 74 a1
75 75 $ fileset 'not (r"a*")'
76 76 b1
77 77 b2
78 78 $ fileset '! ("a*")'
79 79 b1
80 80 b2
81 81 $ fileset 'a* - a1'
82 82 a2
83 83 $ fileset 'a_b'
84 84 $ fileset '"\xy"'
85 85 hg: parse error: invalid \x escape* (glob)
86 86 [255]
87 87
88 88 Test invalid syntax
89 89
90 90 $ fileset -v '"added"()'
91 91 (func
92 92 (string 'added')
93 93 None)
94 94 hg: parse error: not a symbol
95 95 [255]
96 96 $ fileset -v '()()'
97 97 (func
98 98 (group
99 99 None)
100 100 None)
101 101 hg: parse error: not a symbol
102 102 [255]
103 103 $ fileset -v -- '-x'
104 104 (negate
105 105 (symbol 'x'))
106 106 hg: parse error: can't use negate operator in this context
107 107 [255]
108 108 $ fileset -v -- '-()'
109 109 (negate
110 110 (group
111 111 None))
112 112 hg: parse error: can't use negate operator in this context
113 113 [255]
114 114 $ fileset -p parsed 'a, b, c'
115 115 * parsed:
116 116 (list
117 117 (symbol 'a')
118 118 (symbol 'b')
119 119 (symbol 'c'))
120 120 hg: parse error: can't use a list in this context
121 121 (see 'hg help "filesets.x or y"')
122 122 [255]
123 123
124 124 $ fileset '"path":.'
125 125 hg: parse error: not a symbol
126 126 [255]
127 127 $ fileset 'path:foo bar'
128 128 hg: parse error at 9: invalid token
129 129 [255]
130 130 $ fileset 'foo:bar:baz'
131 131 hg: parse error: not a symbol
132 132 [255]
133 133 $ fileset 'foo:bar()'
134 134 hg: parse error: pattern must be a string
135 135 [255]
136 136 $ fileset 'foo:bar'
137 137 hg: parse error: invalid pattern kind: foo
138 138 [255]
139 139
140 140 Show parsed tree at stages:
141 141
142 142 $ fileset -p unknown a
143 143 abort: invalid stage name: unknown
144 144 [255]
145 145
146 146 $ fileset -p parsed 'path:a1 or glob:b?'
147 147 * parsed:
148 148 (or
149 149 (kindpat
150 150 (symbol 'path')
151 151 (symbol 'a1'))
152 152 (kindpat
153 153 (symbol 'glob')
154 154 (symbol 'b?')))
155 155 a1
156 156 b1
157 157 b2
158 158
159 159 $ fileset -p all -s 'a1 or a2 or (grep("b") & clean())'
160 160 * parsed:
161 161 (or
162 162 (symbol 'a1')
163 163 (symbol 'a2')
164 164 (group
165 165 (and
166 166 (func
167 167 (symbol 'grep')
168 168 (string 'b'))
169 169 (func
170 170 (symbol 'clean')
171 171 None))))
172 172 * analyzed:
173 173 (or
174 174 (symbol 'a1')
175 175 (symbol 'a2')
176 176 (and
177 177 (func
178 178 (symbol 'grep')
179 179 (string 'b'))
180 180 (func
181 181 (symbol 'clean')
182 182 None)))
183 * optimized:
184 (or
185 (symbol 'a1')
186 (symbol 'a2')
187 (and
188 (func
189 (symbol 'grep')
190 (string 'b'))
191 (func
192 (symbol 'clean')
193 None)))
183 194 * matcher:
184 195 <unionmatcher matchers=[
185 196 <patternmatcher patterns='(?:a1$)'>,
186 197 <patternmatcher patterns='(?:a2$)'>,
187 198 <intersectionmatcher
188 199 m1=<predicatenmatcher pred=grep('b')>,
189 200 m2=<predicatenmatcher pred=clean>>]>
190 201 a1
191 202 a2
192 203 b1
193 204 b2
194 205
195 206 Test files status
196 207
197 208 $ rm a1
198 209 $ hg rm a2
199 210 $ echo b >> b2
200 211 $ hg cp b1 c1
201 212 $ echo c > c2
202 213 $ echo c > c3
203 214 $ cat > .hgignore <<EOF
204 215 > \.hgignore
205 216 > 2$
206 217 > EOF
207 218 $ fileset 'modified()'
208 219 b2
209 220 $ fileset 'added()'
210 221 c1
211 222 $ fileset 'removed()'
212 223 a2
213 224 $ fileset 'deleted()'
214 225 a1
215 226 $ fileset 'missing()'
216 227 a1
217 228 $ fileset 'unknown()'
218 229 c3
219 230 $ fileset 'ignored()'
220 231 .hgignore
221 232 c2
222 233 $ fileset 'hgignore()'
223 234 .hgignore
224 235 a2
225 236 b2
226 237 c2
227 238 $ fileset 'clean()'
228 239 b1
229 240 $ fileset 'copied()'
230 241 c1
231 242
232 243 Test files status in different revisions
233 244
234 245 $ hg status -m
235 246 M b2
236 247 $ fileset -r0 'revs("wdir()", modified())' --traceback
237 248 b2
238 249 $ hg status -a
239 250 A c1
240 251 $ fileset -r0 'revs("wdir()", added())'
241 252 c1
242 253 $ hg status --change 0 -a
243 254 A a1
244 255 A a2
245 256 A b1
246 257 A b2
247 258 $ hg status -mru
248 259 M b2
249 260 R a2
250 261 ? c3
251 262 $ fileset -r0 'added() and revs("wdir()", modified() or removed() or unknown())'
252 263 a2
253 264 b2
254 265 $ fileset -r0 'added() or revs("wdir()", added())'
255 266 a1
256 267 a2
257 268 b1
258 269 b2
259 270 c1
260 271
261 272 Test files properties
262 273
263 274 >>> open('bin', 'wb').write(b'\0a') and None
264 275 $ fileset 'binary()'
265 276 bin
266 277 $ fileset 'binary() and unknown()'
267 278 bin
268 279 $ echo '^bin$' >> .hgignore
269 280 $ fileset 'binary() and ignored()'
270 281 bin
271 282 $ hg add bin
272 283 $ fileset 'binary()'
273 284 bin
274 285
275 286 $ fileset 'grep("b{1}")'
276 287 .hgignore
277 288 b1
278 289 b2
279 290 c1
280 291 $ fileset 'grep("missingparens(")'
281 292 hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \)).* (re)
282 293 [255]
283 294
284 295 #if execbit
285 296 $ chmod +x b2
286 297 $ fileset 'exec()'
287 298 b2
288 299 #endif
289 300
290 301 #if symlink
291 302 $ ln -s b2 b2link
292 303 $ fileset 'symlink() and unknown()'
293 304 b2link
294 305 $ hg add b2link
295 306 #endif
296 307
297 308 #if no-windows
298 309 $ echo foo > con.xml
299 310 $ fileset 'not portable()'
300 311 con.xml
301 312 $ hg --config ui.portablefilenames=ignore add con.xml
302 313 #endif
303 314
304 315 >>> open('1k', 'wb').write(b' '*1024) and None
305 316 >>> open('2k', 'wb').write(b' '*2048) and None
306 317 $ hg add 1k 2k
307 318 $ fileset 'size("bar")'
308 319 hg: parse error: couldn't parse size: bar
309 320 [255]
310 321 $ fileset '(1k, 2k)'
311 322 hg: parse error: can't use a list in this context
312 323 (see 'hg help "filesets.x or y"')
313 324 [255]
314 325 $ fileset 'size(1k)'
315 326 1k
316 327 $ fileset '(1k or 2k) and size("< 2k")'
317 328 1k
318 329 $ fileset '(1k or 2k) and size("<=2k")'
319 330 1k
320 331 2k
321 332 $ fileset '(1k or 2k) and size("> 1k")'
322 333 2k
323 334 $ fileset '(1k or 2k) and size(">=1K")'
324 335 1k
325 336 2k
326 337 $ fileset '(1k or 2k) and size(".5KB - 1.5kB")'
327 338 1k
328 339 $ fileset 'size("1M")'
329 340 $ fileset 'size("1 GB")'
330 341
331 342 Test merge states
332 343
333 344 $ hg ci -m manychanges
334 345 $ hg file -r . 'set:copied() & modified()'
335 346 [1]
336 347 $ hg up -C 0
337 348 * files updated, 0 files merged, * files removed, 0 files unresolved (glob)
338 349 $ echo c >> b2
339 350 $ hg ci -m diverging b2
340 351 created new head
341 352 $ fileset 'resolved()'
342 353 $ fileset 'unresolved()'
343 354 $ hg merge
344 355 merging b2
345 356 warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
346 357 * files updated, 0 files merged, 1 files removed, 1 files unresolved (glob)
347 358 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
348 359 [1]
349 360 $ fileset 'resolved()'
350 361 $ fileset 'unresolved()'
351 362 b2
352 363 $ echo e > b2
353 364 $ hg resolve -m b2
354 365 (no more unresolved files)
355 366 $ fileset 'resolved()'
356 367 b2
357 368 $ fileset 'unresolved()'
358 369 $ hg ci -m merge
359 370
360 371 Test subrepo predicate
361 372
362 373 $ hg init sub
363 374 $ echo a > sub/suba
364 375 $ hg -R sub add sub/suba
365 376 $ hg -R sub ci -m sub
366 377 $ echo 'sub = sub' > .hgsub
367 378 $ hg init sub2
368 379 $ echo b > sub2/b
369 380 $ hg -R sub2 ci -Am sub2
370 381 adding b
371 382 $ echo 'sub2 = sub2' >> .hgsub
372 383 $ fileset 'subrepo()'
373 384 $ hg add .hgsub
374 385 $ fileset 'subrepo()'
375 386 sub
376 387 sub2
377 388 $ fileset 'subrepo("sub")'
378 389 sub
379 390 $ fileset 'subrepo("glob:*")'
380 391 sub
381 392 sub2
382 393 $ hg ci -m subrepo
383 394
384 395 Test that .hgsubstate is updated as appropriate during a conversion. The
385 396 saverev property is enough to alter the hashes of the subrepo.
386 397
387 398 $ hg init ../converted
388 399 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
389 400 > sub ../converted/sub
390 401 initializing destination ../converted/sub repository
391 402 scanning source...
392 403 sorting...
393 404 converting...
394 405 0 sub
395 406 $ hg clone -U sub2 ../converted/sub2
396 407 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
397 408 > . ../converted
398 409 scanning source...
399 410 sorting...
400 411 converting...
401 412 4 addfiles
402 413 3 manychanges
403 414 2 diverging
404 415 1 merge
405 416 0 subrepo
406 417 no ".hgsubstate" updates will be made for "sub2"
407 418 $ hg up -q -R ../converted -r tip
408 419 $ hg --cwd ../converted cat sub/suba sub2/b -r tip
409 420 a
410 421 b
411 422 $ oldnode=`hg log -r tip -T "{node}\n"`
412 423 $ newnode=`hg log -R ../converted -r tip -T "{node}\n"`
413 424 $ [ "$oldnode" != "$newnode" ] || echo "nothing changed"
414 425
415 426 Test with a revision
416 427
417 428 $ hg log -G --template '{rev} {desc}\n'
418 429 @ 4 subrepo
419 430 |
420 431 o 3 merge
421 432 |\
422 433 | o 2 diverging
423 434 | |
424 435 o | 1 manychanges
425 436 |/
426 437 o 0 addfiles
427 438
428 439 $ echo unknown > unknown
429 440 $ fileset -r1 'modified()'
430 441 b2
431 442 $ fileset -r1 'added() and c1'
432 443 c1
433 444 $ fileset -r1 'removed()'
434 445 a2
435 446 $ fileset -r1 'deleted()'
436 447 $ fileset -r1 'unknown()'
437 448 $ fileset -r1 'ignored()'
438 449 $ fileset -r1 'hgignore()'
439 450 .hgignore
440 451 a2
441 452 b2
442 453 bin
443 454 c2
444 455 sub2
445 456 $ fileset -r1 'binary()'
446 457 bin
447 458 $ fileset -r1 'size(1k)'
448 459 1k
449 460 $ fileset -r3 'resolved()'
450 461 $ fileset -r3 'unresolved()'
451 462
452 463 #if execbit
453 464 $ fileset -r1 'exec()'
454 465 b2
455 466 #endif
456 467
457 468 #if symlink
458 469 $ fileset -r1 'symlink()'
459 470 b2link
460 471 #endif
461 472
462 473 #if no-windows
463 474 $ fileset -r1 'not portable()'
464 475 con.xml
465 476 $ hg forget 'con.xml'
466 477 #endif
467 478
468 479 $ fileset -r4 'subrepo("re:su.*")'
469 480 sub
470 481 sub2
471 482 $ fileset -r4 'subrepo(re:su.*)'
472 483 sub
473 484 sub2
474 485 $ fileset -r4 'subrepo("sub")'
475 486 sub
476 487 $ fileset -r4 'b2 or c1'
477 488 b2
478 489 c1
479 490
480 491 >>> open('dos', 'wb').write(b"dos\r\n") and None
481 492 >>> open('mixed', 'wb').write(b"dos\r\nunix\n") and None
482 493 >>> open('mac', 'wb').write(b"mac\r") and None
483 494 $ hg add dos mixed mac
484 495
485 496 (remove a1, to examine safety of 'eol' on removed files)
486 497 $ rm a1
487 498
488 499 $ fileset 'eol(dos)'
489 500 dos
490 501 mixed
491 502 $ fileset 'eol(unix)'
492 503 .hgignore
493 504 .hgsub
494 505 .hgsubstate
495 506 b1
496 507 b2
497 508 b2.orig
498 509 c1
499 510 c2
500 511 c3
501 512 con.xml (no-windows !)
502 513 mixed
503 514 unknown
504 515 $ fileset 'eol(mac)'
505 516 mac
506 517
507 518 Test safety of 'encoding' on removed files
508 519
509 520 $ fileset 'encoding("ascii")'
510 521 .hgignore
511 522 .hgsub
512 523 .hgsubstate
513 524 1k
514 525 2k
515 526 b1
516 527 b2
517 528 b2.orig
518 529 b2link (symlink !)
519 530 bin
520 531 c1
521 532 c2
522 533 c3
523 534 con.xml (no-windows !)
524 535 dos
525 536 mac
526 537 mixed
527 538 unknown
528 539
529 540 Test 'revs(...)'
530 541 ================
531 542
532 543 small reminder of the repository state
533 544
534 545 $ hg log -G
535 546 @ changeset: 4:* (glob)
536 547 | tag: tip
537 548 | user: test
538 549 | date: Thu Jan 01 00:00:00 1970 +0000
539 550 | summary: subrepo
540 551 |
541 552 o changeset: 3:* (glob)
542 553 |\ parent: 2:55b05bdebf36
543 554 | | parent: 1:* (glob)
544 555 | | user: test
545 556 | | date: Thu Jan 01 00:00:00 1970 +0000
546 557 | | summary: merge
547 558 | |
548 559 | o changeset: 2:55b05bdebf36
549 560 | | parent: 0:8a9576c51c1f
550 561 | | user: test
551 562 | | date: Thu Jan 01 00:00:00 1970 +0000
552 563 | | summary: diverging
553 564 | |
554 565 o | changeset: 1:* (glob)
555 566 |/ user: test
556 567 | date: Thu Jan 01 00:00:00 1970 +0000
557 568 | summary: manychanges
558 569 |
559 570 o changeset: 0:8a9576c51c1f
560 571 user: test
561 572 date: Thu Jan 01 00:00:00 1970 +0000
562 573 summary: addfiles
563 574
564 575 $ hg status --change 0
565 576 A a1
566 577 A a2
567 578 A b1
568 579 A b2
569 580 $ hg status --change 1
570 581 M b2
571 582 A 1k
572 583 A 2k
573 584 A b2link (no-windows !)
574 585 A bin
575 586 A c1
576 587 A con.xml (no-windows !)
577 588 R a2
578 589 $ hg status --change 2
579 590 M b2
580 591 $ hg status --change 3
581 592 M b2
582 593 A 1k
583 594 A 2k
584 595 A b2link (no-windows !)
585 596 A bin
586 597 A c1
587 598 A con.xml (no-windows !)
588 599 R a2
589 600 $ hg status --change 4
590 601 A .hgsub
591 602 A .hgsubstate
592 603 $ hg status
593 604 A dos
594 605 A mac
595 606 A mixed
596 607 R con.xml (no-windows !)
597 608 ! a1
598 609 ? b2.orig
599 610 ? c3
600 611 ? unknown
601 612
602 613 Test files at -r0 should be filtered by files at wdir
603 614 -----------------------------------------------------
604 615
605 616 $ fileset -r0 'tracked() and revs("wdir()", tracked())'
606 617 a1
607 618 b1
608 619 b2
609 620
610 621 Test that "revs()" work at all
611 622 ------------------------------
612 623
613 624 $ fileset "revs('2', modified())"
614 625 b2
615 626
616 627 Test that "revs()" work for file missing in the working copy/current context
617 628 ----------------------------------------------------------------------------
618 629
619 630 (a2 not in working copy)
620 631
621 632 $ fileset "revs('0', added())"
622 633 a1
623 634 a2
624 635 b1
625 636 b2
626 637
627 638 (none of the file exist in "0")
628 639
629 640 $ fileset -r 0 "revs('4', added())"
630 641 .hgsub
631 642 .hgsubstate
632 643
633 644 Call with empty revset
634 645 --------------------------
635 646
636 647 $ fileset "revs('2-2', modified())"
637 648
638 649 Call with revset matching multiple revs
639 650 ---------------------------------------
640 651
641 652 $ fileset "revs('0+4', added())"
642 653 .hgsub
643 654 .hgsubstate
644 655 a1
645 656 a2
646 657 b1
647 658 b2
648 659
649 660 overlapping set
650 661
651 662 $ fileset "revs('1+2', modified())"
652 663 b2
653 664
654 665 test 'status(...)'
655 666 =================
656 667
657 668 Simple case
658 669 -----------
659 670
660 671 $ fileset "status(3, 4, added())"
661 672 .hgsub
662 673 .hgsubstate
663 674
664 675 Use rev to restrict matched files
665 676 -----------------------------------------
666 677
667 678 $ hg status --removed --rev 0 --rev 1
668 679 R a2
669 680 $ fileset "status(0, 1, removed())"
670 681 a2
671 682 $ fileset "tracked() and status(0, 1, removed())"
672 683 $ fileset -r 4 "status(0, 1, removed())"
673 684 a2
674 685 $ fileset -r 4 "tracked() and status(0, 1, removed())"
675 686 $ fileset "revs('4', tracked() and status(0, 1, removed()))"
676 687 $ fileset "revs('0', tracked() and status(0, 1, removed()))"
677 688 a2
678 689
679 690 check wdir()
680 691 ------------
681 692
682 693 $ hg status --removed --rev 4
683 694 R con.xml (no-windows !)
684 695 $ fileset "status(4, 'wdir()', removed())"
685 696 con.xml (no-windows !)
686 697
687 698 $ hg status --removed --rev 2
688 699 R a2
689 700 $ fileset "status('2', 'wdir()', removed())"
690 701 a2
691 702
692 703 test backward status
693 704 --------------------
694 705
695 706 $ hg status --removed --rev 0 --rev 4
696 707 R a2
697 708 $ hg status --added --rev 4 --rev 0
698 709 A a2
699 710 $ fileset "status(4, 0, added())"
700 711 a2
701 712
702 713 test cross branch status
703 714 ------------------------
704 715
705 716 $ hg status --added --rev 1 --rev 2
706 717 A a2
707 718 $ fileset "status(1, 2, added())"
708 719 a2
709 720
710 721 test with multi revs revset
711 722 ---------------------------
712 723 $ hg status --added --rev 0:1 --rev 3:4
713 724 A .hgsub
714 725 A .hgsubstate
715 726 A 1k
716 727 A 2k
717 728 A b2link (no-windows !)
718 729 A bin
719 730 A c1
720 731 A con.xml (no-windows !)
721 732 $ fileset "status('0:1', '3:4', added())"
722 733 .hgsub
723 734 .hgsubstate
724 735 1k
725 736 2k
726 737 b2link (no-windows !)
727 738 bin
728 739 c1
729 740 con.xml (no-windows !)
730 741
731 742 tests with empty value
732 743 ----------------------
733 744
734 745 Fully empty revset
735 746
736 747 $ fileset "status('', '4', added())"
737 748 hg: parse error: first argument to status must be a revision
738 749 [255]
739 750 $ fileset "status('2', '', added())"
740 751 hg: parse error: second argument to status must be a revision
741 752 [255]
742 753
743 754 Empty revset will error at the revset layer
744 755
745 756 $ fileset "status(' ', '4', added())"
746 757 hg: parse error at 1: not a prefix: end
747 758 (
748 759 ^ here)
749 760 [255]
750 761 $ fileset "status('2', ' ', added())"
751 762 hg: parse error at 1: not a prefix: end
752 763 (
753 764 ^ here)
754 765 [255]
General Comments 0
You need to be logged in to leave comments. Login now