##// END OF EJS Templates
templater: add exception-raising version of open_template()...
Martin von Zweigbergk -
r45880:4aa484ef default
parent child Browse files
Show More
@@ -1,4529 +1,4529 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import glob
14 import glob
15 import operator
15 import operator
16 import os
16 import os
17 import platform
17 import platform
18 import random
18 import random
19 import re
19 import re
20 import socket
20 import socket
21 import ssl
21 import ssl
22 import stat
22 import stat
23 import string
23 import string
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 obsolete,
62 obsolete,
63 obsutil,
63 obsutil,
64 pathutil,
64 pathutil,
65 phases,
65 phases,
66 policy,
66 policy,
67 pvec,
67 pvec,
68 pycompat,
68 pycompat,
69 registrar,
69 registrar,
70 repair,
70 repair,
71 revlog,
71 revlog,
72 revset,
72 revset,
73 revsetlang,
73 revsetlang,
74 scmutil,
74 scmutil,
75 setdiscovery,
75 setdiscovery,
76 simplemerge,
76 simplemerge,
77 sshpeer,
77 sshpeer,
78 sslutil,
78 sslutil,
79 streamclone,
79 streamclone,
80 tags as tagsmod,
80 tags as tagsmod,
81 templater,
81 templater,
82 treediscovery,
82 treediscovery,
83 upgrade,
83 upgrade,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 vfs as vfsmod,
86 vfs as vfsmod,
87 wireprotoframing,
87 wireprotoframing,
88 wireprotoserver,
88 wireprotoserver,
89 wireprotov2peer,
89 wireprotov2peer,
90 )
90 )
91 from .utils import (
91 from .utils import (
92 cborutil,
92 cborutil,
93 compression,
93 compression,
94 dateutil,
94 dateutil,
95 procutil,
95 procutil,
96 stringutil,
96 stringutil,
97 )
97 )
98
98
99 from .revlogutils import (
99 from .revlogutils import (
100 deltas as deltautil,
100 deltas as deltautil,
101 nodemap,
101 nodemap,
102 )
102 )
103
103
# Convenience alias for releasing lock objects, used by the debug commands.
release = lockmod.release

# Command table registrar: the @command decorators below register every
# debug* command into this table.
command = registrar.command()
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif len(args) == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancnode), hex(ancnode)))
128
128
129
129
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial vfs paths are bytes throughout this file; the original
    # passed the filename as a native str (and repeated the literal),
    # inconsistent with the b'wb' mode and the bytes path convention.
    fname = b'eicar-test-file.com'
    with repo.cachevfs.open(fname, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file before we remove it.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(fname))
145
145
146
146
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle under a context manager so the file handle is
    # closed even if applying it raises; the original leaked the handle.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
153
153
154
154
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG: count the revisions so we can size the
    # progress bar and the shared mergeable file.
    total = 0
    for kind, data in dagparser.parsedag(text):
        if kind == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # Second pass: actually commit one changeset per 'n' event.
        for kind, data in dagparser.parsedag(text):
            if kind == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge: three-way merge the shared file's contents.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [line.strip() for line in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's lines so every rev changes the file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # Carry the second parent's nf* files through merges.
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif kind == b'l':
                # Local tag for an already-committed revision.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif kind == b'a':
                # Branch change applies to subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
330
330
331
331
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup ``gen``.

    With ``all`` unset, only changelog node hashes are printed; with it
    set, every delta's fields are printed for the changelog, the
    manifest and each filelog.
    """
    prefix = b' ' * indent
    if not all:
        # Terse mode: changelog node ids only.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (prefix, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % prefix
    )

    def showchunks(named):
        # Print one line per delta in the current section.
        ui.write(b"\n%s%s\n" % (prefix, named))
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(
                b"%s%s %s %s %s %s %d\n"
                % (
                    prefix,
                    hex(node),
                    hex(p1),
                    hex(p2),
                    hex(cs),
                    hex(deltabase),
                    len(delta),
                )
            )

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
371
371
372
372
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown format: report the raw version and size only.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, m)
        fm.end()
395
395
396
396
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary ``data``"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
405
405
406
406
def _quasirepr(thing):
    """return a repr-like bytes form of ``thing`` with stable dict ordering"""
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mappingtypes):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
413
413
414
414
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # Optional filter on --part-type.
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if ui.quiet:
            continue
        # A part has exactly one type; dispatch to the matching dumper.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            _debugphaseheads(ui, part, indent=4)
437
437
438
438
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: print the bundlespec only, nothing else.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
461
461
462
462
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)
479
479
480
480
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Check every dirstate entry against the parent manifests.
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in b"nr" and fname not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (fname, st))
            errors += 1
        if st in b"a" and fname in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (fname, st))
            errors += 1
        if st in b"m" and fname not in m1 and fname not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, st)
            )
            errors += 1
    # Check every first-parent manifest entry against the dirstate.
    for fname in m1:
        st = repo.dirstate[fname]
        if st not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (fname, st))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
509
509
510
510
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
523
523
524
524
def _debugdisplaycolor(ui):
    """print each available color name rendered with its own effect"""
    # Work on a copy so we can replace the style table without touching
    # the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, v in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[6:]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(
        ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1])
    )
    for colorname, label in entries:
        ui.write(b'%s\n' % colorname, label=label)
541
541
542
542
def _debugdisplaystyle(ui):
    """print each configured style label together with its effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in a column.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
556
556
557
557
558 @command(b'debugcreatestreamclonebundle', [], b'FILE')
558 @command(b'debugcreatestreamclonebundle', [], b'FILE')
559 def debugcreatestreamclonebundle(ui, repo, fname):
559 def debugcreatestreamclonebundle(ui, repo, fname):
560 """create a stream clone bundle file
560 """create a stream clone bundle file
561
561
562 Stream bundles are special bundles that are essentially archives of
562 Stream bundles are special bundles that are essentially archives of
563 revlog files. They are commonly used for cloning very quickly.
563 revlog files. They are commonly used for cloning very quickly.
564 """
564 """
565 # TODO we may want to turn this into an abort when this functionality
565 # TODO we may want to turn this into an abort when this functionality
566 # is moved into `hg bundle`.
566 # is moved into `hg bundle`.
567 if phases.hassecret(repo):
567 if phases.hassecret(repo):
568 ui.warn(
568 ui.warn(
569 _(
569 _(
570 b'(warning: stream clone bundle will contain secret '
570 b'(warning: stream clone bundle will contain secret '
571 b'revisions)\n'
571 b'revisions)\n'
572 )
572 )
573 )
573 )
574
574
575 requirements, gen = streamclone.generatebundlev1(repo)
575 requirements, gen = streamclone.generatebundlev1(repo)
576 changegroup.writechunks(ui, gen, fname)
576 changegroup.writechunks(ui, gen, fname)
577
577
578 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
578 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
579
579
580
580
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Open the given file as a standalone revlog (no path auditing,
        # relative to the current working directory).
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit a 'n'(ode) event per revision with its non-null parents,
            # plus an 'l'(abel) event for each rev the user asked to mark.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Walk the changelog; emit an 'a'(nnotation) event whenever the
            # branch changes (if --branches), a 'n'(ode) event for every rev,
            # and 'l'(abel) events for tags (if --tags).
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as dagtext lines, wrapping long elements.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
650
650
651
651
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the single positional
    # argument is actually the revision, not a file.
    storageimplied = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storageimplied:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        rev = file_
        file_ = None
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
667
667
668
668
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the less common date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if not range:
        return
    matcher = dateutil.matchdate(range)
    ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
687
687
688
688
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision's delta: (compsize, uncompsize, deltatype,
        # chain, chainsize).  Index entry fields used below: e[1] compressed
        # length, e[2] uncompressed length, e[3] delta base rev, e[5]/e[6]
        # parent revs (per local usage; revlog index format).
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base may be any earlier rev; classify it.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                # A revision that is its own base stores a full snapshot.
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the previous rev.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        # Chain size is the sum of the compressed sizes of every link.
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Column headers for the plain (non-templated) output.
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Assign each distinct chain base a small sequential id for display.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk span from the chain base to the end of this revision.
        lineardist = revstart + comp - basestart
        # Bytes in that span that do NOT belong to this chain.
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: this rev is its own base.
            prevrev = -1

        # Guard against division by zero for empty revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the whole chain and measure how many
            # hunks it would take, how much would be read, and the density of
            # useful bytes within the read data.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
869
869
870
870
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # The deprecated --nodates flag wins; otherwise honor --dates.
    nodates = opts.get('nodates') is not None or not opts['dates']

    if opts.get('datesort'):
        # sort by mtime, then by filename
        def keyfunc(item):
            return (item[1][3], item[0])

    else:
        keyfunc = None  # default: sort by filename

    for path, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        state, mode_, size, mtime = ent[0], ent[1], ent[2], ent[3]
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = encoding.strtolocal(
                time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            )
        if mode_ & 0o20000:
            # symlink bit set in the recorded mode
            modestr = b'lnk'
        else:
            modestr = b'%3o' % (mode_ & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, modestr, size, timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
914
914
915
915
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # Pick the discovery implementation: legacy tree-walking discovery
    # with --old, otherwise the modern set-based discovery.  Both doit()
    # variants return (common nodes, remote heads).
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # Reduce the common set to its heads, as node ids.
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            # --rev limits discovery to the ancestors of the given revs.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    # Time only the discovery round-trips themselves.
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
1020
1020
1021
1021
# I/O buffer size (4 KiB) used by debugdownload when streaming data.
_chunksize = 4 << 10
1023
1023
1024
1024
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    With -o/--output, the downloaded data is written to that path in
    ``_chunksize`` chunks; otherwise it is streamed to the ui.
    """
    fh = urlmod.open(ui, url, output)

    # Close the URL handle in all cases; previously it was leaked (and
    # leaked even earlier if opening the output file raised).
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        fh.close()
1044
1044
1045
1045
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Locate where the extension was loaded from: its module file, or
        # the frozen executable when running an oxidized (PyOxidizer) build.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # In quiet/verbose mode only the name is written on its own line;
        # otherwise a compatibility note is appended to the name.
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1107
1107
1108
1108
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Runs ``expr`` through the fileset language pipeline (parsed ->
    analyzed -> optimized), optionally printing the tree after the
    stages named with --show-stage (or all of them with
    ``--show-stage all``), then matches the resulting matcher against
    the files of the selected revision (or of every revision plus the
    working directory with --all-files) and prints each matching file.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # (stage name, tree transformation) applied in order to the parse tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate the requested stage names before using them
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare --verbose form prints the parsed tree without a header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names to test against the matcher
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1204
1204
1205
1205
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: longest variant name, but never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so the value columns of all rows line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values print as-is; other values become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template, ...) keep raw values
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo/config/default get
        # highlighted differently by the color extension
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1276
1276
1277
1277
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem

    Prints the mount point, filesystem type and the exec/symlink/
    hardlink/case-sensitivity capabilities detected for ``path``
    (default: the current directory).
    """
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        # probe case sensitivity by creating a temporary file inside `path`;
        # stays '(unknown)' if the file cannot be created (e.g. read-only dir)
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1300
1300
1301
1301
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # build the wire-protocol arguments from the hex node ids given
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing --type value onto an internal bundle type name
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1348
1348
1349
1349
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a parent directory is ignored
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1398
1398
1399
1399
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive

    One row per revision: revision number, linkrev, node id and both
    parent node ids. Node ids are full hex with --debug, short otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the id column width from the first revision (12 if empty)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1439
1439
1440
1440
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file

    Emits one parent->child edge per revision (two if there is a second
    parent), suitable for piping into ``dot``.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        # only emit the second edge for merge revisions
        if pp[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1459
1459
1460
1460
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index

    Only works with the native (C/Rust) index implementation, which
    exposes a ``stats()`` method; aborts with the pure-Python one.
    """
    # force the index to be fully loaded/parsed before asking for stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1470
1470
1471
1471
1472 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1472 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1473 def debuginstall(ui, **opts):
1473 def debuginstall(ui, **opts):
1474 '''test Mercurial installation
1474 '''test Mercurial installation
1475
1475
1476 Returns 0 on success.
1476 Returns 0 on success.
1477 '''
1477 '''
1478 opts = pycompat.byteskwargs(opts)
1478 opts = pycompat.byteskwargs(opts)
1479
1479
1480 problems = 0
1480 problems = 0
1481
1481
1482 fm = ui.formatter(b'debuginstall', opts)
1482 fm = ui.formatter(b'debuginstall', opts)
1483 fm.startitem()
1483 fm.startitem()
1484
1484
1485 # encoding might be unknown or wrong. don't translate these messages.
1485 # encoding might be unknown or wrong. don't translate these messages.
1486 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1486 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1487 err = None
1487 err = None
1488 try:
1488 try:
1489 codecs.lookup(pycompat.sysstr(encoding.encoding))
1489 codecs.lookup(pycompat.sysstr(encoding.encoding))
1490 except LookupError as inst:
1490 except LookupError as inst:
1491 err = stringutil.forcebytestr(inst)
1491 err = stringutil.forcebytestr(inst)
1492 problems += 1
1492 problems += 1
1493 fm.condwrite(
1493 fm.condwrite(
1494 err,
1494 err,
1495 b'encodingerror',
1495 b'encodingerror',
1496 b" %s\n (check that your locale is properly set)\n",
1496 b" %s\n (check that your locale is properly set)\n",
1497 err,
1497 err,
1498 )
1498 )
1499
1499
1500 # Python
1500 # Python
1501 pythonlib = None
1501 pythonlib = None
1502 if util.safehasattr(os, '__file__'):
1502 if util.safehasattr(os, '__file__'):
1503 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1503 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1504 elif getattr(sys, 'oxidized', False):
1504 elif getattr(sys, 'oxidized', False):
1505 pythonlib = pycompat.sysexecutable
1505 pythonlib = pycompat.sysexecutable
1506
1506
1507 fm.write(
1507 fm.write(
1508 b'pythonexe',
1508 b'pythonexe',
1509 _(b"checking Python executable (%s)\n"),
1509 _(b"checking Python executable (%s)\n"),
1510 pycompat.sysexecutable or _(b"unknown"),
1510 pycompat.sysexecutable or _(b"unknown"),
1511 )
1511 )
1512 fm.write(
1512 fm.write(
1513 b'pythonimplementation',
1513 b'pythonimplementation',
1514 _(b"checking Python implementation (%s)\n"),
1514 _(b"checking Python implementation (%s)\n"),
1515 pycompat.sysbytes(platform.python_implementation()),
1515 pycompat.sysbytes(platform.python_implementation()),
1516 )
1516 )
1517 fm.write(
1517 fm.write(
1518 b'pythonver',
1518 b'pythonver',
1519 _(b"checking Python version (%s)\n"),
1519 _(b"checking Python version (%s)\n"),
1520 (b"%d.%d.%d" % sys.version_info[:3]),
1520 (b"%d.%d.%d" % sys.version_info[:3]),
1521 )
1521 )
1522 fm.write(
1522 fm.write(
1523 b'pythonlib',
1523 b'pythonlib',
1524 _(b"checking Python lib (%s)...\n"),
1524 _(b"checking Python lib (%s)...\n"),
1525 pythonlib or _(b"unknown"),
1525 pythonlib or _(b"unknown"),
1526 )
1526 )
1527
1527
1528 try:
1528 try:
1529 from . import rustext
1529 from . import rustext
1530
1530
1531 rustext.__doc__ # trigger lazy import
1531 rustext.__doc__ # trigger lazy import
1532 except ImportError:
1532 except ImportError:
1533 rustext = None
1533 rustext = None
1534
1534
1535 security = set(sslutil.supportedprotocols)
1535 security = set(sslutil.supportedprotocols)
1536 if sslutil.hassni:
1536 if sslutil.hassni:
1537 security.add(b'sni')
1537 security.add(b'sni')
1538
1538
1539 fm.write(
1539 fm.write(
1540 b'pythonsecurity',
1540 b'pythonsecurity',
1541 _(b"checking Python security support (%s)\n"),
1541 _(b"checking Python security support (%s)\n"),
1542 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1542 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1543 )
1543 )
1544
1544
1545 # These are warnings, not errors. So don't increment problem count. This
1545 # These are warnings, not errors. So don't increment problem count. This
1546 # may change in the future.
1546 # may change in the future.
1547 if b'tls1.2' not in security:
1547 if b'tls1.2' not in security:
1548 fm.plain(
1548 fm.plain(
1549 _(
1549 _(
1550 b' TLS 1.2 not supported by Python install; '
1550 b' TLS 1.2 not supported by Python install; '
1551 b'network connections lack modern security\n'
1551 b'network connections lack modern security\n'
1552 )
1552 )
1553 )
1553 )
1554 if b'sni' not in security:
1554 if b'sni' not in security:
1555 fm.plain(
1555 fm.plain(
1556 _(
1556 _(
1557 b' SNI not supported by Python install; may have '
1557 b' SNI not supported by Python install; may have '
1558 b'connectivity issues with some servers\n'
1558 b'connectivity issues with some servers\n'
1559 )
1559 )
1560 )
1560 )
1561
1561
1562 fm.plain(
1562 fm.plain(
1563 _(
1563 _(
1564 b"checking Rust extensions (%s)\n"
1564 b"checking Rust extensions (%s)\n"
1565 % (b'missing' if rustext is None else b'installed')
1565 % (b'missing' if rustext is None else b'installed')
1566 ),
1566 ),
1567 )
1567 )
1568
1568
1569 # TODO print CA cert info
1569 # TODO print CA cert info
1570
1570
1571 # hg version
1571 # hg version
1572 hgver = util.version()
1572 hgver = util.version()
1573 fm.write(
1573 fm.write(
1574 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1574 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1575 )
1575 )
1576 fm.write(
1576 fm.write(
1577 b'hgverextra',
1577 b'hgverextra',
1578 _(b"checking Mercurial custom build (%s)\n"),
1578 _(b"checking Mercurial custom build (%s)\n"),
1579 b'+'.join(hgver.split(b'+')[1:]),
1579 b'+'.join(hgver.split(b'+')[1:]),
1580 )
1580 )
1581
1581
1582 # compiled modules
1582 # compiled modules
1583 hgmodules = None
1583 hgmodules = None
1584 if util.safehasattr(sys.modules[__name__], '__file__'):
1584 if util.safehasattr(sys.modules[__name__], '__file__'):
1585 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1585 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1586 elif getattr(sys, 'oxidized', False):
1586 elif getattr(sys, 'oxidized', False):
1587 hgmodules = pycompat.sysexecutable
1587 hgmodules = pycompat.sysexecutable
1588
1588
1589 fm.write(
1589 fm.write(
1590 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1590 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1591 )
1591 )
1592 fm.write(
1592 fm.write(
1593 b'hgmodules',
1593 b'hgmodules',
1594 _(b"checking installed modules (%s)...\n"),
1594 _(b"checking installed modules (%s)...\n"),
1595 hgmodules or _(b"unknown"),
1595 hgmodules or _(b"unknown"),
1596 )
1596 )
1597
1597
1598 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1598 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1599 rustext = rustandc # for now, that's the only case
1599 rustext = rustandc # for now, that's the only case
1600 cext = policy.policy in (b'c', b'allow') or rustandc
1600 cext = policy.policy in (b'c', b'allow') or rustandc
1601 nopure = cext or rustext
1601 nopure = cext or rustext
1602 if nopure:
1602 if nopure:
1603 err = None
1603 err = None
1604 try:
1604 try:
1605 if cext:
1605 if cext:
1606 from .cext import ( # pytype: disable=import-error
1606 from .cext import ( # pytype: disable=import-error
1607 base85,
1607 base85,
1608 bdiff,
1608 bdiff,
1609 mpatch,
1609 mpatch,
1610 osutil,
1610 osutil,
1611 )
1611 )
1612
1612
1613 # quiet pyflakes
1613 # quiet pyflakes
1614 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1614 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1615 if rustext:
1615 if rustext:
1616 from .rustext import ( # pytype: disable=import-error
1616 from .rustext import ( # pytype: disable=import-error
1617 ancestor,
1617 ancestor,
1618 dirstate,
1618 dirstate,
1619 )
1619 )
1620
1620
1621 dir(ancestor), dir(dirstate) # quiet pyflakes
1621 dir(ancestor), dir(dirstate) # quiet pyflakes
1622 except Exception as inst:
1622 except Exception as inst:
1623 err = stringutil.forcebytestr(inst)
1623 err = stringutil.forcebytestr(inst)
1624 problems += 1
1624 problems += 1
1625 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1625 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1626
1626
1627 compengines = util.compengines._engines.values()
1627 compengines = util.compengines._engines.values()
1628 fm.write(
1628 fm.write(
1629 b'compengines',
1629 b'compengines',
1630 _(b'checking registered compression engines (%s)\n'),
1630 _(b'checking registered compression engines (%s)\n'),
1631 fm.formatlist(
1631 fm.formatlist(
1632 sorted(e.name() for e in compengines),
1632 sorted(e.name() for e in compengines),
1633 name=b'compengine',
1633 name=b'compengine',
1634 fmt=b'%s',
1634 fmt=b'%s',
1635 sep=b', ',
1635 sep=b', ',
1636 ),
1636 ),
1637 )
1637 )
1638 fm.write(
1638 fm.write(
1639 b'compenginesavail',
1639 b'compenginesavail',
1640 _(b'checking available compression engines (%s)\n'),
1640 _(b'checking available compression engines (%s)\n'),
1641 fm.formatlist(
1641 fm.formatlist(
1642 sorted(e.name() for e in compengines if e.available()),
1642 sorted(e.name() for e in compengines if e.available()),
1643 name=b'compengine',
1643 name=b'compengine',
1644 fmt=b'%s',
1644 fmt=b'%s',
1645 sep=b', ',
1645 sep=b', ',
1646 ),
1646 ),
1647 )
1647 )
1648 wirecompengines = compression.compengines.supportedwireengines(
1648 wirecompengines = compression.compengines.supportedwireengines(
1649 compression.SERVERROLE
1649 compression.SERVERROLE
1650 )
1650 )
1651 fm.write(
1651 fm.write(
1652 b'compenginesserver',
1652 b'compenginesserver',
1653 _(
1653 _(
1654 b'checking available compression engines '
1654 b'checking available compression engines '
1655 b'for wire protocol (%s)\n'
1655 b'for wire protocol (%s)\n'
1656 ),
1656 ),
1657 fm.formatlist(
1657 fm.formatlist(
1658 [e.name() for e in wirecompengines if e.wireprotosupport()],
1658 [e.name() for e in wirecompengines if e.wireprotosupport()],
1659 name=b'compengine',
1659 name=b'compengine',
1660 fmt=b'%s',
1660 fmt=b'%s',
1661 sep=b', ',
1661 sep=b', ',
1662 ),
1662 ),
1663 )
1663 )
1664 re2 = b'missing'
1664 re2 = b'missing'
1665 if util._re2:
1665 if util._re2:
1666 re2 = b'available'
1666 re2 = b'available'
1667 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1667 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1668 fm.data(re2=bool(util._re2))
1668 fm.data(re2=bool(util._re2))
1669
1669
1670 # templates
1670 # templates
1671 p = templater.templatedir()
1671 p = templater.templatedir()
1672 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1672 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1673 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1673 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1674 if p:
1674 if p:
1675 (m, fp) = templater.open_template(b"map-cmdline.default")
1675 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1676 if m:
1676 if m:
1677 # template found, check if it is working
1677 # template found, check if it is working
1678 err = None
1678 err = None
1679 try:
1679 try:
1680 templater.templater.frommapfile(m)
1680 templater.templater.frommapfile(m)
1681 except Exception as inst:
1681 except Exception as inst:
1682 err = stringutil.forcebytestr(inst)
1682 err = stringutil.forcebytestr(inst)
1683 p = None
1683 p = None
1684 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1684 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1685 else:
1685 else:
1686 p = None
1686 p = None
1687 fm.condwrite(
1687 fm.condwrite(
1688 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1688 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1689 )
1689 )
1690 fm.condwrite(
1690 fm.condwrite(
1691 not m,
1691 not m,
1692 b'defaulttemplatenotfound',
1692 b'defaulttemplatenotfound',
1693 _(b" template '%s' not found\n"),
1693 _(b" template '%s' not found\n"),
1694 b"default",
1694 b"default",
1695 )
1695 )
1696 if not p:
1696 if not p:
1697 problems += 1
1697 problems += 1
1698 fm.condwrite(
1698 fm.condwrite(
1699 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1699 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1700 )
1700 )
1701
1701
1702 # editor
1702 # editor
1703 editor = ui.geteditor()
1703 editor = ui.geteditor()
1704 editor = util.expandpath(editor)
1704 editor = util.expandpath(editor)
1705 editorbin = procutil.shellsplit(editor)[0]
1705 editorbin = procutil.shellsplit(editor)[0]
1706 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1706 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1707 cmdpath = procutil.findexe(editorbin)
1707 cmdpath = procutil.findexe(editorbin)
1708 fm.condwrite(
1708 fm.condwrite(
1709 not cmdpath and editor == b'vi',
1709 not cmdpath and editor == b'vi',
1710 b'vinotfound',
1710 b'vinotfound',
1711 _(
1711 _(
1712 b" No commit editor set and can't find %s in PATH\n"
1712 b" No commit editor set and can't find %s in PATH\n"
1713 b" (specify a commit editor in your configuration"
1713 b" (specify a commit editor in your configuration"
1714 b" file)\n"
1714 b" file)\n"
1715 ),
1715 ),
1716 not cmdpath and editor == b'vi' and editorbin,
1716 not cmdpath and editor == b'vi' and editorbin,
1717 )
1717 )
1718 fm.condwrite(
1718 fm.condwrite(
1719 not cmdpath and editor != b'vi',
1719 not cmdpath and editor != b'vi',
1720 b'editornotfound',
1720 b'editornotfound',
1721 _(
1721 _(
1722 b" Can't find editor '%s' in PATH\n"
1722 b" Can't find editor '%s' in PATH\n"
1723 b" (specify a commit editor in your configuration"
1723 b" (specify a commit editor in your configuration"
1724 b" file)\n"
1724 b" file)\n"
1725 ),
1725 ),
1726 not cmdpath and editorbin,
1726 not cmdpath and editorbin,
1727 )
1727 )
1728 if not cmdpath and editor != b'vi':
1728 if not cmdpath and editor != b'vi':
1729 problems += 1
1729 problems += 1
1730
1730
1731 # check username
1731 # check username
1732 username = None
1732 username = None
1733 err = None
1733 err = None
1734 try:
1734 try:
1735 username = ui.username()
1735 username = ui.username()
1736 except error.Abort as e:
1736 except error.Abort as e:
1737 err = stringutil.forcebytestr(e)
1737 err = stringutil.forcebytestr(e)
1738 problems += 1
1738 problems += 1
1739
1739
1740 fm.condwrite(
1740 fm.condwrite(
1741 username, b'username', _(b"checking username (%s)\n"), username
1741 username, b'username', _(b"checking username (%s)\n"), username
1742 )
1742 )
1743 fm.condwrite(
1743 fm.condwrite(
1744 err,
1744 err,
1745 b'usernameerror',
1745 b'usernameerror',
1746 _(
1746 _(
1747 b"checking username...\n %s\n"
1747 b"checking username...\n %s\n"
1748 b" (specify a username in your configuration file)\n"
1748 b" (specify a username in your configuration file)\n"
1749 ),
1749 ),
1750 err,
1750 err,
1751 )
1751 )
1752
1752
1753 for name, mod in extensions.extensions():
1753 for name, mod in extensions.extensions():
1754 handler = getattr(mod, 'debuginstall', None)
1754 handler = getattr(mod, 'debuginstall', None)
1755 if handler is not None:
1755 if handler is not None:
1756 problems += handler(ui, fm)
1756 problems += handler(ui, fm)
1757
1757
1758 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1758 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1759 if not problems:
1759 if not problems:
1760 fm.data(problems=problems)
1760 fm.data(problems=problems)
1761 fm.condwrite(
1761 fm.condwrite(
1762 problems,
1762 problems,
1763 b'problems',
1763 b'problems',
1764 _(b"%d problems detected, please check your install!\n"),
1764 _(b"%d problems detected, please check your install!\n"),
1765 problems,
1765 problems,
1766 )
1766 )
1767 fm.end()
1767 fm.end()
1768
1768
1769 return problems
1769 return problems
1770
1770
1771
1771
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Talk to the target through the peer interface so remote repos work too.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Convert each full hex id to its binary form before querying.
    nodes = [bin(s) for s in ids]
    flags = peer.known(nodes)
    # One character per queried id: b'1' if known, b'0' otherwise.
    answer = b"".join(b"1" if known else b"0" for known in flags)
    ui.write(b"%s\n" % answer)
1785
1785
1786
1786
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)

    Kept only so that ancient shell completion scripts that still invoke
    ``debuglabelcomplete`` keep working; all logic lives in
    ``debugnamecomplete``.
    """
    # Forward unchanged to the modern command.
    debugnamecomplete(ui, repo, *args)
1791
1791
1792
1792
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock / --force-wlock: unconditionally remove the lock file(s)
    # and exit immediately; nothing else is done in that mode.
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire the requested lock(s) without
    # waiting (the False argument), hold them until the user answers the
    # prompt (or the process is interrupted), then release in `finally`.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Default mode: report the current state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it ourselves, so nobody else held it: release and
            # fall through to report it as free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                # Age of the lock, derived from the lock file's mtime.
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                # Lock contents are conventionally b'host:pid'.
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # Lock file vanished between the failed acquire and the stat:
                # treat as free. Any other OS error is a real failure.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # Exit status equals the number of held locks (0 means none held).
    return held
1904
1904
1905
1905
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Grab the fulltext cache off the manifest storage; not every revlog
        # implementation exposes one, hence the AttributeError fallback.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing mutates on-disk state, so take the working-dir lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    # No option given: dump the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
1977
1977
1978
1978
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        # Report which on-disk merge-state format will effectively be used.
        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: one line per merged commit, then a block per
        # file with its resolution state, paths/nodes and extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # Emit the two sides of the merge (local/other) with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            # state is the positional merge record tuple; state[0] is the
            # record type, the remaining slots depend on that type (see the
            # two branches below).
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Content-merge record: hash/paths/nodes/flags by position.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict record: rename information only.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in ms.extras(f).items():
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm.end()
2073
2073
2074
2074
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names get special treatment: historically only *open*
    # branches were completed, so skip the generic 'branches' namespace
    # here and add open branches explicitly from the branchmap below.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # No prefix argument means "complete everything".
    prefixes = args or [b'']
    completions = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2097
2097
2098
2098
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap

    With --dump-new, serialize the current in-memory nodemap and write the
    binary blob to stdout. With --dump-disk, write the persisted on-disk
    data as-is. With --check, validate the persisted data against the
    changelog index. With --metadata, print the docket fields of the
    persisted nodemap.
    """
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the (faster) index-native serializer when available.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # Guard against a zero-length docket (e.g. an empty or freshly
            # initialized nodemap) to avoid a ZeroDivisionError.
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2161
2161
2162
2162
2163 @command(
2163 @command(
2164 b'debugobsolete',
2164 b'debugobsolete',
2165 [
2165 [
2166 (b'', b'flags', 0, _(b'markers flag')),
2166 (b'', b'flags', 0, _(b'markers flag')),
2167 (
2167 (
2168 b'',
2168 b'',
2169 b'record-parents',
2169 b'record-parents',
2170 False,
2170 False,
2171 _(b'record parent information for the precursor'),
2171 _(b'record parent information for the precursor'),
2172 ),
2172 ),
2173 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2173 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2174 (
2174 (
2175 b'',
2175 b'',
2176 b'exclusive',
2176 b'exclusive',
2177 False,
2177 False,
2178 _(b'restrict display to markers only relevant to REV'),
2178 _(b'restrict display to markers only relevant to REV'),
2179 ),
2179 ),
2180 (b'', b'index', False, _(b'display index of the marker')),
2180 (b'', b'index', False, _(b'display index of the marker')),
2181 (b'', b'delete', [], _(b'delete markers specified by indices')),
2181 (b'', b'delete', [], _(b'delete markers specified by indices')),
2182 ]
2182 ]
2183 + cmdutil.commitopts2
2183 + cmdutil.commitopts2
2184 + cmdutil.formatteropts,
2184 + cmdutil.formatteropts,
2185 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2185 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2186 )
2186 )
2187 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2187 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2188 """create arbitrary obsolete marker
2188 """create arbitrary obsolete marker
2189
2189
2190 With no arguments, displays the list of obsolescence markers."""
2190 With no arguments, displays the list of obsolescence markers."""
2191
2191
2192 opts = pycompat.byteskwargs(opts)
2192 opts = pycompat.byteskwargs(opts)
2193
2193
2194 def parsenodeid(s):
2194 def parsenodeid(s):
2195 try:
2195 try:
2196 # We do not use revsingle/revrange functions here to accept
2196 # We do not use revsingle/revrange functions here to accept
2197 # arbitrary node identifiers, possibly not present in the
2197 # arbitrary node identifiers, possibly not present in the
2198 # local repository.
2198 # local repository.
2199 n = bin(s)
2199 n = bin(s)
2200 if len(n) != len(nullid):
2200 if len(n) != len(nullid):
2201 raise TypeError()
2201 raise TypeError()
2202 return n
2202 return n
2203 except TypeError:
2203 except TypeError:
2204 raise error.Abort(
2204 raise error.Abort(
2205 b'changeset references must be full hexadecimal '
2205 b'changeset references must be full hexadecimal '
2206 b'node identifiers'
2206 b'node identifiers'
2207 )
2207 )
2208
2208
2209 if opts.get(b'delete'):
2209 if opts.get(b'delete'):
2210 indices = []
2210 indices = []
2211 for v in opts.get(b'delete'):
2211 for v in opts.get(b'delete'):
2212 try:
2212 try:
2213 indices.append(int(v))
2213 indices.append(int(v))
2214 except ValueError:
2214 except ValueError:
2215 raise error.Abort(
2215 raise error.Abort(
2216 _(b'invalid index value: %r') % v,
2216 _(b'invalid index value: %r') % v,
2217 hint=_(b'use integers for indices'),
2217 hint=_(b'use integers for indices'),
2218 )
2218 )
2219
2219
2220 if repo.currenttransaction():
2220 if repo.currenttransaction():
2221 raise error.Abort(
2221 raise error.Abort(
2222 _(b'cannot delete obsmarkers in the middle of transaction.')
2222 _(b'cannot delete obsmarkers in the middle of transaction.')
2223 )
2223 )
2224
2224
2225 with repo.lock():
2225 with repo.lock():
2226 n = repair.deleteobsmarkers(repo.obsstore, indices)
2226 n = repair.deleteobsmarkers(repo.obsstore, indices)
2227 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2227 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2228
2228
2229 return
2229 return
2230
2230
2231 if precursor is not None:
2231 if precursor is not None:
2232 if opts[b'rev']:
2232 if opts[b'rev']:
2233 raise error.Abort(b'cannot select revision when creating marker')
2233 raise error.Abort(b'cannot select revision when creating marker')
2234 metadata = {}
2234 metadata = {}
2235 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2235 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2236 succs = tuple(parsenodeid(succ) for succ in successors)
2236 succs = tuple(parsenodeid(succ) for succ in successors)
2237 l = repo.lock()
2237 l = repo.lock()
2238 try:
2238 try:
2239 tr = repo.transaction(b'debugobsolete')
2239 tr = repo.transaction(b'debugobsolete')
2240 try:
2240 try:
2241 date = opts.get(b'date')
2241 date = opts.get(b'date')
2242 if date:
2242 if date:
2243 date = dateutil.parsedate(date)
2243 date = dateutil.parsedate(date)
2244 else:
2244 else:
2245 date = None
2245 date = None
2246 prec = parsenodeid(precursor)
2246 prec = parsenodeid(precursor)
2247 parents = None
2247 parents = None
2248 if opts[b'record_parents']:
2248 if opts[b'record_parents']:
2249 if prec not in repo.unfiltered():
2249 if prec not in repo.unfiltered():
2250 raise error.Abort(
2250 raise error.Abort(
2251 b'cannot used --record-parents on '
2251 b'cannot used --record-parents on '
2252 b'unknown changesets'
2252 b'unknown changesets'
2253 )
2253 )
2254 parents = repo.unfiltered()[prec].parents()
2254 parents = repo.unfiltered()[prec].parents()
2255 parents = tuple(p.node() for p in parents)
2255 parents = tuple(p.node() for p in parents)
2256 repo.obsstore.create(
2256 repo.obsstore.create(
2257 tr,
2257 tr,
2258 prec,
2258 prec,
2259 succs,
2259 succs,
2260 opts[b'flags'],
2260 opts[b'flags'],
2261 parents=parents,
2261 parents=parents,
2262 date=date,
2262 date=date,
2263 metadata=metadata,
2263 metadata=metadata,
2264 ui=ui,
2264 ui=ui,
2265 )
2265 )
2266 tr.close()
2266 tr.close()
2267 except ValueError as exc:
2267 except ValueError as exc:
2268 raise error.Abort(
2268 raise error.Abort(
2269 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2269 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2270 )
2270 )
2271 finally:
2271 finally:
2272 tr.release()
2272 tr.release()
2273 finally:
2273 finally:
2274 l.release()
2274 l.release()
2275 else:
2275 else:
2276 if opts[b'rev']:
2276 if opts[b'rev']:
2277 revs = scmutil.revrange(repo, opts[b'rev'])
2277 revs = scmutil.revrange(repo, opts[b'rev'])
2278 nodes = [repo[r].node() for r in revs]
2278 nodes = [repo[r].node() for r in revs]
2279 markers = list(
2279 markers = list(
2280 obsutil.getmarkers(
2280 obsutil.getmarkers(
2281 repo, nodes=nodes, exclusive=opts[b'exclusive']
2281 repo, nodes=nodes, exclusive=opts[b'exclusive']
2282 )
2282 )
2283 )
2283 )
2284 markers.sort(key=lambda x: x._data)
2284 markers.sort(key=lambda x: x._data)
2285 else:
2285 else:
2286 markers = obsutil.getmarkers(repo)
2286 markers = obsutil.getmarkers(repo)
2287
2287
2288 markerstoiter = markers
2288 markerstoiter = markers
2289 isrelevant = lambda m: True
2289 isrelevant = lambda m: True
2290 if opts.get(b'rev') and opts.get(b'index'):
2290 if opts.get(b'rev') and opts.get(b'index'):
2291 markerstoiter = obsutil.getmarkers(repo)
2291 markerstoiter = obsutil.getmarkers(repo)
2292 markerset = set(markers)
2292 markerset = set(markers)
2293 isrelevant = lambda m: m in markerset
2293 isrelevant = lambda m: m in markerset
2294
2294
2295 fm = ui.formatter(b'debugobsolete', opts)
2295 fm = ui.formatter(b'debugobsolete', opts)
2296 for i, m in enumerate(markerstoiter):
2296 for i, m in enumerate(markerstoiter):
2297 if not isrelevant(m):
2297 if not isrelevant(m):
2298 # marker can be irrelevant when we're iterating over a set
2298 # marker can be irrelevant when we're iterating over a set
2299 # of markers (markerstoiter) which is bigger than the set
2299 # of markers (markerstoiter) which is bigger than the set
2300 # of markers we want to display (markers)
2300 # of markers we want to display (markers)
2301 # this can happen if both --index and --rev options are
2301 # this can happen if both --index and --rev options are
2302 # provided and thus we need to iterate over all of the markers
2302 # provided and thus we need to iterate over all of the markers
2303 # to get the correct indices, but only display the ones that
2303 # to get the correct indices, but only display the ones that
2304 # are relevant to --rev value
2304 # are relevant to --rev value
2305 continue
2305 continue
2306 fm.startitem()
2306 fm.startitem()
2307 ind = i if opts.get(b'index') else None
2307 ind = i if opts.get(b'index') else None
2308 cmdutil.showmarker(fm, m, index=ind)
2308 cmdutil.showmarker(fm, m, index=ind)
2309 fm.end()
2309 fm.end()
2310
2310
2311
2311
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p1.
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2324
2324
2325
2325
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Fix: this function was mistakenly named debugp1copies (a copy-paste
    # of the p1 variant above), which shadowed the real debugp1copies at
    # module level.  The registered command name (b'debugp2copies') comes
    # from the decorator and is unaffected by the rename.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2338
2338
2339
2339
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def matchprefix(path, states):
        # Resolve ``path`` to a repo-relative prefix and collect dirstate
        # entries (in one of ``states``) matching it.  Returns a pair of
        # (completed files, completed directory prefixes).
        dirstate = repo.dirstate
        prefix = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if prefix != repo.root and not prefix.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(prefix):
            prefix += b'/'
        prefix = prefix[len(rootdir) :]
        # Dirstate paths always use '/'; convert on platforms where the
        # native separator differs.
        needfix = pycompat.ossep != b'/'
        if needfix:
            prefix = prefix.replace(pycompat.ossep, b'/')
        prefixlen = len(prefix)
        wantfull = opts['full']
        matchedfiles = set()
        matcheddirs = set()
        for name, entry in pycompat.iteritems(dirstate):
            if not (name.startswith(prefix) and entry[0] in states):
                continue
            if needfix:
                name = name.replace(b'/', pycompat.ossep)
            if wantfull:
                matchedfiles.add(name)
                continue
            # Without --full, stop at the next path segment boundary.
            sep = name.find(pycompat.ossep, prefixlen)
            if sep < 0:
                matchedfiles.add(name)
            else:
                matcheddirs.add(name[:sep])
        return matchedfiles, matcheddirs

    # Build the set of acceptable dirstate states from the flags;
    # no flags means "accept everything" (b'nmar').
    states = b''
    if opts['normal']:
        states += b'nm'
    if opts['added']:
        states += b'a'
    if opts['removed']:
        states += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    allnames = set()
    alldirs = set()
    for spec in specs:
        names, dirnames = matchprefix(spec, states or b'nmar')
        allnames.update(names)
        alldirs.update(dirnames)
    allnames.update(alldirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(allnames)))
    ui.write(b'\n')
2408
2408
2409
2409
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    # Print each copy as "source -> destination", sorted by destination.
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2423
2423
2424
2424
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    # Query the peer in the same order as before printing anything, so any
    # request logging keeps its relative ordering.
    islocal = peer.local() is not None
    pushable = peer.canpush()

    def yesno(flag):
        return _(b'yes') if flag else _(b'no')

    ui.write(_(b'url: %s\n') % peer.url())
    ui.write(_(b'local: %s\n') % yesno(islocal))
    ui.write(_(b'pushable: %s\n') % yesno(pushable))
2443
2443
2444
2444
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool is applied by overriding ui.forcemerge for the duration of
    # the examination below.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report (with -v) the other pre-pattern tool sources, mirroring
        # steps 2 and 4 of the order documented above.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is set, buffer (and discard) any messages
                # emitted while picking the tool, so only "FILE = TOOL"
                # lines reach the user.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2532
2532
2533
2533
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for name, value in sorted(pycompat.iteritems(peer.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n"
                % (stringutil.escapestr(name), stringutil.escapestr(value))
            )
        return

    # Update mode: conditionally set ``key`` from ``old`` to ``new``.
    key, old, new = keyinfo
    args = {
        b'namespace': namespace,
        b'key': key,
        b'old': old,
        b'new': new,
    }
    with peer.commandexecutor() as executor:
        result = executor.callcommand(b'pushkey', args).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2565
2565
2566
2566
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors ("pvecs") of two revisions and print
    # their relationship symbol plus depth/distance metrics.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the comparisons above holds, ``rel`` is
    # left unbound and the "relation" write below raises NameError --
    # presumably pvec guarantees exactly one relation always applies;
    # confirm against the pvec module.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2593
2593
2594
2594
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None tells rebuild() to reset everything; --minimal narrows the
        # set down (see the command docstring).
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # Files present in the manifest but unknown to the dirstate...
            missing = inmanifest - indirstate
            # ...plus dirstate-only files that are not marked as added.
            stray = {
                f for f in indirstate - inmanifest if dirstate[f] != b'a'
            }
            changedfiles = missing | stray

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2642
2642
2643
2643
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual rebuild logic lives in the repair module.
    repair.rebuildfncache(ui, repo)
2648
2648
2649
2649
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % display)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (display, renamed[0], hex(renamed[1]))
            )
2669
2669
2670
2670
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, in sorted order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2676
2676
2677
2677
2678 @command(
2678 @command(
2679 b'debugrevlog',
2679 b'debugrevlog',
2680 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2680 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2681 _(b'-c|-m|FILE'),
2681 _(b'-c|-m|FILE'),
2682 optionalrepo=True,
2682 optionalrepo=True,
2683 )
2683 )
2684 def debugrevlog(ui, repo, file_=None, **opts):
2684 def debugrevlog(ui, repo, file_=None, **opts):
2685 """show data and statistics about a revlog"""
2685 """show data and statistics about a revlog"""
2686 opts = pycompat.byteskwargs(opts)
2686 opts = pycompat.byteskwargs(opts)
2687 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2687 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2688
2688
2689 if opts.get(b"dump"):
2689 if opts.get(b"dump"):
2690 numrevs = len(r)
2690 numrevs = len(r)
2691 ui.write(
2691 ui.write(
2692 (
2692 (
2693 b"# rev p1rev p2rev start end deltastart base p1 p2"
2693 b"# rev p1rev p2rev start end deltastart base p1 p2"
2694 b" rawsize totalsize compression heads chainlen\n"
2694 b" rawsize totalsize compression heads chainlen\n"
2695 )
2695 )
2696 )
2696 )
2697 ts = 0
2697 ts = 0
2698 heads = set()
2698 heads = set()
2699
2699
2700 for rev in pycompat.xrange(numrevs):
2700 for rev in pycompat.xrange(numrevs):
2701 dbase = r.deltaparent(rev)
2701 dbase = r.deltaparent(rev)
2702 if dbase == -1:
2702 if dbase == -1:
2703 dbase = rev
2703 dbase = rev
2704 cbase = r.chainbase(rev)
2704 cbase = r.chainbase(rev)
2705 clen = r.chainlen(rev)
2705 clen = r.chainlen(rev)
2706 p1, p2 = r.parentrevs(rev)
2706 p1, p2 = r.parentrevs(rev)
2707 rs = r.rawsize(rev)
2707 rs = r.rawsize(rev)
2708 ts = ts + rs
2708 ts = ts + rs
2709 heads -= set(r.parentrevs(rev))
2709 heads -= set(r.parentrevs(rev))
2710 heads.add(rev)
2710 heads.add(rev)
2711 try:
2711 try:
2712 compression = ts / r.end(rev)
2712 compression = ts / r.end(rev)
2713 except ZeroDivisionError:
2713 except ZeroDivisionError:
2714 compression = 0
2714 compression = 0
2715 ui.write(
2715 ui.write(
2716 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2716 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2717 b"%11d %5d %8d\n"
2717 b"%11d %5d %8d\n"
2718 % (
2718 % (
2719 rev,
2719 rev,
2720 p1,
2720 p1,
2721 p2,
2721 p2,
2722 r.start(rev),
2722 r.start(rev),
2723 r.end(rev),
2723 r.end(rev),
2724 r.start(dbase),
2724 r.start(dbase),
2725 r.start(cbase),
2725 r.start(cbase),
2726 r.start(p1),
2726 r.start(p1),
2727 r.start(p2),
2727 r.start(p2),
2728 rs,
2728 rs,
2729 ts,
2729 ts,
2730 compression,
2730 compression,
2731 len(heads),
2731 len(heads),
2732 clen,
2732 clen,
2733 )
2733 )
2734 )
2734 )
2735 return 0
2735 return 0
2736
2736
2737 v = r.version
2737 v = r.version
2738 format = v & 0xFFFF
2738 format = v & 0xFFFF
2739 flags = []
2739 flags = []
2740 gdelta = False
2740 gdelta = False
2741 if v & revlog.FLAG_INLINE_DATA:
2741 if v & revlog.FLAG_INLINE_DATA:
2742 flags.append(b'inline')
2742 flags.append(b'inline')
2743 if v & revlog.FLAG_GENERALDELTA:
2743 if v & revlog.FLAG_GENERALDELTA:
2744 gdelta = True
2744 gdelta = True
2745 flags.append(b'generaldelta')
2745 flags.append(b'generaldelta')
2746 if not flags:
2746 if not flags:
2747 flags = [b'(none)']
2747 flags = [b'(none)']
2748
2748
2749 ### tracks merge vs single parent
2749 ### tracks merge vs single parent
2750 nummerges = 0
2750 nummerges = 0
2751
2751
2752 ### tracks ways the "delta" are build
2752 ### tracks ways the "delta" are build
2753 # nodelta
2753 # nodelta
2754 numempty = 0
2754 numempty = 0
2755 numemptytext = 0
2755 numemptytext = 0
2756 numemptydelta = 0
2756 numemptydelta = 0
2757 # full file content
2757 # full file content
2758 numfull = 0
2758 numfull = 0
2759 # intermediate snapshot against a prior snapshot
2759 # intermediate snapshot against a prior snapshot
2760 numsemi = 0
2760 numsemi = 0
2761 # snapshot count per depth
2761 # snapshot count per depth
2762 numsnapdepth = collections.defaultdict(lambda: 0)
2762 numsnapdepth = collections.defaultdict(lambda: 0)
2763 # delta against previous revision
2763 # delta against previous revision
2764 numprev = 0
2764 numprev = 0
2765 # delta against first or second parent (not prev)
2765 # delta against first or second parent (not prev)
2766 nump1 = 0
2766 nump1 = 0
2767 nump2 = 0
2767 nump2 = 0
2768 # delta against neither prev nor parents
2768 # delta against neither prev nor parents
2769 numother = 0
2769 numother = 0
2770 # delta against prev that are also first or second parent
2770 # delta against prev that are also first or second parent
2771 # (details of `numprev`)
2771 # (details of `numprev`)
2772 nump1prev = 0
2772 nump1prev = 0
2773 nump2prev = 0
2773 nump2prev = 0
2774
2774
2775 # data about delta chain of each revs
2775 # data about delta chain of each revs
2776 chainlengths = []
2776 chainlengths = []
2777 chainbases = []
2777 chainbases = []
2778 chainspans = []
2778 chainspans = []
2779
2779
2780 # data about each revision
2780 # data about each revision
2781 datasize = [None, 0, 0]
2781 datasize = [None, 0, 0]
2782 fullsize = [None, 0, 0]
2782 fullsize = [None, 0, 0]
2783 semisize = [None, 0, 0]
2783 semisize = [None, 0, 0]
2784 # snapshot count per depth
2784 # snapshot count per depth
2785 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2785 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2786 deltasize = [None, 0, 0]
2786 deltasize = [None, 0, 0]
2787 chunktypecounts = {}
2787 chunktypecounts = {}
2788 chunktypesizes = {}
2788 chunktypesizes = {}
2789
2789
2790 def addsize(size, l):
2790 def addsize(size, l):
2791 if l[0] is None or size < l[0]:
2791 if l[0] is None or size < l[0]:
2792 l[0] = size
2792 l[0] = size
2793 if size > l[1]:
2793 if size > l[1]:
2794 l[1] = size
2794 l[1] = size
2795 l[2] += size
2795 l[2] += size
2796
2796
2797 numrevs = len(r)
2797 numrevs = len(r)
2798 for rev in pycompat.xrange(numrevs):
2798 for rev in pycompat.xrange(numrevs):
2799 p1, p2 = r.parentrevs(rev)
2799 p1, p2 = r.parentrevs(rev)
2800 delta = r.deltaparent(rev)
2800 delta = r.deltaparent(rev)
2801 if format > 0:
2801 if format > 0:
2802 addsize(r.rawsize(rev), datasize)
2802 addsize(r.rawsize(rev), datasize)
2803 if p2 != nullrev:
2803 if p2 != nullrev:
2804 nummerges += 1
2804 nummerges += 1
2805 size = r.length(rev)
2805 size = r.length(rev)
2806 if delta == nullrev:
2806 if delta == nullrev:
2807 chainlengths.append(0)
2807 chainlengths.append(0)
2808 chainbases.append(r.start(rev))
2808 chainbases.append(r.start(rev))
2809 chainspans.append(size)
2809 chainspans.append(size)
2810 if size == 0:
2810 if size == 0:
2811 numempty += 1
2811 numempty += 1
2812 numemptytext += 1
2812 numemptytext += 1
2813 else:
2813 else:
2814 numfull += 1
2814 numfull += 1
2815 numsnapdepth[0] += 1
2815 numsnapdepth[0] += 1
2816 addsize(size, fullsize)
2816 addsize(size, fullsize)
2817 addsize(size, snapsizedepth[0])
2817 addsize(size, snapsizedepth[0])
2818 else:
2818 else:
2819 chainlengths.append(chainlengths[delta] + 1)
2819 chainlengths.append(chainlengths[delta] + 1)
2820 baseaddr = chainbases[delta]
2820 baseaddr = chainbases[delta]
2821 revaddr = r.start(rev)
2821 revaddr = r.start(rev)
2822 chainbases.append(baseaddr)
2822 chainbases.append(baseaddr)
2823 chainspans.append((revaddr - baseaddr) + size)
2823 chainspans.append((revaddr - baseaddr) + size)
2824 if size == 0:
2824 if size == 0:
2825 numempty += 1
2825 numempty += 1
2826 numemptydelta += 1
2826 numemptydelta += 1
2827 elif r.issnapshot(rev):
2827 elif r.issnapshot(rev):
2828 addsize(size, semisize)
2828 addsize(size, semisize)
2829 numsemi += 1
2829 numsemi += 1
2830 depth = r.snapshotdepth(rev)
2830 depth = r.snapshotdepth(rev)
2831 numsnapdepth[depth] += 1
2831 numsnapdepth[depth] += 1
2832 addsize(size, snapsizedepth[depth])
2832 addsize(size, snapsizedepth[depth])
2833 else:
2833 else:
2834 addsize(size, deltasize)
2834 addsize(size, deltasize)
2835 if delta == rev - 1:
2835 if delta == rev - 1:
2836 numprev += 1
2836 numprev += 1
2837 if delta == p1:
2837 if delta == p1:
2838 nump1prev += 1
2838 nump1prev += 1
2839 elif delta == p2:
2839 elif delta == p2:
2840 nump2prev += 1
2840 nump2prev += 1
2841 elif delta == p1:
2841 elif delta == p1:
2842 nump1 += 1
2842 nump1 += 1
2843 elif delta == p2:
2843 elif delta == p2:
2844 nump2 += 1
2844 nump2 += 1
2845 elif delta != nullrev:
2845 elif delta != nullrev:
2846 numother += 1
2846 numother += 1
2847
2847
2848 # Obtain data on the raw chunks in the revlog.
2848 # Obtain data on the raw chunks in the revlog.
2849 if util.safehasattr(r, b'_getsegmentforrevs'):
2849 if util.safehasattr(r, b'_getsegmentforrevs'):
2850 segment = r._getsegmentforrevs(rev, rev)[1]
2850 segment = r._getsegmentforrevs(rev, rev)[1]
2851 else:
2851 else:
2852 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2852 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2853 if segment:
2853 if segment:
2854 chunktype = bytes(segment[0:1])
2854 chunktype = bytes(segment[0:1])
2855 else:
2855 else:
2856 chunktype = b'empty'
2856 chunktype = b'empty'
2857
2857
2858 if chunktype not in chunktypecounts:
2858 if chunktype not in chunktypecounts:
2859 chunktypecounts[chunktype] = 0
2859 chunktypecounts[chunktype] = 0
2860 chunktypesizes[chunktype] = 0
2860 chunktypesizes[chunktype] = 0
2861
2861
2862 chunktypecounts[chunktype] += 1
2862 chunktypecounts[chunktype] += 1
2863 chunktypesizes[chunktype] += size
2863 chunktypesizes[chunktype] += size
2864
2864
2865 # Adjust size min value for empty cases
2865 # Adjust size min value for empty cases
2866 for size in (datasize, fullsize, semisize, deltasize):
2866 for size in (datasize, fullsize, semisize, deltasize):
2867 if size[0] is None:
2867 if size[0] is None:
2868 size[0] = 0
2868 size[0] = 0
2869
2869
2870 numdeltas = numrevs - numfull - numempty - numsemi
2870 numdeltas = numrevs - numfull - numempty - numsemi
2871 numoprev = numprev - nump1prev - nump2prev
2871 numoprev = numprev - nump1prev - nump2prev
2872 totalrawsize = datasize[2]
2872 totalrawsize = datasize[2]
2873 datasize[2] /= numrevs
2873 datasize[2] /= numrevs
2874 fulltotal = fullsize[2]
2874 fulltotal = fullsize[2]
2875 if numfull == 0:
2875 if numfull == 0:
2876 fullsize[2] = 0
2876 fullsize[2] = 0
2877 else:
2877 else:
2878 fullsize[2] /= numfull
2878 fullsize[2] /= numfull
2879 semitotal = semisize[2]
2879 semitotal = semisize[2]
2880 snaptotal = {}
2880 snaptotal = {}
2881 if numsemi > 0:
2881 if numsemi > 0:
2882 semisize[2] /= numsemi
2882 semisize[2] /= numsemi
2883 for depth in snapsizedepth:
2883 for depth in snapsizedepth:
2884 snaptotal[depth] = snapsizedepth[depth][2]
2884 snaptotal[depth] = snapsizedepth[depth][2]
2885 snapsizedepth[depth][2] /= numsnapdepth[depth]
2885 snapsizedepth[depth][2] /= numsnapdepth[depth]
2886
2886
2887 deltatotal = deltasize[2]
2887 deltatotal = deltasize[2]
2888 if numdeltas > 0:
2888 if numdeltas > 0:
2889 deltasize[2] /= numdeltas
2889 deltasize[2] /= numdeltas
2890 totalsize = fulltotal + semitotal + deltatotal
2890 totalsize = fulltotal + semitotal + deltatotal
2891 avgchainlen = sum(chainlengths) / numrevs
2891 avgchainlen = sum(chainlengths) / numrevs
2892 maxchainlen = max(chainlengths)
2892 maxchainlen = max(chainlengths)
2893 maxchainspan = max(chainspans)
2893 maxchainspan = max(chainspans)
2894 compratio = 1
2894 compratio = 1
2895 if totalsize:
2895 if totalsize:
2896 compratio = totalrawsize / totalsize
2896 compratio = totalrawsize / totalsize
2897
2897
2898 basedfmtstr = b'%%%dd\n'
2898 basedfmtstr = b'%%%dd\n'
2899 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2899 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2900
2900
2901 def dfmtstr(max):
2901 def dfmtstr(max):
2902 return basedfmtstr % len(str(max))
2902 return basedfmtstr % len(str(max))
2903
2903
2904 def pcfmtstr(max, padding=0):
2904 def pcfmtstr(max, padding=0):
2905 return basepcfmtstr % (len(str(max)), b' ' * padding)
2905 return basepcfmtstr % (len(str(max)), b' ' * padding)
2906
2906
2907 def pcfmt(value, total):
2907 def pcfmt(value, total):
2908 if total:
2908 if total:
2909 return (value, 100 * float(value) / total)
2909 return (value, 100 * float(value) / total)
2910 else:
2910 else:
2911 return value, 100.0
2911 return value, 100.0
2912
2912
2913 ui.writenoi18n(b'format : %d\n' % format)
2913 ui.writenoi18n(b'format : %d\n' % format)
2914 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2914 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2915
2915
2916 ui.write(b'\n')
2916 ui.write(b'\n')
2917 fmt = pcfmtstr(totalsize)
2917 fmt = pcfmtstr(totalsize)
2918 fmt2 = dfmtstr(totalsize)
2918 fmt2 = dfmtstr(totalsize)
2919 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2919 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2920 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2920 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2921 ui.writenoi18n(
2921 ui.writenoi18n(
2922 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2922 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2923 )
2923 )
2924 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2924 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2925 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2925 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2926 ui.writenoi18n(
2926 ui.writenoi18n(
2927 b' text : '
2927 b' text : '
2928 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2928 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2929 )
2929 )
2930 ui.writenoi18n(
2930 ui.writenoi18n(
2931 b' delta : '
2931 b' delta : '
2932 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2932 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2933 )
2933 )
2934 ui.writenoi18n(
2934 ui.writenoi18n(
2935 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2935 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2936 )
2936 )
2937 for depth in sorted(numsnapdepth):
2937 for depth in sorted(numsnapdepth):
2938 ui.write(
2938 ui.write(
2939 (b' lvl-%-3d : ' % depth)
2939 (b' lvl-%-3d : ' % depth)
2940 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2940 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2941 )
2941 )
2942 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2942 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2943 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2943 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2944 ui.writenoi18n(
2944 ui.writenoi18n(
2945 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2945 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2946 )
2946 )
2947 for depth in sorted(numsnapdepth):
2947 for depth in sorted(numsnapdepth):
2948 ui.write(
2948 ui.write(
2949 (b' lvl-%-3d : ' % depth)
2949 (b' lvl-%-3d : ' % depth)
2950 + fmt % pcfmt(snaptotal[depth], totalsize)
2950 + fmt % pcfmt(snaptotal[depth], totalsize)
2951 )
2951 )
2952 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2952 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2953
2953
2954 def fmtchunktype(chunktype):
2954 def fmtchunktype(chunktype):
2955 if chunktype == b'empty':
2955 if chunktype == b'empty':
2956 return b' %s : ' % chunktype
2956 return b' %s : ' % chunktype
2957 elif chunktype in pycompat.bytestr(string.ascii_letters):
2957 elif chunktype in pycompat.bytestr(string.ascii_letters):
2958 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2958 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2959 else:
2959 else:
2960 return b' 0x%s : ' % hex(chunktype)
2960 return b' 0x%s : ' % hex(chunktype)
2961
2961
2962 ui.write(b'\n')
2962 ui.write(b'\n')
2963 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2963 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2964 for chunktype in sorted(chunktypecounts):
2964 for chunktype in sorted(chunktypecounts):
2965 ui.write(fmtchunktype(chunktype))
2965 ui.write(fmtchunktype(chunktype))
2966 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2966 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2967 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2967 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2968 for chunktype in sorted(chunktypecounts):
2968 for chunktype in sorted(chunktypecounts):
2969 ui.write(fmtchunktype(chunktype))
2969 ui.write(fmtchunktype(chunktype))
2970 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2970 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2971
2971
2972 ui.write(b'\n')
2972 ui.write(b'\n')
2973 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2973 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2974 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2974 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2975 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2975 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2976 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2976 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2977 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2977 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2978
2978
2979 if format > 0:
2979 if format > 0:
2980 ui.write(b'\n')
2980 ui.write(b'\n')
2981 ui.writenoi18n(
2981 ui.writenoi18n(
2982 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2982 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2983 % tuple(datasize)
2983 % tuple(datasize)
2984 )
2984 )
2985 ui.writenoi18n(
2985 ui.writenoi18n(
2986 b'full revision size (min/max/avg) : %d / %d / %d\n'
2986 b'full revision size (min/max/avg) : %d / %d / %d\n'
2987 % tuple(fullsize)
2987 % tuple(fullsize)
2988 )
2988 )
2989 ui.writenoi18n(
2989 ui.writenoi18n(
2990 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2990 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2991 % tuple(semisize)
2991 % tuple(semisize)
2992 )
2992 )
2993 for depth in sorted(snapsizedepth):
2993 for depth in sorted(snapsizedepth):
2994 if depth == 0:
2994 if depth == 0:
2995 continue
2995 continue
2996 ui.writenoi18n(
2996 ui.writenoi18n(
2997 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2997 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2998 % ((depth,) + tuple(snapsizedepth[depth]))
2998 % ((depth,) + tuple(snapsizedepth[depth]))
2999 )
2999 )
3000 ui.writenoi18n(
3000 ui.writenoi18n(
3001 b'delta size (min/max/avg) : %d / %d / %d\n'
3001 b'delta size (min/max/avg) : %d / %d / %d\n'
3002 % tuple(deltasize)
3002 % tuple(deltasize)
3003 )
3003 )
3004
3004
3005 if numdeltas > 0:
3005 if numdeltas > 0:
3006 ui.write(b'\n')
3006 ui.write(b'\n')
3007 fmt = pcfmtstr(numdeltas)
3007 fmt = pcfmtstr(numdeltas)
3008 fmt2 = pcfmtstr(numdeltas, 4)
3008 fmt2 = pcfmtstr(numdeltas, 4)
3009 ui.writenoi18n(
3009 ui.writenoi18n(
3010 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3010 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3011 )
3011 )
3012 if numprev > 0:
3012 if numprev > 0:
3013 ui.writenoi18n(
3013 ui.writenoi18n(
3014 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3014 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3015 )
3015 )
3016 ui.writenoi18n(
3016 ui.writenoi18n(
3017 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3017 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3018 )
3018 )
3019 ui.writenoi18n(
3019 ui.writenoi18n(
3020 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3020 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3021 )
3021 )
3022 if gdelta:
3022 if gdelta:
3023 ui.writenoi18n(
3023 ui.writenoi18n(
3024 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3024 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3025 )
3025 )
3026 ui.writenoi18n(
3026 ui.writenoi18n(
3027 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3027 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3028 )
3028 )
3029 ui.writenoi18n(
3029 ui.writenoi18n(
3030 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3030 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3031 )
3031 )
3032
3032
3033
3033
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # With --debug print full 40-hex nodes, otherwise the short form.
    shortfn = hex if ui.debugflag else short

    # Width of the nodeid column; the revlog may be empty, so start from a
    # sane default and take the width of the first node if there is one.
    idlen = 12
    for somerev in rlog:
        idlen = len(shortfn(rlog.node(somerev)))
        break

    # Header line: depends on the requested format and on verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    else:
        # format == 1 (validated above)
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One output line per revision.
    for rev in rlog:
        node = rlog.node(rev)
        if format == 0:
            # Parent lookup may fail on a damaged index; degrade to nulls so
            # the dump still completes.
            try:
                pp = rlog.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                fields = (
                    rev,
                    rlog.start(rev),
                    rlog.length(rev),
                    rlog.linkrev(rev),
                    shortfn(node),
                    shortfn(pp[0]),
                    shortfn(pp[1]),
                )
                ui.write(b"% 6d % 9d % 7d % 7d %s %s %s\n" % fields)
            else:
                fields = (
                    rev,
                    rlog.linkrev(rev),
                    shortfn(node),
                    shortfn(pp[0]),
                    shortfn(pp[1]),
                )
                ui.write(b"% 6d % 7d %s %s %s\n" % fields)
        else:
            pr = rlog.parentrevs(rev)
            if ui.verbose:
                fields = (
                    rev,
                    rlog.flags(rev),
                    rlog.start(rev),
                    rlog.length(rev),
                    rlog.rawsize(rev),
                    rlog.linkrev(rev),
                    pr[0],
                    pr[1],
                    shortfn(node),
                )
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % fields
                )
            else:
                fields = (
                    rev,
                    rlog.flags(rev),
                    rlog.rawsize(rev),
                    rlog.linkrev(rev),
                    pr[0],
                    pr[1],
                    shortfn(node),
                )
                ui.write(b"% 6d %04x % 8d % 6d % 6d % 6d %s\n" % fields)
3147
3147
3148
3148
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Transformation pipeline for the parsed revset tree.  Each entry is
    # (stage name, stage function); stages are applied in order, each one
    # consuming the previous stage's output tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage when optimization is disabled.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages whose tree is always printed vs. printed only when it differs
    # from the previously printed tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # Validate the requested stage names before showing anything.
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each intermediate tree (needed by
    # --verify-optimized below) and printing the requested stages.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            # The bare 'parsed' tree keeps its historical headerless output
            # unless stages were requested explicitly.
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed (unoptimized) and optimized trees and
        # compare their results revision by revision.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # The sets differ: emit a unified-diff-like listing of the two
        # revision sequences and signal failure with exit code 1.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting set
    # and/or revision numbers as requested.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3280
3280
3281
3281
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Line buffering would be ideal, but binary-mode line buffering is
        # unsupported and warns on Python 3.8+, so run unbuffered.  This is
        # not performance-critical code, so the cost is acceptable.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # A pipe cannot seek, so append mode fails on Python 3; fall
            # back to plain write mode for pipe descriptors.
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3330
3330
3331
3331
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """
    # Resolve both revisions before taking any lock; REV2 defaults to the
    # null revision when omitted.
    parents = (
        scmutil.revsingle(repo, rev1).node(),
        scmutil.revsingle(repo, rev2, b'null').node(),
    )

    with repo.wlock():
        repo.setparents(*parents)
3349
3349
3350
3350
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument: the single positional
    # argument is the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUG FIX: the error previously named b'debugdata' (copy-pasted
            # from the debugdata command), making CommandError print the
            # wrong command name and usage string.
            raise error.CommandError(
                b'debugsidedata', _(b'invalid arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    # openstorage() uses the command name in its own error messages, so it
    # must also say b'debugsidedata', not b'debugdata'.
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object has one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3377
3377
3378
3378
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    # Chain building relies on the Windows crypto store; bail out early
    # everywhere else.
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # Fall back to the repository's configured 'default' path.
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Only schemes with a well-known TLS/SSH port are supported; anything
    # else cannot be probed with a raw socket connection.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    # Imported lazily: the win32 module is only importable on Windows, and
    # we have already verified the platform above.
    from . import win32

    # Verification is deliberately disabled (CERT_NONE): we need the peer's
    # raw certificate even when the local chain is incomplete — that is the
    # situation this command exists to repair.
    # NOTE(review): ssl.wrap_socket() is deprecated since Python 3.7 in
    # favor of SSLContext.wrap_socket(); consider migrating.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes, suitable for the win32 API.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only inspects (build=False); if the chain is missing
        # links, the second call (without build=False) attempts the update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        # Always release the socket, even when connect()/getpeercert() raise.
        s.close()
3448
3448
3449
3449
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect .hg bundle files from .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize options that getremotechanges()/log machinery would
    # otherwise read from the command line.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            # Open the bundle file as a peer repository.
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we no longer have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-comparison chatter; restore ui.quiet even on
        # failure.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: unbundle the first backup containing the node,
                # inside a lock + transaction, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # Legacy (bundle1) format.
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the bundle's mtime, then either
                # the path (--verbose) or a one-line-per-changeset summary.
                # NOTE(review): the docstring promises --verbose prints the
                # full commit message as well; only the bundle path is
                # printed here — confirm intended behavior.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # getremotechanges() may have created a temporary bundle repo;
            # always clean it up.
            cleanupfn()
3585
3585
3586
3586
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """print the subrepository state (path, source, revision) of a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # One entry per subrepo path, in sorted path order.
    for path in sorted(ctx.substate):
        state = ctx.substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3598
3598
3599
3599
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # successorssets() shares computation between calls through this cache.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        succssets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succssets:
            # An empty set ("pruned") still prints a blank indented line.
            if succsset:
                ui.write(b'    ')
                ui.write(b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
3654
3654
3655
3655
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what is already cached.
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3665
3665
3666
3666
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """

    def _dumpsymbols(t):
        # With --verbose, report which keywords/functions the template uses.
        kwds, funcs = t.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    props = {}
    for d in opts['define']:
        pieces = d.split(b'=', 1)
        if len(pieces) != 2:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)
        k = pieces[0].strip()
        v = pieces[1].strip()
        # An empty key and the name b'ui' are both rejected.
        if not k or k == b'ui':
            raise error.Abort(_(b'malformed keyword definition: %s') % d)
        props[k] = v

    if ui.verbose:
        # Dump the parse tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with no changeset context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            _dumpsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            _dumpsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3730
3730
3731
3731
@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    if r is None:
        # BUG FIX: getpass() can return None (e.g. EOF on a closed/
        # non-interactive stdin), and b'response: %s\n' % None raises
        # TypeError under bytes %-formatting. Substitute a marker instead
        # of crashing.
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)
3742
3742
3743
3743
@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed back, for testing prompt handling.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3754
3754
3755
3755
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock while
    # rebuilding caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3761
3761
3762
3762
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # All of the real work lives in the upgrade module; this command is a
    # thin CLI entry point that forwards its options unchanged.
    result = upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )
    return result
3809
3809
3810
3810
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return
    # Optionally normalize path separators for display when ui.slash is set
    # on a platform whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:
        displaypath = lambda fn: fn
    # Pair each repo-relative path with its cwd-relative form, and size the
    # two columns to the widest entry of each.
    pairs = [(path, repo.pathto(path)) for path in matched]
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(path) for path, _rel in pairs),
        max(len(rel) for _path, rel in pairs),
    )
    for path, rel in pairs:
        flag = b'exact' if m.exact(path) else b''
        line = fmt % (path, displaypath(rel), flag)
        ui.write(b"%s\n" % line.rstrip())
3837
3837
3838
3838
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergent nodes are present, render them as a space-joined
        # "<hex> (<phase>)" list followed by a trailing separator space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3856
3856
3857
3857
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # No docstring on purpose: Mercurial surfaces docstrings as command help.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # The remote options were consumed by hg.peer(); drop them so that only
    # command arguments remain in ``opts``.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
3885
3885
3886
3886
3887 def _parsewirelangblocks(fh):
3887 def _parsewirelangblocks(fh):
3888 activeaction = None
3888 activeaction = None
3889 blocklines = []
3889 blocklines = []
3890 lastindent = 0
3890 lastindent = 0
3891
3891
3892 for line in fh:
3892 for line in fh:
3893 line = line.rstrip()
3893 line = line.rstrip()
3894 if not line:
3894 if not line:
3895 continue
3895 continue
3896
3896
3897 if line.startswith(b'#'):
3897 if line.startswith(b'#'):
3898 continue
3898 continue
3899
3899
3900 if not line.startswith(b' '):
3900 if not line.startswith(b' '):
3901 # New block. Flush previous one.
3901 # New block. Flush previous one.
3902 if activeaction:
3902 if activeaction:
3903 yield activeaction, blocklines
3903 yield activeaction, blocklines
3904
3904
3905 activeaction = line
3905 activeaction = line
3906 blocklines = []
3906 blocklines = []
3907 lastindent = 0
3907 lastindent = 0
3908 continue
3908 continue
3909
3909
3910 # Else we start with an indent.
3910 # Else we start with an indent.
3911
3911
3912 if not activeaction:
3912 if not activeaction:
3913 raise error.Abort(_(b'indented line outside of block'))
3913 raise error.Abort(_(b'indented line outside of block'))
3914
3914
3915 indent = len(line) - len(line.lstrip())
3915 indent = len(line) - len(line.lstrip())
3916
3916
3917 # If this line is indented more than the last line, concatenate it.
3917 # If this line is indented more than the last line, concatenate it.
3918 if indent > lastindent and blocklines:
3918 if indent > lastindent and blocklines:
3919 blocklines[-1] += line.lstrip()
3919 blocklines[-1] += line.lstrip()
3920 else:
3920 else:
3921 blocklines.append(line)
3921 blocklines.append(line)
3922 lastindent = indent
3922 lastindent = indent
3923
3923
3924 # Flush last block.
3924 # Flush last block.
3925 if activeaction:
3925 if activeaction:
3926 yield activeaction, blocklines
3926 yield activeaction, blocklines
3927
3927
3928
3928
3929 @command(
3929 @command(
3930 b'debugwireproto',
3930 b'debugwireproto',
3931 [
3931 [
3932 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3932 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3933 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3933 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3934 (
3934 (
3935 b'',
3935 b'',
3936 b'noreadstderr',
3936 b'noreadstderr',
3937 False,
3937 False,
3938 _(b'do not read from stderr of the remote'),
3938 _(b'do not read from stderr of the remote'),
3939 ),
3939 ),
3940 (
3940 (
3941 b'',
3941 b'',
3942 b'nologhandshake',
3942 b'nologhandshake',
3943 False,
3943 False,
3944 _(b'do not log I/O related to the peer handshake'),
3944 _(b'do not log I/O related to the peer handshake'),
3945 ),
3945 ),
3946 ]
3946 ]
3947 + cmdutil.remoteopts,
3947 + cmdutil.remoteopts,
3948 _(b'[PATH]'),
3948 _(b'[PATH]'),
3949 optionalrepo=True,
3949 optionalrepo=True,
3950 )
3950 )
3951 def debugwireproto(ui, repo, path=None, **opts):
3951 def debugwireproto(ui, repo, path=None, **opts):
3952 """send wire protocol commands to a server
3952 """send wire protocol commands to a server
3953
3953
3954 This command can be used to issue wire protocol commands to remote
3954 This command can be used to issue wire protocol commands to remote
3955 peers and to debug the raw data being exchanged.
3955 peers and to debug the raw data being exchanged.
3956
3956
3957 ``--localssh`` will start an SSH server against the current repository
3957 ``--localssh`` will start an SSH server against the current repository
3958 and connect to that. By default, the connection will perform a handshake
3958 and connect to that. By default, the connection will perform a handshake
3959 and establish an appropriate peer instance.
3959 and establish an appropriate peer instance.
3960
3960
3961 ``--peer`` can be used to bypass the handshake protocol and construct a
3961 ``--peer`` can be used to bypass the handshake protocol and construct a
3962 peer instance using the specified class type. Valid values are ``raw``,
3962 peer instance using the specified class type. Valid values are ``raw``,
3963 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3963 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3964 raw data payloads and don't support higher-level command actions.
3964 raw data payloads and don't support higher-level command actions.
3965
3965
3966 ``--noreadstderr`` can be used to disable automatic reading from stderr
3966 ``--noreadstderr`` can be used to disable automatic reading from stderr
3967 of the peer (for SSH connections only). Disabling automatic reading of
3967 of the peer (for SSH connections only). Disabling automatic reading of
3968 stderr is useful for making output more deterministic.
3968 stderr is useful for making output more deterministic.
3969
3969
3970 Commands are issued via a mini language which is specified via stdin.
3970 Commands are issued via a mini language which is specified via stdin.
3971 The language consists of individual actions to perform. An action is
3971 The language consists of individual actions to perform. An action is
3972 defined by a block. A block is defined as a line with no leading
3972 defined by a block. A block is defined as a line with no leading
3973 space followed by 0 or more lines with leading space. Blocks are
3973 space followed by 0 or more lines with leading space. Blocks are
3974 effectively a high-level command with additional metadata.
3974 effectively a high-level command with additional metadata.
3975
3975
3976 Lines beginning with ``#`` are ignored.
3976 Lines beginning with ``#`` are ignored.
3977
3977
3978 The following sections denote available actions.
3978 The following sections denote available actions.
3979
3979
3980 raw
3980 raw
3981 ---
3981 ---
3982
3982
3983 Send raw data to the server.
3983 Send raw data to the server.
3984
3984
3985 The block payload contains the raw data to send as one atomic send
3985 The block payload contains the raw data to send as one atomic send
3986 operation. The data may not actually be delivered in a single system
3986 operation. The data may not actually be delivered in a single system
3987 call: it depends on the abilities of the transport being used.
3987 call: it depends on the abilities of the transport being used.
3988
3988
3989 Each line in the block is de-indented and concatenated. Then, that
3989 Each line in the block is de-indented and concatenated. Then, that
3990 value is evaluated as a Python b'' literal. This allows the use of
3990 value is evaluated as a Python b'' literal. This allows the use of
3991 backslash escaping, etc.
3991 backslash escaping, etc.
3992
3992
3993 raw+
3993 raw+
3994 ----
3994 ----
3995
3995
3996 Behaves like ``raw`` except flushes output afterwards.
3996 Behaves like ``raw`` except flushes output afterwards.
3997
3997
3998 command <X>
3998 command <X>
3999 -----------
3999 -----------
4000
4000
4001 Send a request to run a named command, whose name follows the ``command``
4001 Send a request to run a named command, whose name follows the ``command``
4002 string.
4002 string.
4003
4003
4004 Arguments to the command are defined as lines in this block. The format of
4004 Arguments to the command are defined as lines in this block. The format of
4005 each line is ``<key> <value>``. e.g.::
4005 each line is ``<key> <value>``. e.g.::
4006
4006
4007 command listkeys
4007 command listkeys
4008 namespace bookmarks
4008 namespace bookmarks
4009
4009
4010 If the value begins with ``eval:``, it will be interpreted as a Python
4010 If the value begins with ``eval:``, it will be interpreted as a Python
4011 literal expression. Otherwise values are interpreted as Python b'' literals.
4011 literal expression. Otherwise values are interpreted as Python b'' literals.
4012 This allows sending complex types and encoding special byte sequences via
4012 This allows sending complex types and encoding special byte sequences via
4013 backslash escaping.
4013 backslash escaping.
4014
4014
4015 The following arguments have special meaning:
4015 The following arguments have special meaning:
4016
4016
4017 ``PUSHFILE``
4017 ``PUSHFILE``
4018 When defined, the *push* mechanism of the peer will be used instead
4018 When defined, the *push* mechanism of the peer will be used instead
4019 of the static request-response mechanism and the content of the
4019 of the static request-response mechanism and the content of the
4020 file specified in the value of this argument will be sent as the
4020 file specified in the value of this argument will be sent as the
4021 command payload.
4021 command payload.
4022
4022
4023 This can be used to submit a local bundle file to the remote.
4023 This can be used to submit a local bundle file to the remote.
4024
4024
4025 batchbegin
4025 batchbegin
4026 ----------
4026 ----------
4027
4027
4028 Instruct the peer to begin a batched send.
4028 Instruct the peer to begin a batched send.
4029
4029
4030 All ``command`` blocks are queued for execution until the next
4030 All ``command`` blocks are queued for execution until the next
4031 ``batchsubmit`` block.
4031 ``batchsubmit`` block.
4032
4032
4033 batchsubmit
4033 batchsubmit
4034 -----------
4034 -----------
4035
4035
4036 Submit previously queued ``command`` blocks as a batch request.
4036 Submit previously queued ``command`` blocks as a batch request.
4037
4037
4038 This action MUST be paired with a ``batchbegin`` action.
4038 This action MUST be paired with a ``batchbegin`` action.
4039
4039
4040 httprequest <method> <path>
4040 httprequest <method> <path>
4041 ---------------------------
4041 ---------------------------
4042
4042
4043 (HTTP peer only)
4043 (HTTP peer only)
4044
4044
4045 Send an HTTP request to the peer.
4045 Send an HTTP request to the peer.
4046
4046
4047 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4047 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4048
4048
4049 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4049 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4050 headers to add to the request. e.g. ``Accept: foo``.
4050 headers to add to the request. e.g. ``Accept: foo``.
4051
4051
4052 The following arguments are special:
4052 The following arguments are special:
4053
4053
4054 ``BODYFILE``
4054 ``BODYFILE``
4055 The content of the file defined as the value to this argument will be
4055 The content of the file defined as the value to this argument will be
4056 transferred verbatim as the HTTP request body.
4056 transferred verbatim as the HTTP request body.
4057
4057
4058 ``frame <type> <flags> <payload>``
4058 ``frame <type> <flags> <payload>``
4059 Send a unified protocol frame as part of the request body.
4059 Send a unified protocol frame as part of the request body.
4060
4060
4061 All frames will be collected and sent as the body to the HTTP
4061 All frames will be collected and sent as the body to the HTTP
4062 request.
4062 request.
4063
4063
4064 close
4064 close
4065 -----
4065 -----
4066
4066
4067 Close the connection to the server.
4067 Close the connection to the server.
4068
4068
4069 flush
4069 flush
4070 -----
4070 -----
4071
4071
4072 Flush data written to the server.
4072 Flush data written to the server.
4073
4073
4074 readavailable
4074 readavailable
4075 -------------
4075 -------------
4076
4076
4077 Close the write end of the connection and read all available data from
4077 Close the write end of the connection and read all available data from
4078 the server.
4078 the server.
4079
4079
4080 If the connection to the server encompasses multiple pipes, we poll both
4080 If the connection to the server encompasses multiple pipes, we poll both
4081 pipes and read available data.
4081 pipes and read available data.
4082
4082
4083 readline
4083 readline
4084 --------
4084 --------
4085
4085
4086 Read a line of output from the server. If there are multiple output
4086 Read a line of output from the server. If there are multiple output
4087 pipes, reads only the main pipe.
4087 pipes, reads only the main pipe.
4088
4088
4089 ereadline
4089 ereadline
4090 ---------
4090 ---------
4091
4091
4092 Like ``readline``, but read from the stderr pipe, if available.
4092 Like ``readline``, but read from the stderr pipe, if available.
4093
4093
4094 read <X>
4094 read <X>
4095 --------
4095 --------
4096
4096
4097 ``read()`` N bytes from the server's main output pipe.
4097 ``read()`` N bytes from the server's main output pipe.
4098
4098
4099 eread <X>
4099 eread <X>
4100 ---------
4100 ---------
4101
4101
4102 ``read()`` N bytes from the server's stderr pipe, if available.
4102 ``read()`` N bytes from the server's stderr pipe, if available.
4103
4103
4104 Specifying Unified Frame-Based Protocol Frames
4104 Specifying Unified Frame-Based Protocol Frames
4105 ----------------------------------------------
4105 ----------------------------------------------
4106
4106
4107 It is possible to emit a *Unified Frame-Based Protocol* by using special
4107 It is possible to emit a *Unified Frame-Based Protocol* by using special
4108 syntax.
4108 syntax.
4109
4109
4110 A frame is composed as a type, flags, and payload. These can be parsed
4110 A frame is composed as a type, flags, and payload. These can be parsed
4111 from a string of the form:
4111 from a string of the form:
4112
4112
4113 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4113 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4114
4114
4115 ``request-id`` and ``stream-id`` are integers defining the request and
4115 ``request-id`` and ``stream-id`` are integers defining the request and
4116 stream identifiers.
4116 stream identifiers.
4117
4117
4118 ``type`` can be an integer value for the frame type or the string name
4118 ``type`` can be an integer value for the frame type or the string name
4119 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4119 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4120 ``command-name``.
4120 ``command-name``.
4121
4121
4122 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4122 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4123 components. Each component (and there can be just one) can be an integer
4123 components. Each component (and there can be just one) can be an integer
4124 or a flag name for stream flags or frame flags, respectively. Values are
4124 or a flag name for stream flags or frame flags, respectively. Values are
4125 resolved to integers and then bitwise OR'd together.
4125 resolved to integers and then bitwise OR'd together.
4126
4126
4127 ``payload`` represents the raw frame payload. If it begins with
4127 ``payload`` represents the raw frame payload. If it begins with
4128 ``cbor:``, the following string is evaluated as Python code and the
4128 ``cbor:``, the following string is evaluated as Python code and the
4129 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4129 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4130 as a Python byte string literal.
4130 as a Python byte string literal.
4131 """
4131 """
4132 opts = pycompat.byteskwargs(opts)
4132 opts = pycompat.byteskwargs(opts)
4133
4133
4134 if opts[b'localssh'] and not repo:
4134 if opts[b'localssh'] and not repo:
4135 raise error.Abort(_(b'--localssh requires a repository'))
4135 raise error.Abort(_(b'--localssh requires a repository'))
4136
4136
4137 if opts[b'peer'] and opts[b'peer'] not in (
4137 if opts[b'peer'] and opts[b'peer'] not in (
4138 b'raw',
4138 b'raw',
4139 b'http2',
4139 b'http2',
4140 b'ssh1',
4140 b'ssh1',
4141 b'ssh2',
4141 b'ssh2',
4142 ):
4142 ):
4143 raise error.Abort(
4143 raise error.Abort(
4144 _(b'invalid value for --peer'),
4144 _(b'invalid value for --peer'),
4145 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4145 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4146 )
4146 )
4147
4147
4148 if path and opts[b'localssh']:
4148 if path and opts[b'localssh']:
4149 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4149 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4150
4150
4151 if ui.interactive():
4151 if ui.interactive():
4152 ui.write(_(b'(waiting for commands on stdin)\n'))
4152 ui.write(_(b'(waiting for commands on stdin)\n'))
4153
4153
4154 blocks = list(_parsewirelangblocks(ui.fin))
4154 blocks = list(_parsewirelangblocks(ui.fin))
4155
4155
4156 proc = None
4156 proc = None
4157 stdin = None
4157 stdin = None
4158 stdout = None
4158 stdout = None
4159 stderr = None
4159 stderr = None
4160 opener = None
4160 opener = None
4161
4161
4162 if opts[b'localssh']:
4162 if opts[b'localssh']:
4163 # We start the SSH server in its own process so there is process
4163 # We start the SSH server in its own process so there is process
4164 # separation. This prevents a whole class of potential bugs around
4164 # separation. This prevents a whole class of potential bugs around
4165 # shared state from interfering with server operation.
4165 # shared state from interfering with server operation.
4166 args = procutil.hgcmd() + [
4166 args = procutil.hgcmd() + [
4167 b'-R',
4167 b'-R',
4168 repo.root,
4168 repo.root,
4169 b'debugserve',
4169 b'debugserve',
4170 b'--sshstdio',
4170 b'--sshstdio',
4171 ]
4171 ]
4172 proc = subprocess.Popen(
4172 proc = subprocess.Popen(
4173 pycompat.rapply(procutil.tonativestr, args),
4173 pycompat.rapply(procutil.tonativestr, args),
4174 stdin=subprocess.PIPE,
4174 stdin=subprocess.PIPE,
4175 stdout=subprocess.PIPE,
4175 stdout=subprocess.PIPE,
4176 stderr=subprocess.PIPE,
4176 stderr=subprocess.PIPE,
4177 bufsize=0,
4177 bufsize=0,
4178 )
4178 )
4179
4179
4180 stdin = proc.stdin
4180 stdin = proc.stdin
4181 stdout = proc.stdout
4181 stdout = proc.stdout
4182 stderr = proc.stderr
4182 stderr = proc.stderr
4183
4183
4184 # We turn the pipes into observers so we can log I/O.
4184 # We turn the pipes into observers so we can log I/O.
4185 if ui.verbose or opts[b'peer'] == b'raw':
4185 if ui.verbose or opts[b'peer'] == b'raw':
4186 stdin = util.makeloggingfileobject(
4186 stdin = util.makeloggingfileobject(
4187 ui, proc.stdin, b'i', logdata=True
4187 ui, proc.stdin, b'i', logdata=True
4188 )
4188 )
4189 stdout = util.makeloggingfileobject(
4189 stdout = util.makeloggingfileobject(
4190 ui, proc.stdout, b'o', logdata=True
4190 ui, proc.stdout, b'o', logdata=True
4191 )
4191 )
4192 stderr = util.makeloggingfileobject(
4192 stderr = util.makeloggingfileobject(
4193 ui, proc.stderr, b'e', logdata=True
4193 ui, proc.stderr, b'e', logdata=True
4194 )
4194 )
4195
4195
4196 # --localssh also implies the peer connection settings.
4196 # --localssh also implies the peer connection settings.
4197
4197
4198 url = b'ssh://localserver'
4198 url = b'ssh://localserver'
4199 autoreadstderr = not opts[b'noreadstderr']
4199 autoreadstderr = not opts[b'noreadstderr']
4200
4200
4201 if opts[b'peer'] == b'ssh1':
4201 if opts[b'peer'] == b'ssh1':
4202 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4202 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4203 peer = sshpeer.sshv1peer(
4203 peer = sshpeer.sshv1peer(
4204 ui,
4204 ui,
4205 url,
4205 url,
4206 proc,
4206 proc,
4207 stdin,
4207 stdin,
4208 stdout,
4208 stdout,
4209 stderr,
4209 stderr,
4210 None,
4210 None,
4211 autoreadstderr=autoreadstderr,
4211 autoreadstderr=autoreadstderr,
4212 )
4212 )
4213 elif opts[b'peer'] == b'ssh2':
4213 elif opts[b'peer'] == b'ssh2':
4214 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4214 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4215 peer = sshpeer.sshv2peer(
4215 peer = sshpeer.sshv2peer(
4216 ui,
4216 ui,
4217 url,
4217 url,
4218 proc,
4218 proc,
4219 stdin,
4219 stdin,
4220 stdout,
4220 stdout,
4221 stderr,
4221 stderr,
4222 None,
4222 None,
4223 autoreadstderr=autoreadstderr,
4223 autoreadstderr=autoreadstderr,
4224 )
4224 )
4225 elif opts[b'peer'] == b'raw':
4225 elif opts[b'peer'] == b'raw':
4226 ui.write(_(b'using raw connection to peer\n'))
4226 ui.write(_(b'using raw connection to peer\n'))
4227 peer = None
4227 peer = None
4228 else:
4228 else:
4229 ui.write(_(b'creating ssh peer from handshake results\n'))
4229 ui.write(_(b'creating ssh peer from handshake results\n'))
4230 peer = sshpeer.makepeer(
4230 peer = sshpeer.makepeer(
4231 ui,
4231 ui,
4232 url,
4232 url,
4233 proc,
4233 proc,
4234 stdin,
4234 stdin,
4235 stdout,
4235 stdout,
4236 stderr,
4236 stderr,
4237 autoreadstderr=autoreadstderr,
4237 autoreadstderr=autoreadstderr,
4238 )
4238 )
4239
4239
4240 elif path:
4240 elif path:
4241 # We bypass hg.peer() so we can proxy the sockets.
4241 # We bypass hg.peer() so we can proxy the sockets.
4242 # TODO consider not doing this because we skip
4242 # TODO consider not doing this because we skip
4243 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4243 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4244 u = util.url(path)
4244 u = util.url(path)
4245 if u.scheme != b'http':
4245 if u.scheme != b'http':
4246 raise error.Abort(_(b'only http:// paths are currently supported'))
4246 raise error.Abort(_(b'only http:// paths are currently supported'))
4247
4247
4248 url, authinfo = u.authinfo()
4248 url, authinfo = u.authinfo()
4249 openerargs = {
4249 openerargs = {
4250 'useragent': b'Mercurial debugwireproto',
4250 'useragent': b'Mercurial debugwireproto',
4251 }
4251 }
4252
4252
4253 # Turn pipes/sockets into observers so we can log I/O.
4253 # Turn pipes/sockets into observers so we can log I/O.
4254 if ui.verbose:
4254 if ui.verbose:
4255 openerargs.update(
4255 openerargs.update(
4256 {
4256 {
4257 'loggingfh': ui,
4257 'loggingfh': ui,
4258 'loggingname': b's',
4258 'loggingname': b's',
4259 'loggingopts': {'logdata': True, 'logdataapis': False,},
4259 'loggingopts': {'logdata': True, 'logdataapis': False,},
4260 }
4260 }
4261 )
4261 )
4262
4262
4263 if ui.debugflag:
4263 if ui.debugflag:
4264 openerargs['loggingopts']['logdataapis'] = True
4264 openerargs['loggingopts']['logdataapis'] = True
4265
4265
4266 # Don't send default headers when in raw mode. This allows us to
4266 # Don't send default headers when in raw mode. This allows us to
4267 # bypass most of the behavior of our URL handling code so we can
4267 # bypass most of the behavior of our URL handling code so we can
4268 # have near complete control over what's sent on the wire.
4268 # have near complete control over what's sent on the wire.
4269 if opts[b'peer'] == b'raw':
4269 if opts[b'peer'] == b'raw':
4270 openerargs['sendaccept'] = False
4270 openerargs['sendaccept'] = False
4271
4271
4272 opener = urlmod.opener(ui, authinfo, **openerargs)
4272 opener = urlmod.opener(ui, authinfo, **openerargs)
4273
4273
4274 if opts[b'peer'] == b'http2':
4274 if opts[b'peer'] == b'http2':
4275 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4275 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4276 # We go through makepeer() because we need an API descriptor for
4276 # We go through makepeer() because we need an API descriptor for
4277 # the peer instance to be useful.
4277 # the peer instance to be useful.
4278 with ui.configoverride(
4278 with ui.configoverride(
4279 {(b'experimental', b'httppeer.advertise-v2'): True}
4279 {(b'experimental', b'httppeer.advertise-v2'): True}
4280 ):
4280 ):
4281 if opts[b'nologhandshake']:
4281 if opts[b'nologhandshake']:
4282 ui.pushbuffer()
4282 ui.pushbuffer()
4283
4283
4284 peer = httppeer.makepeer(ui, path, opener=opener)
4284 peer = httppeer.makepeer(ui, path, opener=opener)
4285
4285
4286 if opts[b'nologhandshake']:
4286 if opts[b'nologhandshake']:
4287 ui.popbuffer()
4287 ui.popbuffer()
4288
4288
4289 if not isinstance(peer, httppeer.httpv2peer):
4289 if not isinstance(peer, httppeer.httpv2peer):
4290 raise error.Abort(
4290 raise error.Abort(
4291 _(
4291 _(
4292 b'could not instantiate HTTP peer for '
4292 b'could not instantiate HTTP peer for '
4293 b'wire protocol version 2'
4293 b'wire protocol version 2'
4294 ),
4294 ),
4295 hint=_(
4295 hint=_(
4296 b'the server may not have the feature '
4296 b'the server may not have the feature '
4297 b'enabled or is not allowing this '
4297 b'enabled or is not allowing this '
4298 b'client version'
4298 b'client version'
4299 ),
4299 ),
4300 )
4300 )
4301
4301
4302 elif opts[b'peer'] == b'raw':
4302 elif opts[b'peer'] == b'raw':
4303 ui.write(_(b'using raw connection to peer\n'))
4303 ui.write(_(b'using raw connection to peer\n'))
4304 peer = None
4304 peer = None
4305 elif opts[b'peer']:
4305 elif opts[b'peer']:
4306 raise error.Abort(
4306 raise error.Abort(
4307 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4307 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4308 )
4308 )
4309 else:
4309 else:
4310 peer = httppeer.makepeer(ui, path, opener=opener)
4310 peer = httppeer.makepeer(ui, path, opener=opener)
4311
4311
4312 # We /could/ populate stdin/stdout with sock.makefile()...
4312 # We /could/ populate stdin/stdout with sock.makefile()...
4313 else:
4313 else:
4314 raise error.Abort(_(b'unsupported connection configuration'))
4314 raise error.Abort(_(b'unsupported connection configuration'))
4315
4315
4316 batchedcommands = None
4316 batchedcommands = None
4317
4317
4318 # Now perform actions based on the parsed wire language instructions.
4318 # Now perform actions based on the parsed wire language instructions.
4319 for action, lines in blocks:
4319 for action, lines in blocks:
4320 if action in (b'raw', b'raw+'):
4320 if action in (b'raw', b'raw+'):
4321 if not stdin:
4321 if not stdin:
4322 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4322 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4323
4323
4324 # Concatenate the data together.
4324 # Concatenate the data together.
4325 data = b''.join(l.lstrip() for l in lines)
4325 data = b''.join(l.lstrip() for l in lines)
4326 data = stringutil.unescapestr(data)
4326 data = stringutil.unescapestr(data)
4327 stdin.write(data)
4327 stdin.write(data)
4328
4328
4329 if action == b'raw+':
4329 if action == b'raw+':
4330 stdin.flush()
4330 stdin.flush()
4331 elif action == b'flush':
4331 elif action == b'flush':
4332 if not stdin:
4332 if not stdin:
4333 raise error.Abort(_(b'cannot call flush on this peer'))
4333 raise error.Abort(_(b'cannot call flush on this peer'))
4334 stdin.flush()
4334 stdin.flush()
4335 elif action.startswith(b'command'):
4335 elif action.startswith(b'command'):
4336 if not peer:
4336 if not peer:
4337 raise error.Abort(
4337 raise error.Abort(
4338 _(
4338 _(
4339 b'cannot send commands unless peer instance '
4339 b'cannot send commands unless peer instance '
4340 b'is available'
4340 b'is available'
4341 )
4341 )
4342 )
4342 )
4343
4343
4344 command = action.split(b' ', 1)[1]
4344 command = action.split(b' ', 1)[1]
4345
4345
4346 args = {}
4346 args = {}
4347 for line in lines:
4347 for line in lines:
4348 # We need to allow empty values.
4348 # We need to allow empty values.
4349 fields = line.lstrip().split(b' ', 1)
4349 fields = line.lstrip().split(b' ', 1)
4350 if len(fields) == 1:
4350 if len(fields) == 1:
4351 key = fields[0]
4351 key = fields[0]
4352 value = b''
4352 value = b''
4353 else:
4353 else:
4354 key, value = fields
4354 key, value = fields
4355
4355
4356 if value.startswith(b'eval:'):
4356 if value.startswith(b'eval:'):
4357 value = stringutil.evalpythonliteral(value[5:])
4357 value = stringutil.evalpythonliteral(value[5:])
4358 else:
4358 else:
4359 value = stringutil.unescapestr(value)
4359 value = stringutil.unescapestr(value)
4360
4360
4361 args[key] = value
4361 args[key] = value
4362
4362
4363 if batchedcommands is not None:
4363 if batchedcommands is not None:
4364 batchedcommands.append((command, args))
4364 batchedcommands.append((command, args))
4365 continue
4365 continue
4366
4366
4367 ui.status(_(b'sending %s command\n') % command)
4367 ui.status(_(b'sending %s command\n') % command)
4368
4368
4369 if b'PUSHFILE' in args:
4369 if b'PUSHFILE' in args:
4370 with open(args[b'PUSHFILE'], 'rb') as fh:
4370 with open(args[b'PUSHFILE'], 'rb') as fh:
4371 del args[b'PUSHFILE']
4371 del args[b'PUSHFILE']
4372 res, output = peer._callpush(
4372 res, output = peer._callpush(
4373 command, fh, **pycompat.strkwargs(args)
4373 command, fh, **pycompat.strkwargs(args)
4374 )
4374 )
4375 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4375 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4376 ui.status(
4376 ui.status(
4377 _(b'remote output: %s\n') % stringutil.escapestr(output)
4377 _(b'remote output: %s\n') % stringutil.escapestr(output)
4378 )
4378 )
4379 else:
4379 else:
4380 with peer.commandexecutor() as e:
4380 with peer.commandexecutor() as e:
4381 res = e.callcommand(command, args).result()
4381 res = e.callcommand(command, args).result()
4382
4382
4383 if isinstance(res, wireprotov2peer.commandresponse):
4383 if isinstance(res, wireprotov2peer.commandresponse):
4384 val = res.objects()
4384 val = res.objects()
4385 ui.status(
4385 ui.status(
4386 _(b'response: %s\n')
4386 _(b'response: %s\n')
4387 % stringutil.pprint(val, bprefix=True, indent=2)
4387 % stringutil.pprint(val, bprefix=True, indent=2)
4388 )
4388 )
4389 else:
4389 else:
4390 ui.status(
4390 ui.status(
4391 _(b'response: %s\n')
4391 _(b'response: %s\n')
4392 % stringutil.pprint(res, bprefix=True, indent=2)
4392 % stringutil.pprint(res, bprefix=True, indent=2)
4393 )
4393 )
4394
4394
4395 elif action == b'batchbegin':
4395 elif action == b'batchbegin':
4396 if batchedcommands is not None:
4396 if batchedcommands is not None:
4397 raise error.Abort(_(b'nested batchbegin not allowed'))
4397 raise error.Abort(_(b'nested batchbegin not allowed'))
4398
4398
4399 batchedcommands = []
4399 batchedcommands = []
4400 elif action == b'batchsubmit':
4400 elif action == b'batchsubmit':
4401 # There is a batching API we could go through. But it would be
4401 # There is a batching API we could go through. But it would be
4402 # difficult to normalize requests into function calls. It is easier
4402 # difficult to normalize requests into function calls. It is easier
4403 # to bypass this layer and normalize to commands + args.
4403 # to bypass this layer and normalize to commands + args.
4404 ui.status(
4404 ui.status(
4405 _(b'sending batch with %d sub-commands\n')
4405 _(b'sending batch with %d sub-commands\n')
4406 % len(batchedcommands)
4406 % len(batchedcommands)
4407 )
4407 )
4408 assert peer is not None
4408 assert peer is not None
4409 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4409 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4410 ui.status(
4410 ui.status(
4411 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4411 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4412 )
4412 )
4413
4413
4414 batchedcommands = None
4414 batchedcommands = None
4415
4415
4416 elif action.startswith(b'httprequest '):
4416 elif action.startswith(b'httprequest '):
4417 if not opener:
4417 if not opener:
4418 raise error.Abort(
4418 raise error.Abort(
4419 _(b'cannot use httprequest without an HTTP peer')
4419 _(b'cannot use httprequest without an HTTP peer')
4420 )
4420 )
4421
4421
4422 request = action.split(b' ', 2)
4422 request = action.split(b' ', 2)
4423 if len(request) != 3:
4423 if len(request) != 3:
4424 raise error.Abort(
4424 raise error.Abort(
4425 _(
4425 _(
4426 b'invalid httprequest: expected format is '
4426 b'invalid httprequest: expected format is '
4427 b'"httprequest <method> <path>'
4427 b'"httprequest <method> <path>'
4428 )
4428 )
4429 )
4429 )
4430
4430
4431 method, httppath = request[1:]
4431 method, httppath = request[1:]
4432 headers = {}
4432 headers = {}
4433 body = None
4433 body = None
4434 frames = []
4434 frames = []
4435 for line in lines:
4435 for line in lines:
4436 line = line.lstrip()
4436 line = line.lstrip()
4437 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4437 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4438 if m:
4438 if m:
4439 # Headers need to use native strings.
4439 # Headers need to use native strings.
4440 key = pycompat.strurl(m.group(1))
4440 key = pycompat.strurl(m.group(1))
4441 value = pycompat.strurl(m.group(2))
4441 value = pycompat.strurl(m.group(2))
4442 headers[key] = value
4442 headers[key] = value
4443 continue
4443 continue
4444
4444
4445 if line.startswith(b'BODYFILE '):
4445 if line.startswith(b'BODYFILE '):
4446 with open(line.split(b' ', 1), b'rb') as fh:
4446 with open(line.split(b' ', 1), b'rb') as fh:
4447 body = fh.read()
4447 body = fh.read()
4448 elif line.startswith(b'frame '):
4448 elif line.startswith(b'frame '):
4449 frame = wireprotoframing.makeframefromhumanstring(
4449 frame = wireprotoframing.makeframefromhumanstring(
4450 line[len(b'frame ') :]
4450 line[len(b'frame ') :]
4451 )
4451 )
4452
4452
4453 frames.append(frame)
4453 frames.append(frame)
4454 else:
4454 else:
4455 raise error.Abort(
4455 raise error.Abort(
4456 _(b'unknown argument to httprequest: %s') % line
4456 _(b'unknown argument to httprequest: %s') % line
4457 )
4457 )
4458
4458
4459 url = path + httppath
4459 url = path + httppath
4460
4460
4461 if frames:
4461 if frames:
4462 body = b''.join(bytes(f) for f in frames)
4462 body = b''.join(bytes(f) for f in frames)
4463
4463
4464 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4464 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4465
4465
4466 # urllib.Request insists on using has_data() as a proxy for
4466 # urllib.Request insists on using has_data() as a proxy for
4467 # determining the request method. Override that to use our
4467 # determining the request method. Override that to use our
4468 # explicitly requested method.
4468 # explicitly requested method.
4469 req.get_method = lambda: pycompat.sysstr(method)
4469 req.get_method = lambda: pycompat.sysstr(method)
4470
4470
4471 try:
4471 try:
4472 res = opener.open(req)
4472 res = opener.open(req)
4473 body = res.read()
4473 body = res.read()
4474 except util.urlerr.urlerror as e:
4474 except util.urlerr.urlerror as e:
4475 # read() method must be called, but only exists in Python 2
4475 # read() method must be called, but only exists in Python 2
4476 getattr(e, 'read', lambda: None)()
4476 getattr(e, 'read', lambda: None)()
4477 continue
4477 continue
4478
4478
4479 ct = res.headers.get('Content-Type')
4479 ct = res.headers.get('Content-Type')
4480 if ct == 'application/mercurial-cbor':
4480 if ct == 'application/mercurial-cbor':
4481 ui.write(
4481 ui.write(
4482 _(b'cbor> %s\n')
4482 _(b'cbor> %s\n')
4483 % stringutil.pprint(
4483 % stringutil.pprint(
4484 cborutil.decodeall(body), bprefix=True, indent=2
4484 cborutil.decodeall(body), bprefix=True, indent=2
4485 )
4485 )
4486 )
4486 )
4487
4487
4488 elif action == b'close':
4488 elif action == b'close':
4489 assert peer is not None
4489 assert peer is not None
4490 peer.close()
4490 peer.close()
4491 elif action == b'readavailable':
4491 elif action == b'readavailable':
4492 if not stdout or not stderr:
4492 if not stdout or not stderr:
4493 raise error.Abort(
4493 raise error.Abort(
4494 _(b'readavailable not available on this peer')
4494 _(b'readavailable not available on this peer')
4495 )
4495 )
4496
4496
4497 stdin.close()
4497 stdin.close()
4498 stdout.read()
4498 stdout.read()
4499 stderr.read()
4499 stderr.read()
4500
4500
4501 elif action == b'readline':
4501 elif action == b'readline':
4502 if not stdout:
4502 if not stdout:
4503 raise error.Abort(_(b'readline not available on this peer'))
4503 raise error.Abort(_(b'readline not available on this peer'))
4504 stdout.readline()
4504 stdout.readline()
4505 elif action == b'ereadline':
4505 elif action == b'ereadline':
4506 if not stderr:
4506 if not stderr:
4507 raise error.Abort(_(b'ereadline not available on this peer'))
4507 raise error.Abort(_(b'ereadline not available on this peer'))
4508 stderr.readline()
4508 stderr.readline()
4509 elif action.startswith(b'read '):
4509 elif action.startswith(b'read '):
4510 count = int(action.split(b' ', 1)[1])
4510 count = int(action.split(b' ', 1)[1])
4511 if not stdout:
4511 if not stdout:
4512 raise error.Abort(_(b'read not available on this peer'))
4512 raise error.Abort(_(b'read not available on this peer'))
4513 stdout.read(count)
4513 stdout.read(count)
4514 elif action.startswith(b'eread '):
4514 elif action.startswith(b'eread '):
4515 count = int(action.split(b' ', 1)[1])
4515 count = int(action.split(b' ', 1)[1])
4516 if not stderr:
4516 if not stderr:
4517 raise error.Abort(_(b'eread not available on this peer'))
4517 raise error.Abort(_(b'eread not available on this peer'))
4518 stderr.read(count)
4518 stderr.read(count)
4519 else:
4519 else:
4520 raise error.Abort(_(b'unknown action: %s') % action)
4520 raise error.Abort(_(b'unknown action: %s') % action)
4521
4521
4522 if batchedcommands is not None:
4522 if batchedcommands is not None:
4523 raise error.Abort(_(b'unclosed "batchbegin" request'))
4523 raise error.Abort(_(b'unclosed "batchbegin" request'))
4524
4524
4525 if peer:
4525 if peer:
4526 peer.close()
4526 peer.close()
4527
4527
4528 if proc:
4528 if proc:
4529 proc.kill()
4529 proc.kill()
@@ -1,865 +1,865 b''
1 # formatter.py - generic output formatting for mercurial
1 # formatter.py - generic output formatting for mercurial
2 #
2 #
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Generic output formatting for Mercurial
8 """Generic output formatting for Mercurial
9
9
10 The formatter provides API to show data in various ways. The following
10 The formatter provides API to show data in various ways. The following
11 functions should be used in place of ui.write():
11 functions should be used in place of ui.write():
12
12
13 - fm.write() for unconditional output
13 - fm.write() for unconditional output
14 - fm.condwrite() to show some extra data conditionally in plain output
14 - fm.condwrite() to show some extra data conditionally in plain output
15 - fm.context() to provide changectx to template output
15 - fm.context() to provide changectx to template output
16 - fm.data() to provide extra data to JSON or template output
16 - fm.data() to provide extra data to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
18
18
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 beforehand so the data is converted to the appropriate data type. Use
20 beforehand so the data is converted to the appropriate data type. Use
21 fm.isplain() if you need to convert or format data conditionally which isn't
21 fm.isplain() if you need to convert or format data conditionally which isn't
22 supported by the formatter API.
22 supported by the formatter API.
23
23
24 To build nested structure (i.e. a list of dicts), use fm.nested().
24 To build nested structure (i.e. a list of dicts), use fm.nested().
25
25
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27
27
28 fm.condwrite() vs 'if cond:':
28 fm.condwrite() vs 'if cond:':
29
29
30 In most cases, use fm.condwrite() so users can selectively show the data
30 In most cases, use fm.condwrite() so users can selectively show the data
31 in template output. If it's costly to build data, use plain 'if cond:' with
31 in template output. If it's costly to build data, use plain 'if cond:' with
32 fm.write().
32 fm.write().
33
33
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35
35
36 fm.nested() should be used to form a tree structure (a list of dicts of
36 fm.nested() should be used to form a tree structure (a list of dicts of
37 lists of dicts...) which can be accessed through template keywords, e.g.
37 lists of dicts...) which can be accessed through template keywords, e.g.
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 exports a dict-type object to template, which can be accessed by e.g.
39 exports a dict-type object to template, which can be accessed by e.g.
40 "{get(foo, key)}" function.
40 "{get(foo, key)}" function.
41
41
42 Doctest helper:
42 Doctest helper:
43
43
44 >>> def show(fn, verbose=False, **opts):
44 >>> def show(fn, verbose=False, **opts):
45 ... import sys
45 ... import sys
46 ... from . import ui as uimod
46 ... from . import ui as uimod
47 ... ui = uimod.ui()
47 ... ui = uimod.ui()
48 ... ui.verbose = verbose
48 ... ui.verbose = verbose
49 ... ui.pushbuffer()
49 ... ui.pushbuffer()
50 ... try:
50 ... try:
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 ... pycompat.byteskwargs(opts)))
52 ... pycompat.byteskwargs(opts)))
53 ... finally:
53 ... finally:
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55
55
56 Basic example:
56 Basic example:
57
57
58 >>> def files(ui, fm):
58 >>> def files(ui, fm):
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 ... for f in files:
60 ... for f in files:
61 ... fm.startitem()
61 ... fm.startitem()
62 ... fm.write(b'path', b'%s', f[0])
62 ... fm.write(b'path', b'%s', f[0])
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 ... fm.data(size=f[1])
65 ... fm.data(size=f[1])
66 ... fm.plain(b'\\n')
66 ... fm.plain(b'\\n')
67 ... fm.end()
67 ... fm.end()
68 >>> show(files)
68 >>> show(files)
69 foo
69 foo
70 bar
70 bar
71 >>> show(files, verbose=True)
71 >>> show(files, verbose=True)
72 foo 1970-01-01 00:00:00
72 foo 1970-01-01 00:00:00
73 bar 1970-01-01 00:00:01
73 bar 1970-01-01 00:00:01
74 >>> show(files, template=b'json')
74 >>> show(files, template=b'json')
75 [
75 [
76 {
76 {
77 "date": [0, 0],
77 "date": [0, 0],
78 "path": "foo",
78 "path": "foo",
79 "size": 123
79 "size": 123
80 },
80 },
81 {
81 {
82 "date": [1, 0],
82 "date": [1, 0],
83 "path": "bar",
83 "path": "bar",
84 "size": 456
84 "size": 456
85 }
85 }
86 ]
86 ]
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 path: foo
88 path: foo
89 date: 1970-01-01T00:00:00+00:00
89 date: 1970-01-01T00:00:00+00:00
90 path: bar
90 path: bar
91 date: 1970-01-01T00:00:01+00:00
91 date: 1970-01-01T00:00:01+00:00
92
92
93 Nested example:
93 Nested example:
94
94
95 >>> def subrepos(ui, fm):
95 >>> def subrepos(ui, fm):
96 ... fm.startitem()
96 ... fm.startitem()
97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
99 ... fm.end()
99 ... fm.end()
100 >>> show(subrepos)
100 >>> show(subrepos)
101 [baz]
101 [baz]
102 foo
102 foo
103 bar
103 bar
104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 baz: foo, bar
105 baz: foo, bar
106 """
106 """
107
107
108 from __future__ import absolute_import, print_function
108 from __future__ import absolute_import, print_function
109
109
110 import contextlib
110 import contextlib
111 import itertools
111 import itertools
112 import os
112 import os
113
113
114 from .i18n import _
114 from .i18n import _
115 from .node import (
115 from .node import (
116 hex,
116 hex,
117 short,
117 short,
118 )
118 )
119 from .thirdparty import attr
119 from .thirdparty import attr
120
120
121 from . import (
121 from . import (
122 error,
122 error,
123 pycompat,
123 pycompat,
124 templatefilters,
124 templatefilters,
125 templatekw,
125 templatekw,
126 templater,
126 templater,
127 templateutil,
127 templateutil,
128 util,
128 util,
129 )
129 )
130 from .utils import (
130 from .utils import (
131 cborutil,
131 cborutil,
132 dateutil,
132 dateutil,
133 stringutil,
133 stringutil,
134 )
134 )
135
135
136 pickle = util.pickle
136 pickle = util.pickle
137
137
138
138
def isprintable(obj):
    """Report whether formatter write()/data() can take *obj* directly.

    A False result means the value is unsupported as-is and must first go
    through formatdate(), formatdict(), or formatlist().
    """
    # The primitive types the formatter handles natively.
    printable = (type(None), bool, int, pycompat.long, float, bytes)
    return isinstance(obj, printable)
147
147
148
148
149 class _nullconverter(object):
149 class _nullconverter(object):
150 '''convert non-primitive data types to be processed by formatter'''
150 '''convert non-primitive data types to be processed by formatter'''
151
151
152 # set to True if context object should be stored as item
152 # set to True if context object should be stored as item
153 storecontext = False
153 storecontext = False
154
154
155 @staticmethod
155 @staticmethod
156 def wrapnested(data, tmpl, sep):
156 def wrapnested(data, tmpl, sep):
157 '''wrap nested data by appropriate type'''
157 '''wrap nested data by appropriate type'''
158 return data
158 return data
159
159
160 @staticmethod
160 @staticmethod
161 def formatdate(date, fmt):
161 def formatdate(date, fmt):
162 '''convert date tuple to appropriate format'''
162 '''convert date tuple to appropriate format'''
163 # timestamp can be float, but the canonical form should be int
163 # timestamp can be float, but the canonical form should be int
164 ts, tz = date
164 ts, tz = date
165 return (int(ts), tz)
165 return (int(ts), tz)
166
166
167 @staticmethod
167 @staticmethod
168 def formatdict(data, key, value, fmt, sep):
168 def formatdict(data, key, value, fmt, sep):
169 '''convert dict or key-value pairs to appropriate dict format'''
169 '''convert dict or key-value pairs to appropriate dict format'''
170 # use plain dict instead of util.sortdict so that data can be
170 # use plain dict instead of util.sortdict so that data can be
171 # serialized as a builtin dict in pickle output
171 # serialized as a builtin dict in pickle output
172 return dict(data)
172 return dict(data)
173
173
174 @staticmethod
174 @staticmethod
175 def formatlist(data, name, fmt, sep):
175 def formatlist(data, name, fmt, sep):
176 '''convert iterable to appropriate list format'''
176 '''convert iterable to appropriate list format'''
177 return list(data)
177 return list(data)
178
178
179
179
class baseformatter(object):
    """Common machinery shared by all formatter implementations.

    Field data is collected into a per-item dict (``self._item``);
    subclasses decide how a finished item is rendered by overriding
    ``_showitem()``.  Conversion of non-primitive values (dates, dicts,
    lists) is delegated to the supplied converter class.
    """

    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._opts = opts
        self._converter = converter
        # Item currently being built; None until startitem() is called.
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # Only finalize output on a clean exit; an exception raised in the
        # with-body should not trigger additional writes.
        if exctype is None:
            self.end()

    def _showitem(self):
        '''show a formatted item once all data is collected'''

    def startitem(self):
        '''begin an item in the format list'''
        # Flush the previous item before starting a fresh one.
        if self._item is not None:
            self._showitem()
        self._item = {}

    def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)

    def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)

    def formatlist(self, data, name, fmt=None, sep=b' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)

    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        ctxs = pycompat.byteskwargs(ctxs)
        # Only these resource names are understood by template rendering.
        assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
        if self._converter.storecontext:
            # populate missing resources in fctx -> ctx -> repo order
            if b'fctx' in ctxs and b'ctx' not in ctxs:
                ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
            if b'ctx' in ctxs and b'repo' not in ctxs:
                ctxs[b'repo'] = ctxs[b'ctx'].repo()
            self._item.update(ctxs)

    def datahint(self):
        '''set of field names to be referenced'''
        return set()

    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        data = pycompat.byteskwargs(data)
        self._item.update(data)

    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        # Structured output records the data unconditionally; only the
        # plain formatter honors cond (see plainformatter.condwrite).
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''

    def isplain(self):
        '''check for plain formatter usage'''
        return False

    def nested(self, field, tmpl=None, sep=b''):
        '''sub formatter to store nested data in the specified field'''
        # The sub formatter appends finished items into this shared list,
        # which is stored (possibly wrapped) under the given field.
        data = []
        self._item[field] = self._converter.wrapnested(data, tmpl, sep)
        return _nestedformatter(self._ui, self._converter, data)

    def end(self):
        '''end output for the formatter'''
        # Flush the last pending item, if any.
        if self._item is not None:
            self._showitem()
270
270
271
271
def nullformatter(ui, topic, opts):
    """Build a formatter that produces no output at all."""
    # A bare baseformatter never emits anything: _showitem() is a no-op.
    converter = _nullconverter
    return baseformatter(ui, topic, opts, converter=converter)
275
275
276
276
class _nestedformatter(baseformatter):
    """Collect sub items on behalf of a parent formatter.

    Finished items are appended to the list owned by the parent instead
    of being rendered directly.
    """

    def __init__(self, ui, converter, data):
        super(_nestedformatter, self).__init__(
            ui, topic=b'', opts={}, converter=converter
        )
        self._data = data

    def _showitem(self):
        # Hand the finished item over to the parent's storage.
        self._data.append(self._item)
288
288
289
289
def _iteritems(data):
    """Return (key, value) pairs of *data* in a stable order.

    Dicts are sorted by key; any other iterable of pairs is assumed to
    already carry its own ordering and is returned untouched.
    """
    if not isinstance(data, dict):
        return data
    return sorted(pycompat.iteritems(data))
295
295
296
296
class _plainconverter(object):
    '''convert non-primitive data types to text'''

    storecontext = False

    @staticmethod
    def wrapnested(data, tmpl, sep):
        # Plain output flattens everything; nesting is a programming error.
        raise error.ProgrammingError(b'plainformatter should never be nested')

    @staticmethod
    def formatdate(date, fmt):
        '''stringify date tuple in the given format'''
        return dateutil.datestr(date, fmt)

    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''stringify key-value pairs separated by sep'''
        if fmt is None:
            # No template given: coerce both sides to bytes ourselves.
            fmt = b'%s=%s'
            prefmt = pycompat.bytestr
        else:
            prefmt = pycompat.identity
        pieces = [fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)]
        return sep.join(pieces)

    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''stringify iterable separated by sep'''
        if fmt is None:
            # No template given: coerce each element to bytes ourselves.
            fmt = b'%s'
            prefmt = pycompat.bytestr
        else:
            prefmt = pycompat.identity
        return sep.join([fmt % prefmt(item) for item in data])
330
330
331
331
class plainformatter(baseformatter):
    '''the default text output scheme'''

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # Full hashes in debug mode, short ones otherwise.
        self.hexfunc = hex if ui.debugflag else short
        if ui is out:
            self._write = ui.write
        else:
            # Raw destination stream: forward the text, drop label options.
            self._write = lambda s, **opts: out.write(s)

    def startitem(self):
        # Plain output keeps no per-item state.
        pass

    def data(self, **data):
        # Extra data is only meaningful for structured output.
        pass

    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''write the default text only when cond holds'''
        if not cond:
            return
        self._write(deftext % fielddata, **opts)

    def plain(self, text, **opts):
        self._write(text, **opts)

    def isplain(self):
        return True

    def nested(self, field, tmpl=None, sep=b''):
        # nested data will be directly written to ui
        return self

    def end(self):
        pass
372
372
373
373
class debugformatter(baseformatter):
    """Render items as a Python-literal list assignment, for debugging."""

    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # Open the list right away; end() emits the closing bracket.
        self._out.write(b"%s = [\n" % self._topic)

    def _showitem(self):
        rendered = stringutil.pprint(self._item, indent=4, level=1)
        self._out.write(b' %s,\n' % rendered)

    def end(self):
        baseformatter.end(self)
        self._out.write(b"]\n")
388
388
389
389
390 class pickleformatter(baseformatter):
390 class pickleformatter(baseformatter):
391 def __init__(self, ui, out, topic, opts):
391 def __init__(self, ui, out, topic, opts):
392 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
392 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
393 self._out = out
393 self._out = out
394 self._data = []
394 self._data = []
395
395
396 def _showitem(self):
396 def _showitem(self):
397 self._data.append(self._item)
397 self._data.append(self._item)
398
398
399 def end(self):
399 def end(self):
400 baseformatter.end(self)
400 baseformatter.end(self)
401 self._out.write(pickle.dumps(self._data))
401 self._out.write(pickle.dumps(self._data))
402
402
403
403
404 class cborformatter(baseformatter):
404 class cborformatter(baseformatter):
405 '''serialize items as an indefinite-length CBOR array'''
405 '''serialize items as an indefinite-length CBOR array'''
406
406
407 def __init__(self, ui, out, topic, opts):
407 def __init__(self, ui, out, topic, opts):
408 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
408 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
409 self._out = out
409 self._out = out
410 self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)
410 self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)
411
411
412 def _showitem(self):
412 def _showitem(self):
413 self._out.write(b''.join(cborutil.streamencode(self._item)))
413 self._out.write(b''.join(cborutil.streamencode(self._item)))
414
414
415 def end(self):
415 def end(self):
416 baseformatter.end(self)
416 baseformatter.end(self)
417 self._out.write(cborutil.BREAK)
417 self._out.write(cborutil.BREAK)
418
418
419
419
420 class jsonformatter(baseformatter):
420 class jsonformatter(baseformatter):
421 def __init__(self, ui, out, topic, opts):
421 def __init__(self, ui, out, topic, opts):
422 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
422 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
423 self._out = out
423 self._out = out
424 self._out.write(b"[")
424 self._out.write(b"[")
425 self._first = True
425 self._first = True
426
426
427 def _showitem(self):
427 def _showitem(self):
428 if self._first:
428 if self._first:
429 self._first = False
429 self._first = False
430 else:
430 else:
431 self._out.write(b",")
431 self._out.write(b",")
432
432
433 self._out.write(b"\n {\n")
433 self._out.write(b"\n {\n")
434 first = True
434 first = True
435 for k, v in sorted(self._item.items()):
435 for k, v in sorted(self._item.items()):
436 if first:
436 if first:
437 first = False
437 first = False
438 else:
438 else:
439 self._out.write(b",\n")
439 self._out.write(b",\n")
440 u = templatefilters.json(v, paranoid=False)
440 u = templatefilters.json(v, paranoid=False)
441 self._out.write(b' "%s": %s' % (k, u))
441 self._out.write(b' "%s": %s' % (k, u))
442 self._out.write(b"\n }")
442 self._out.write(b"\n }")
443
443
444 def end(self):
444 def end(self):
445 baseformatter.end(self)
445 baseformatter.end(self)
446 self._out.write(b"\n]\n")
446 self._out.write(b"\n]\n")
447
447
448
448
449 class _templateconverter(object):
449 class _templateconverter(object):
450 '''convert non-primitive data types to be processed by templater'''
450 '''convert non-primitive data types to be processed by templater'''
451
451
452 storecontext = True
452 storecontext = True
453
453
454 @staticmethod
454 @staticmethod
455 def wrapnested(data, tmpl, sep):
455 def wrapnested(data, tmpl, sep):
456 '''wrap nested data by templatable type'''
456 '''wrap nested data by templatable type'''
457 return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)
457 return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)
458
458
459 @staticmethod
459 @staticmethod
460 def formatdate(date, fmt):
460 def formatdate(date, fmt):
461 '''return date tuple'''
461 '''return date tuple'''
462 return templateutil.date(date)
462 return templateutil.date(date)
463
463
464 @staticmethod
464 @staticmethod
465 def formatdict(data, key, value, fmt, sep):
465 def formatdict(data, key, value, fmt, sep):
466 '''build object that can be evaluated as either plain string or dict'''
466 '''build object that can be evaluated as either plain string or dict'''
467 data = util.sortdict(_iteritems(data))
467 data = util.sortdict(_iteritems(data))
468
468
469 def f():
469 def f():
470 yield _plainconverter.formatdict(data, key, value, fmt, sep)
470 yield _plainconverter.formatdict(data, key, value, fmt, sep)
471
471
472 return templateutil.hybriddict(
472 return templateutil.hybriddict(
473 data, key=key, value=value, fmt=fmt, gen=f
473 data, key=key, value=value, fmt=fmt, gen=f
474 )
474 )
475
475
476 @staticmethod
476 @staticmethod
477 def formatlist(data, name, fmt, sep):
477 def formatlist(data, name, fmt, sep):
478 '''build object that can be evaluated as either plain string or list'''
478 '''build object that can be evaluated as either plain string or list'''
479 data = list(data)
479 data = list(data)
480
480
481 def f():
481 def f():
482 yield _plainconverter.formatlist(data, name, fmt, sep)
482 yield _plainconverter.formatlist(data, name, fmt, sep)
483
483
484 return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
484 return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
485
485
486
486
487 class templateformatter(baseformatter):
487 class templateformatter(baseformatter):
488 def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
488 def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
489 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
489 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
490 self._out = out
490 self._out = out
491 self._tref = spec.ref
491 self._tref = spec.ref
492 self._t = loadtemplater(
492 self._t = loadtemplater(
493 ui,
493 ui,
494 spec,
494 spec,
495 defaults=templatekw.keywords,
495 defaults=templatekw.keywords,
496 resources=templateresources(ui),
496 resources=templateresources(ui),
497 cache=templatekw.defaulttempl,
497 cache=templatekw.defaulttempl,
498 )
498 )
499 if overridetemplates:
499 if overridetemplates:
500 self._t.cache.update(overridetemplates)
500 self._t.cache.update(overridetemplates)
501 self._parts = templatepartsmap(
501 self._parts = templatepartsmap(
502 spec, self._t, [b'docheader', b'docfooter', b'separator']
502 spec, self._t, [b'docheader', b'docfooter', b'separator']
503 )
503 )
504 self._counter = itertools.count()
504 self._counter = itertools.count()
505 self._renderitem(b'docheader', {})
505 self._renderitem(b'docheader', {})
506
506
507 def _showitem(self):
507 def _showitem(self):
508 item = self._item.copy()
508 item = self._item.copy()
509 item[b'index'] = index = next(self._counter)
509 item[b'index'] = index = next(self._counter)
510 if index > 0:
510 if index > 0:
511 self._renderitem(b'separator', {})
511 self._renderitem(b'separator', {})
512 self._renderitem(self._tref, item)
512 self._renderitem(self._tref, item)
513
513
514 def _renderitem(self, part, item):
514 def _renderitem(self, part, item):
515 if part not in self._parts:
515 if part not in self._parts:
516 return
516 return
517 ref = self._parts[part]
517 ref = self._parts[part]
518 # None can't be put in the mapping dict since it means <unset>
518 # None can't be put in the mapping dict since it means <unset>
519 for k, v in item.items():
519 for k, v in item.items():
520 if v is None:
520 if v is None:
521 item[k] = templateutil.wrappedvalue(v)
521 item[k] = templateutil.wrappedvalue(v)
522 self._out.write(self._t.render(ref, item))
522 self._out.write(self._t.render(ref, item))
523
523
524 @util.propertycache
524 @util.propertycache
525 def _symbolsused(self):
525 def _symbolsused(self):
526 return self._t.symbolsused(self._tref)
526 return self._t.symbolsused(self._tref)
527
527
528 def datahint(self):
528 def datahint(self):
529 '''set of field names to be referenced from the template'''
529 '''set of field names to be referenced from the template'''
530 return self._symbolsused[0]
530 return self._symbolsused[0]
531
531
532 def end(self):
532 def end(self):
533 baseformatter.end(self)
533 baseformatter.end(self)
534 self._renderitem(b'docfooter', {})
534 self._renderitem(b'docfooter', {})
535
535
536
536
537 @attr.s(frozen=True)
537 @attr.s(frozen=True)
538 class templatespec(object):
538 class templatespec(object):
539 ref = attr.ib()
539 ref = attr.ib()
540 tmpl = attr.ib()
540 tmpl = attr.ib()
541 mapfile = attr.ib()
541 mapfile = attr.ib()
542 refargs = attr.ib(default=None)
542 refargs = attr.ib(default=None)
543 fp = attr.ib(default=None)
543 fp = attr.ib(default=None)
544
544
545
545
546 def empty_templatespec():
546 def empty_templatespec():
547 return templatespec(None, None, None)
547 return templatespec(None, None, None)
548
548
549
549
550 def reference_templatespec(ref, refargs=None):
550 def reference_templatespec(ref, refargs=None):
551 return templatespec(ref, None, None, refargs)
551 return templatespec(ref, None, None, refargs)
552
552
553
553
554 def literal_templatespec(tmpl):
554 def literal_templatespec(tmpl):
555 if pycompat.ispy3:
555 if pycompat.ispy3:
556 assert not isinstance(tmpl, str), b'tmpl must not be a str'
556 assert not isinstance(tmpl, str), b'tmpl must not be a str'
557 return templatespec(b'', tmpl, None)
557 return templatespec(b'', tmpl, None)
558
558
559
559
560 def mapfile_templatespec(topic, mapfile, fp=None):
560 def mapfile_templatespec(topic, mapfile, fp=None):
561 return templatespec(topic, None, mapfile, fp=fp)
561 return templatespec(topic, None, mapfile, fp=fp)
562
562
563
563
564 def lookuptemplate(ui, topic, tmpl):
564 def lookuptemplate(ui, topic, tmpl):
565 """Find the template matching the given -T/--template spec 'tmpl'
565 """Find the template matching the given -T/--template spec 'tmpl'
566
566
567 'tmpl' can be any of the following:
567 'tmpl' can be any of the following:
568
568
569 - a literal template (e.g. '{rev}')
569 - a literal template (e.g. '{rev}')
570 - a reference to built-in template (i.e. formatter)
570 - a reference to built-in template (i.e. formatter)
571 - a map-file name or path (e.g. 'changelog')
571 - a map-file name or path (e.g. 'changelog')
572 - a reference to [templates] in config file
572 - a reference to [templates] in config file
573 - a path to raw template file
573 - a path to raw template file
574
574
575 A map file defines a stand-alone template environment. If a map file
575 A map file defines a stand-alone template environment. If a map file
576 selected, all templates defined in the file will be loaded, and the
576 selected, all templates defined in the file will be loaded, and the
577 template matching the given topic will be rendered. Aliases won't be
577 template matching the given topic will be rendered. Aliases won't be
578 loaded from user config, but from the map file.
578 loaded from user config, but from the map file.
579
579
580 If no map file selected, all templates in [templates] section will be
580 If no map file selected, all templates in [templates] section will be
581 available as well as aliases in [templatealias].
581 available as well as aliases in [templatealias].
582 """
582 """
583
583
584 if not tmpl:
584 if not tmpl:
585 return empty_templatespec()
585 return empty_templatespec()
586
586
587 # looks like a literal template?
587 # looks like a literal template?
588 if b'{' in tmpl:
588 if b'{' in tmpl:
589 return literal_templatespec(tmpl)
589 return literal_templatespec(tmpl)
590
590
591 # a reference to built-in (formatter) template
591 # a reference to built-in (formatter) template
592 if tmpl in {b'cbor', b'json', b'pickle', b'debug'}:
592 if tmpl in {b'cbor', b'json', b'pickle', b'debug'}:
593 return reference_templatespec(tmpl)
593 return reference_templatespec(tmpl)
594
594
595 # a function-style reference to built-in template
595 # a function-style reference to built-in template
596 func, fsep, ftail = tmpl.partition(b'(')
596 func, fsep, ftail = tmpl.partition(b'(')
597 if func in {b'cbor', b'json'} and fsep and ftail.endswith(b')'):
597 if func in {b'cbor', b'json'} and fsep and ftail.endswith(b')'):
598 templater.parseexpr(tmpl) # make sure syntax errors are confined
598 templater.parseexpr(tmpl) # make sure syntax errors are confined
599 return reference_templatespec(func, refargs=ftail[:-1])
599 return reference_templatespec(func, refargs=ftail[:-1])
600
600
601 # perhaps a stock style?
601 # perhaps a stock style?
602 if not os.path.split(tmpl)[0]:
602 if not os.path.split(tmpl)[0]:
603 (mapname, fp) = templater.open_template(
603 (mapname, fp) = templater.try_open_template(
604 b'map-cmdline.' + tmpl
604 b'map-cmdline.' + tmpl
605 ) or templater.open_template(tmpl)
605 ) or templater.try_open_template(tmpl)
606 if mapname:
606 if mapname:
607 return mapfile_templatespec(topic, mapname, fp)
607 return mapfile_templatespec(topic, mapname, fp)
608
608
609 # perhaps it's a reference to [templates]
609 # perhaps it's a reference to [templates]
610 if ui.config(b'templates', tmpl):
610 if ui.config(b'templates', tmpl):
611 return reference_templatespec(tmpl)
611 return reference_templatespec(tmpl)
612
612
613 if tmpl == b'list':
613 if tmpl == b'list':
614 ui.write(_(b"available styles: %s\n") % templater.stylelist())
614 ui.write(_(b"available styles: %s\n") % templater.stylelist())
615 raise error.Abort(_(b"specify a template"))
615 raise error.Abort(_(b"specify a template"))
616
616
617 # perhaps it's a path to a map or a template
617 # perhaps it's a path to a map or a template
618 if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
618 if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
619 # is it a mapfile for a style?
619 # is it a mapfile for a style?
620 if os.path.basename(tmpl).startswith(b"map-"):
620 if os.path.basename(tmpl).startswith(b"map-"):
621 return mapfile_templatespec(topic, os.path.realpath(tmpl))
621 return mapfile_templatespec(topic, os.path.realpath(tmpl))
622 with util.posixfile(tmpl, b'rb') as f:
622 with util.posixfile(tmpl, b'rb') as f:
623 tmpl = f.read()
623 tmpl = f.read()
624 return literal_templatespec(tmpl)
624 return literal_templatespec(tmpl)
625
625
626 # constant string?
626 # constant string?
627 return literal_templatespec(tmpl)
627 return literal_templatespec(tmpl)
628
628
629
629
630 def templatepartsmap(spec, t, partnames):
630 def templatepartsmap(spec, t, partnames):
631 """Create a mapping of {part: ref}"""
631 """Create a mapping of {part: ref}"""
632 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
632 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
633 if spec.mapfile:
633 if spec.mapfile:
634 partsmap.update((p, p) for p in partnames if p in t)
634 partsmap.update((p, p) for p in partnames if p in t)
635 elif spec.ref:
635 elif spec.ref:
636 for part in partnames:
636 for part in partnames:
637 ref = b'%s:%s' % (spec.ref, part) # select config sub-section
637 ref = b'%s:%s' % (spec.ref, part) # select config sub-section
638 if ref in t:
638 if ref in t:
639 partsmap[part] = ref
639 partsmap[part] = ref
640 return partsmap
640 return partsmap
641
641
642
642
643 def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
643 def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
644 """Create a templater from either a literal template or loading from
644 """Create a templater from either a literal template or loading from
645 a map file"""
645 a map file"""
646 assert not (spec.tmpl and spec.mapfile)
646 assert not (spec.tmpl and spec.mapfile)
647 if spec.mapfile:
647 if spec.mapfile:
648 return templater.templater.frommapfile(
648 return templater.templater.frommapfile(
649 spec.mapfile,
649 spec.mapfile,
650 spec.fp,
650 spec.fp,
651 defaults=defaults,
651 defaults=defaults,
652 resources=resources,
652 resources=resources,
653 cache=cache,
653 cache=cache,
654 )
654 )
655 return maketemplater(
655 return maketemplater(
656 ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
656 ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
657 )
657 )
658
658
659
659
660 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
660 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
661 """Create a templater from a string template 'tmpl'"""
661 """Create a templater from a string template 'tmpl'"""
662 aliases = ui.configitems(b'templatealias')
662 aliases = ui.configitems(b'templatealias')
663 t = templater.templater(
663 t = templater.templater(
664 defaults=defaults, resources=resources, cache=cache, aliases=aliases
664 defaults=defaults, resources=resources, cache=cache, aliases=aliases
665 )
665 )
666 t.cache.update(
666 t.cache.update(
667 (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
667 (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
668 )
668 )
669 if tmpl:
669 if tmpl:
670 t.cache[b''] = tmpl
670 t.cache[b''] = tmpl
671 return t
671 return t
672
672
673
673
674 # marker to denote a resource to be loaded on demand based on mapping values
674 # marker to denote a resource to be loaded on demand based on mapping values
675 # (e.g. (ctx, path) -> fctx)
675 # (e.g. (ctx, path) -> fctx)
676 _placeholder = object()
676 _placeholder = object()
677
677
678
678
679 class templateresources(templater.resourcemapper):
679 class templateresources(templater.resourcemapper):
680 """Resource mapper designed for the default templatekw and function"""
680 """Resource mapper designed for the default templatekw and function"""
681
681
682 def __init__(self, ui, repo=None):
682 def __init__(self, ui, repo=None):
683 self._resmap = {
683 self._resmap = {
684 b'cache': {}, # for templatekw/funcs to store reusable data
684 b'cache': {}, # for templatekw/funcs to store reusable data
685 b'repo': repo,
685 b'repo': repo,
686 b'ui': ui,
686 b'ui': ui,
687 }
687 }
688
688
689 def availablekeys(self, mapping):
689 def availablekeys(self, mapping):
690 return {
690 return {
691 k for k in self.knownkeys() if self._getsome(mapping, k) is not None
691 k for k in self.knownkeys() if self._getsome(mapping, k) is not None
692 }
692 }
693
693
694 def knownkeys(self):
694 def knownkeys(self):
695 return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}
695 return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}
696
696
697 def lookup(self, mapping, key):
697 def lookup(self, mapping, key):
698 if key not in self.knownkeys():
698 if key not in self.knownkeys():
699 return None
699 return None
700 v = self._getsome(mapping, key)
700 v = self._getsome(mapping, key)
701 if v is _placeholder:
701 if v is _placeholder:
702 v = mapping[key] = self._loadermap[key](self, mapping)
702 v = mapping[key] = self._loadermap[key](self, mapping)
703 return v
703 return v
704
704
705 def populatemap(self, context, origmapping, newmapping):
705 def populatemap(self, context, origmapping, newmapping):
706 mapping = {}
706 mapping = {}
707 if self._hasnodespec(newmapping):
707 if self._hasnodespec(newmapping):
708 mapping[b'revcache'] = {} # per-ctx cache
708 mapping[b'revcache'] = {} # per-ctx cache
709 if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
709 if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
710 orignode = templateutil.runsymbol(context, origmapping, b'node')
710 orignode = templateutil.runsymbol(context, origmapping, b'node')
711 mapping[b'originalnode'] = orignode
711 mapping[b'originalnode'] = orignode
712 # put marker to override 'ctx'/'fctx' in mapping if any, and flag
712 # put marker to override 'ctx'/'fctx' in mapping if any, and flag
713 # its existence to be reported by availablekeys()
713 # its existence to be reported by availablekeys()
714 if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
714 if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
715 mapping[b'ctx'] = _placeholder
715 mapping[b'ctx'] = _placeholder
716 if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
716 if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
717 mapping[b'fctx'] = _placeholder
717 mapping[b'fctx'] = _placeholder
718 return mapping
718 return mapping
719
719
720 def _getsome(self, mapping, key):
720 def _getsome(self, mapping, key):
721 v = mapping.get(key)
721 v = mapping.get(key)
722 if v is not None:
722 if v is not None:
723 return v
723 return v
724 return self._resmap.get(key)
724 return self._resmap.get(key)
725
725
726 def _hasliteral(self, mapping, key):
726 def _hasliteral(self, mapping, key):
727 """Test if a literal value is set or unset in the given mapping"""
727 """Test if a literal value is set or unset in the given mapping"""
728 return key in mapping and not callable(mapping[key])
728 return key in mapping and not callable(mapping[key])
729
729
730 def _getliteral(self, mapping, key):
730 def _getliteral(self, mapping, key):
731 """Return value of the given name if it is a literal"""
731 """Return value of the given name if it is a literal"""
732 v = mapping.get(key)
732 v = mapping.get(key)
733 if callable(v):
733 if callable(v):
734 return None
734 return None
735 return v
735 return v
736
736
737 def _hasnodespec(self, mapping):
737 def _hasnodespec(self, mapping):
738 """Test if context revision is set or unset in the given mapping"""
738 """Test if context revision is set or unset in the given mapping"""
739 return b'node' in mapping or b'ctx' in mapping
739 return b'node' in mapping or b'ctx' in mapping
740
740
741 def _loadctx(self, mapping):
741 def _loadctx(self, mapping):
742 repo = self._getsome(mapping, b'repo')
742 repo = self._getsome(mapping, b'repo')
743 node = self._getliteral(mapping, b'node')
743 node = self._getliteral(mapping, b'node')
744 if repo is None or node is None:
744 if repo is None or node is None:
745 return
745 return
746 try:
746 try:
747 return repo[node]
747 return repo[node]
748 except error.RepoLookupError:
748 except error.RepoLookupError:
749 return None # maybe hidden/non-existent node
749 return None # maybe hidden/non-existent node
750
750
751 def _loadfctx(self, mapping):
751 def _loadfctx(self, mapping):
752 ctx = self._getsome(mapping, b'ctx')
752 ctx = self._getsome(mapping, b'ctx')
753 path = self._getliteral(mapping, b'path')
753 path = self._getliteral(mapping, b'path')
754 if ctx is None or path is None:
754 if ctx is None or path is None:
755 return None
755 return None
756 try:
756 try:
757 return ctx[path]
757 return ctx[path]
758 except error.LookupError:
758 except error.LookupError:
759 return None # maybe removed file?
759 return None # maybe removed file?
760
760
761 _loadermap = {
761 _loadermap = {
762 b'ctx': _loadctx,
762 b'ctx': _loadctx,
763 b'fctx': _loadfctx,
763 b'fctx': _loadfctx,
764 }
764 }
765
765
766
766
767 def _internaltemplateformatter(
767 def _internaltemplateformatter(
768 ui,
768 ui,
769 out,
769 out,
770 topic,
770 topic,
771 opts,
771 opts,
772 spec,
772 spec,
773 tmpl,
773 tmpl,
774 docheader=b'',
774 docheader=b'',
775 docfooter=b'',
775 docfooter=b'',
776 separator=b'',
776 separator=b'',
777 ):
777 ):
778 """Build template formatter that handles customizable built-in templates
778 """Build template formatter that handles customizable built-in templates
779 such as -Tjson(...)"""
779 such as -Tjson(...)"""
780 templates = {spec.ref: tmpl}
780 templates = {spec.ref: tmpl}
781 if docheader:
781 if docheader:
782 templates[b'%s:docheader' % spec.ref] = docheader
782 templates[b'%s:docheader' % spec.ref] = docheader
783 if docfooter:
783 if docfooter:
784 templates[b'%s:docfooter' % spec.ref] = docfooter
784 templates[b'%s:docfooter' % spec.ref] = docfooter
785 if separator:
785 if separator:
786 templates[b'%s:separator' % spec.ref] = separator
786 templates[b'%s:separator' % spec.ref] = separator
787 return templateformatter(
787 return templateformatter(
788 ui, out, topic, opts, spec, overridetemplates=templates
788 ui, out, topic, opts, spec, overridetemplates=templates
789 )
789 )
790
790
791
791
792 def formatter(ui, out, topic, opts):
792 def formatter(ui, out, topic, opts):
793 spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
793 spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
794 if spec.ref == b"cbor" and spec.refargs is not None:
794 if spec.ref == b"cbor" and spec.refargs is not None:
795 return _internaltemplateformatter(
795 return _internaltemplateformatter(
796 ui,
796 ui,
797 out,
797 out,
798 topic,
798 topic,
799 opts,
799 opts,
800 spec,
800 spec,
801 tmpl=b'{dict(%s)|cbor}' % spec.refargs,
801 tmpl=b'{dict(%s)|cbor}' % spec.refargs,
802 docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
802 docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
803 docfooter=cborutil.BREAK,
803 docfooter=cborutil.BREAK,
804 )
804 )
805 elif spec.ref == b"cbor":
805 elif spec.ref == b"cbor":
806 return cborformatter(ui, out, topic, opts)
806 return cborformatter(ui, out, topic, opts)
807 elif spec.ref == b"json" and spec.refargs is not None:
807 elif spec.ref == b"json" and spec.refargs is not None:
808 return _internaltemplateformatter(
808 return _internaltemplateformatter(
809 ui,
809 ui,
810 out,
810 out,
811 topic,
811 topic,
812 opts,
812 opts,
813 spec,
813 spec,
814 tmpl=b'{dict(%s)|json}' % spec.refargs,
814 tmpl=b'{dict(%s)|json}' % spec.refargs,
815 docheader=b'[\n ',
815 docheader=b'[\n ',
816 docfooter=b'\n]\n',
816 docfooter=b'\n]\n',
817 separator=b',\n ',
817 separator=b',\n ',
818 )
818 )
819 elif spec.ref == b"json":
819 elif spec.ref == b"json":
820 return jsonformatter(ui, out, topic, opts)
820 return jsonformatter(ui, out, topic, opts)
821 elif spec.ref == b"pickle":
821 elif spec.ref == b"pickle":
822 assert spec.refargs is None, r'function-style not supported'
822 assert spec.refargs is None, r'function-style not supported'
823 return pickleformatter(ui, out, topic, opts)
823 return pickleformatter(ui, out, topic, opts)
824 elif spec.ref == b"debug":
824 elif spec.ref == b"debug":
825 assert spec.refargs is None, r'function-style not supported'
825 assert spec.refargs is None, r'function-style not supported'
826 return debugformatter(ui, out, topic, opts)
826 return debugformatter(ui, out, topic, opts)
827 elif spec.ref or spec.tmpl or spec.mapfile:
827 elif spec.ref or spec.tmpl or spec.mapfile:
828 assert spec.refargs is None, r'function-style not supported'
828 assert spec.refargs is None, r'function-style not supported'
829 return templateformatter(ui, out, topic, opts, spec)
829 return templateformatter(ui, out, topic, opts, spec)
830 # developer config: ui.formatdebug
830 # developer config: ui.formatdebug
831 elif ui.configbool(b'ui', b'formatdebug'):
831 elif ui.configbool(b'ui', b'formatdebug'):
832 return debugformatter(ui, out, topic, opts)
832 return debugformatter(ui, out, topic, opts)
833 # deprecated config: ui.formatjson
833 # deprecated config: ui.formatjson
834 elif ui.configbool(b'ui', b'formatjson'):
834 elif ui.configbool(b'ui', b'formatjson'):
835 return jsonformatter(ui, out, topic, opts)
835 return jsonformatter(ui, out, topic, opts)
836 return plainformatter(ui, out, topic, opts)
836 return plainformatter(ui, out, topic, opts)
837
837
838
838
839 @contextlib.contextmanager
839 @contextlib.contextmanager
840 def openformatter(ui, filename, topic, opts):
840 def openformatter(ui, filename, topic, opts):
841 """Create a formatter that writes outputs to the specified file
841 """Create a formatter that writes outputs to the specified file
842
842
843 Must be invoked using the 'with' statement.
843 Must be invoked using the 'with' statement.
844 """
844 """
845 with util.posixfile(filename, b'wb') as out:
845 with util.posixfile(filename, b'wb') as out:
846 with formatter(ui, out, topic, opts) as fm:
846 with formatter(ui, out, topic, opts) as fm:
847 yield fm
847 yield fm
848
848
849
849
850 @contextlib.contextmanager
850 @contextlib.contextmanager
851 def _neverending(fm):
851 def _neverending(fm):
852 yield fm
852 yield fm
853
853
854
854
855 def maybereopen(fm, filename):
855 def maybereopen(fm, filename):
856 """Create a formatter backed by file if filename specified, else return
856 """Create a formatter backed by file if filename specified, else return
857 the given formatter
857 the given formatter
858
858
859 Must be invoked using the 'with' statement. This will never call fm.end()
859 Must be invoked using the 'with' statement. This will never call fm.end()
860 of the given formatter.
860 of the given formatter.
861 """
861 """
862 if filename:
862 if filename:
863 return openformatter(fm._ui, filename, fm._topic, fm._opts)
863 return openformatter(fm._ui, filename, fm._topic, fm._opts)
864 else:
864 else:
865 return _neverending(fm)
865 return _neverending(fm)
@@ -1,527 +1,527 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import contextlib
11 import contextlib
12 import os
12 import os
13
13
14 from .common import (
14 from .common import (
15 ErrorResponse,
15 ErrorResponse,
16 HTTP_BAD_REQUEST,
16 HTTP_BAD_REQUEST,
17 cspvalues,
17 cspvalues,
18 permhooks,
18 permhooks,
19 statusmessage,
19 statusmessage,
20 )
20 )
21 from ..pycompat import getattr
21 from ..pycompat import getattr
22
22
23 from .. import (
23 from .. import (
24 encoding,
24 encoding,
25 error,
25 error,
26 extensions,
26 extensions,
27 formatter,
27 formatter,
28 hg,
28 hg,
29 hook,
29 hook,
30 profiling,
30 profiling,
31 pycompat,
31 pycompat,
32 registrar,
32 registrar,
33 repoview,
33 repoview,
34 templatefilters,
34 templatefilters,
35 templater,
35 templater,
36 templateutil,
36 templateutil,
37 ui as uimod,
37 ui as uimod,
38 util,
38 util,
39 wireprotoserver,
39 wireprotoserver,
40 )
40 )
41
41
42 from . import (
42 from . import (
43 request as requestmod,
43 request as requestmod,
44 webcommands,
44 webcommands,
45 webutil,
45 webutil,
46 wsgicgi,
46 wsgicgi,
47 )
47 )
48
48
49
49
50 def getstyle(req, configfn, templatepath):
50 def getstyle(req, configfn, templatepath):
51 styles = (
51 styles = (
52 req.qsparams.get(b'style', None),
52 req.qsparams.get(b'style', None),
53 configfn(b'web', b'style'),
53 configfn(b'web', b'style'),
54 b'paper',
54 b'paper',
55 )
55 )
56 return styles, _stylemap(styles, templatepath)
56 return styles, _stylemap(styles, templatepath)
57
57
58
58
59 def _stylemap(styles, path=None):
59 def _stylemap(styles, path=None):
60 """Return path to mapfile for a given style.
60 """Return path to mapfile for a given style.
61
61
62 Searches mapfile in the following locations:
62 Searches mapfile in the following locations:
63 1. templatepath/style/map
63 1. templatepath/style/map
64 2. templatepath/map-style
64 2. templatepath/map-style
65 3. templatepath/map
65 3. templatepath/map
66 """
66 """
67
67
68 for style in styles:
68 for style in styles:
69 # only plain name is allowed to honor template paths
69 # only plain name is allowed to honor template paths
70 if (
70 if (
71 not style
71 not style
72 or style in (pycompat.oscurdir, pycompat.ospardir)
72 or style in (pycompat.oscurdir, pycompat.ospardir)
73 or pycompat.ossep in style
73 or pycompat.ossep in style
74 or pycompat.osaltsep
74 or pycompat.osaltsep
75 and pycompat.osaltsep in style
75 and pycompat.osaltsep in style
76 ):
76 ):
77 continue
77 continue
78 locations = (os.path.join(style, b'map'), b'map-' + style, b'map')
78 locations = (os.path.join(style, b'map'), b'map-' + style, b'map')
79
79
80 for location in locations:
80 for location in locations:
81 mapfile, fp = templater.open_template(location, path)
81 mapfile, fp = templater.try_open_template(location, path)
82 if mapfile:
82 if mapfile:
83 return style, mapfile, fp
83 return style, mapfile, fp
84
84
85 raise RuntimeError(b"No hgweb templates found in %r" % path)
85 raise RuntimeError(b"No hgweb templates found in %r" % path)
86
86
87
87
88 def makebreadcrumb(url, prefix=b''):
88 def makebreadcrumb(url, prefix=b''):
89 '''Return a 'URL breadcrumb' list
89 '''Return a 'URL breadcrumb' list
90
90
91 A 'URL breadcrumb' is a list of URL-name pairs,
91 A 'URL breadcrumb' is a list of URL-name pairs,
92 corresponding to each of the path items on a URL.
92 corresponding to each of the path items on a URL.
93 This can be used to create path navigation entries.
93 This can be used to create path navigation entries.
94 '''
94 '''
95 if url.endswith(b'/'):
95 if url.endswith(b'/'):
96 url = url[:-1]
96 url = url[:-1]
97 if prefix:
97 if prefix:
98 url = b'/' + prefix + url
98 url = b'/' + prefix + url
99 relpath = url
99 relpath = url
100 if relpath.startswith(b'/'):
100 if relpath.startswith(b'/'):
101 relpath = relpath[1:]
101 relpath = relpath[1:]
102
102
103 breadcrumb = []
103 breadcrumb = []
104 urlel = url
104 urlel = url
105 pathitems = [b''] + relpath.split(b'/')
105 pathitems = [b''] + relpath.split(b'/')
106 for pathel in reversed(pathitems):
106 for pathel in reversed(pathitems):
107 if not pathel or not urlel:
107 if not pathel or not urlel:
108 break
108 break
109 breadcrumb.append({b'url': urlel, b'name': pathel})
109 breadcrumb.append({b'url': urlel, b'name': pathel})
110 urlel = os.path.dirname(urlel)
110 urlel = os.path.dirname(urlel)
111 return templateutil.mappinglist(reversed(breadcrumb))
111 return templateutil.mappinglist(reversed(breadcrumb))
112
112
113
113
114 class requestcontext(object):
114 class requestcontext(object):
115 """Holds state/context for an individual request.
115 """Holds state/context for an individual request.
116
116
117 Servers can be multi-threaded. Holding state on the WSGI application
117 Servers can be multi-threaded. Holding state on the WSGI application
118 is prone to race conditions. Instances of this class exist to hold
118 is prone to race conditions. Instances of this class exist to hold
119 mutable and race-free state for requests.
119 mutable and race-free state for requests.
120 """
120 """
121
121
122 def __init__(self, app, repo, req, res):
122 def __init__(self, app, repo, req, res):
123 self.repo = repo
123 self.repo = repo
124 self.reponame = app.reponame
124 self.reponame = app.reponame
125 self.req = req
125 self.req = req
126 self.res = res
126 self.res = res
127
127
128 self.maxchanges = self.configint(b'web', b'maxchanges')
128 self.maxchanges = self.configint(b'web', b'maxchanges')
129 self.stripecount = self.configint(b'web', b'stripes')
129 self.stripecount = self.configint(b'web', b'stripes')
130 self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
130 self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
131 self.maxfiles = self.configint(b'web', b'maxfiles')
131 self.maxfiles = self.configint(b'web', b'maxfiles')
132 self.allowpull = self.configbool(b'web', b'allow-pull')
132 self.allowpull = self.configbool(b'web', b'allow-pull')
133
133
134 # we use untrusted=False to prevent a repo owner from using
134 # we use untrusted=False to prevent a repo owner from using
135 # web.templates in .hg/hgrc to get access to any file readable
135 # web.templates in .hg/hgrc to get access to any file readable
136 # by the user running the CGI script
136 # by the user running the CGI script
137 self.templatepath = self.config(b'web', b'templates', untrusted=False)
137 self.templatepath = self.config(b'web', b'templates', untrusted=False)
138
138
139 # This object is more expensive to build than simple config values.
139 # This object is more expensive to build than simple config values.
140 # It is shared across requests. The app will replace the object
140 # It is shared across requests. The app will replace the object
141 # if it is updated. Since this is a reference and nothing should
141 # if it is updated. Since this is a reference and nothing should
142 # modify the underlying object, it should be constant for the lifetime
142 # modify the underlying object, it should be constant for the lifetime
143 # of the request.
143 # of the request.
144 self.websubtable = app.websubtable
144 self.websubtable = app.websubtable
145
145
146 self.csp, self.nonce = cspvalues(self.repo.ui)
146 self.csp, self.nonce = cspvalues(self.repo.ui)
147
147
148 # Trust the settings from the .hg/hgrc files by default.
148 # Trust the settings from the .hg/hgrc files by default.
149 def config(self, *args, **kwargs):
149 def config(self, *args, **kwargs):
150 kwargs.setdefault('untrusted', True)
150 kwargs.setdefault('untrusted', True)
151 return self.repo.ui.config(*args, **kwargs)
151 return self.repo.ui.config(*args, **kwargs)
152
152
153 def configbool(self, *args, **kwargs):
153 def configbool(self, *args, **kwargs):
154 kwargs.setdefault('untrusted', True)
154 kwargs.setdefault('untrusted', True)
155 return self.repo.ui.configbool(*args, **kwargs)
155 return self.repo.ui.configbool(*args, **kwargs)
156
156
157 def configint(self, *args, **kwargs):
157 def configint(self, *args, **kwargs):
158 kwargs.setdefault('untrusted', True)
158 kwargs.setdefault('untrusted', True)
159 return self.repo.ui.configint(*args, **kwargs)
159 return self.repo.ui.configint(*args, **kwargs)
160
160
161 def configlist(self, *args, **kwargs):
161 def configlist(self, *args, **kwargs):
162 kwargs.setdefault('untrusted', True)
162 kwargs.setdefault('untrusted', True)
163 return self.repo.ui.configlist(*args, **kwargs)
163 return self.repo.ui.configlist(*args, **kwargs)
164
164
165 def archivelist(self, nodeid):
165 def archivelist(self, nodeid):
166 return webutil.archivelist(self.repo.ui, nodeid)
166 return webutil.archivelist(self.repo.ui, nodeid)
167
167
168 def templater(self, req):
168 def templater(self, req):
169 # determine scheme, port and server name
169 # determine scheme, port and server name
170 # this is needed to create absolute urls
170 # this is needed to create absolute urls
171 logourl = self.config(b'web', b'logourl')
171 logourl = self.config(b'web', b'logourl')
172 logoimg = self.config(b'web', b'logoimg')
172 logoimg = self.config(b'web', b'logoimg')
173 staticurl = (
173 staticurl = (
174 self.config(b'web', b'staticurl')
174 self.config(b'web', b'staticurl')
175 or req.apppath.rstrip(b'/') + b'/static/'
175 or req.apppath.rstrip(b'/') + b'/static/'
176 )
176 )
177 if not staticurl.endswith(b'/'):
177 if not staticurl.endswith(b'/'):
178 staticurl += b'/'
178 staticurl += b'/'
179
179
180 # figure out which style to use
180 # figure out which style to use
181
181
182 vars = {}
182 vars = {}
183 styles, (style, mapfile, fp) = getstyle(
183 styles, (style, mapfile, fp) = getstyle(
184 req, self.config, self.templatepath
184 req, self.config, self.templatepath
185 )
185 )
186 if style == styles[0]:
186 if style == styles[0]:
187 vars[b'style'] = style
187 vars[b'style'] = style
188
188
189 sessionvars = webutil.sessionvars(vars, b'?')
189 sessionvars = webutil.sessionvars(vars, b'?')
190
190
191 if not self.reponame:
191 if not self.reponame:
192 self.reponame = (
192 self.reponame = (
193 self.config(b'web', b'name', b'')
193 self.config(b'web', b'name', b'')
194 or req.reponame
194 or req.reponame
195 or req.apppath
195 or req.apppath
196 or self.repo.root
196 or self.repo.root
197 )
197 )
198
198
199 filters = {}
199 filters = {}
200 templatefilter = registrar.templatefilter(filters)
200 templatefilter = registrar.templatefilter(filters)
201
201
202 @templatefilter(b'websub', intype=bytes)
202 @templatefilter(b'websub', intype=bytes)
203 def websubfilter(text):
203 def websubfilter(text):
204 return templatefilters.websub(text, self.websubtable)
204 return templatefilters.websub(text, self.websubtable)
205
205
206 # create the templater
206 # create the templater
207 # TODO: export all keywords: defaults = templatekw.keywords.copy()
207 # TODO: export all keywords: defaults = templatekw.keywords.copy()
208 defaults = {
208 defaults = {
209 b'url': req.apppath + b'/',
209 b'url': req.apppath + b'/',
210 b'logourl': logourl,
210 b'logourl': logourl,
211 b'logoimg': logoimg,
211 b'logoimg': logoimg,
212 b'staticurl': staticurl,
212 b'staticurl': staticurl,
213 b'urlbase': req.advertisedbaseurl,
213 b'urlbase': req.advertisedbaseurl,
214 b'repo': self.reponame,
214 b'repo': self.reponame,
215 b'encoding': encoding.encoding,
215 b'encoding': encoding.encoding,
216 b'sessionvars': sessionvars,
216 b'sessionvars': sessionvars,
217 b'pathdef': makebreadcrumb(req.apppath),
217 b'pathdef': makebreadcrumb(req.apppath),
218 b'style': style,
218 b'style': style,
219 b'nonce': self.nonce,
219 b'nonce': self.nonce,
220 }
220 }
221 templatekeyword = registrar.templatekeyword(defaults)
221 templatekeyword = registrar.templatekeyword(defaults)
222
222
223 @templatekeyword(b'motd', requires=())
223 @templatekeyword(b'motd', requires=())
224 def motd(context, mapping):
224 def motd(context, mapping):
225 yield self.config(b'web', b'motd')
225 yield self.config(b'web', b'motd')
226
226
227 tres = formatter.templateresources(self.repo.ui, self.repo)
227 tres = formatter.templateresources(self.repo.ui, self.repo)
228 return templater.templater.frommapfile(
228 return templater.templater.frommapfile(
229 mapfile, fp=fp, filters=filters, defaults=defaults, resources=tres
229 mapfile, fp=fp, filters=filters, defaults=defaults, resources=tres
230 )
230 )
231
231
232 def sendtemplate(self, name, **kwargs):
232 def sendtemplate(self, name, **kwargs):
233 """Helper function to send a response generated from a template."""
233 """Helper function to send a response generated from a template."""
234 kwargs = pycompat.byteskwargs(kwargs)
234 kwargs = pycompat.byteskwargs(kwargs)
235 self.res.setbodygen(self.tmpl.generate(name, kwargs))
235 self.res.setbodygen(self.tmpl.generate(name, kwargs))
236 return self.res.sendresponse()
236 return self.res.sendresponse()
237
237
238
238
239 class hgweb(object):
239 class hgweb(object):
240 """HTTP server for individual repositories.
240 """HTTP server for individual repositories.
241
241
242 Instances of this class serve HTTP responses for a particular
242 Instances of this class serve HTTP responses for a particular
243 repository.
243 repository.
244
244
245 Instances are typically used as WSGI applications.
245 Instances are typically used as WSGI applications.
246
246
247 Some servers are multi-threaded. On these servers, there may
247 Some servers are multi-threaded. On these servers, there may
248 be multiple active threads inside __call__.
248 be multiple active threads inside __call__.
249 """
249 """
250
250
251 def __init__(self, repo, name=None, baseui=None):
251 def __init__(self, repo, name=None, baseui=None):
252 if isinstance(repo, bytes):
252 if isinstance(repo, bytes):
253 if baseui:
253 if baseui:
254 u = baseui.copy()
254 u = baseui.copy()
255 else:
255 else:
256 u = uimod.ui.load()
256 u = uimod.ui.load()
257 extensions.loadall(u)
257 extensions.loadall(u)
258 extensions.populateui(u)
258 extensions.populateui(u)
259 r = hg.repository(u, repo)
259 r = hg.repository(u, repo)
260 else:
260 else:
261 # we trust caller to give us a private copy
261 # we trust caller to give us a private copy
262 r = repo
262 r = repo
263
263
264 r.ui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
264 r.ui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
265 r.baseui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
265 r.baseui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
266 r.ui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
266 r.ui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
267 r.baseui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
267 r.baseui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
268 # resolve file patterns relative to repo root
268 # resolve file patterns relative to repo root
269 r.ui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
269 r.ui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
270 r.baseui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
270 r.baseui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
271 # it's unlikely that we can replace signal handlers in WSGI server,
271 # it's unlikely that we can replace signal handlers in WSGI server,
272 # and mod_wsgi issues a big warning. a plain hgweb process (with no
272 # and mod_wsgi issues a big warning. a plain hgweb process (with no
273 # threading) could replace signal handlers, but we don't bother
273 # threading) could replace signal handlers, but we don't bother
274 # conditionally enabling it.
274 # conditionally enabling it.
275 r.ui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
275 r.ui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
276 r.baseui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
276 r.baseui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
277 # displaying bundling progress bar while serving feel wrong and may
277 # displaying bundling progress bar while serving feel wrong and may
278 # break some wsgi implementation.
278 # break some wsgi implementation.
279 r.ui.setconfig(b'progress', b'disable', b'true', b'hgweb')
279 r.ui.setconfig(b'progress', b'disable', b'true', b'hgweb')
280 r.baseui.setconfig(b'progress', b'disable', b'true', b'hgweb')
280 r.baseui.setconfig(b'progress', b'disable', b'true', b'hgweb')
281 self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
281 self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
282 self._lastrepo = self._repos[0]
282 self._lastrepo = self._repos[0]
283 hook.redirect(True)
283 hook.redirect(True)
284 self.reponame = name
284 self.reponame = name
285
285
286 def _webifyrepo(self, repo):
286 def _webifyrepo(self, repo):
287 repo = getwebview(repo)
287 repo = getwebview(repo)
288 self.websubtable = webutil.getwebsubs(repo)
288 self.websubtable = webutil.getwebsubs(repo)
289 return repo
289 return repo
290
290
291 @contextlib.contextmanager
291 @contextlib.contextmanager
292 def _obtainrepo(self):
292 def _obtainrepo(self):
293 """Obtain a repo unique to the caller.
293 """Obtain a repo unique to the caller.
294
294
295 Internally we maintain a stack of cachedlocalrepo instances
295 Internally we maintain a stack of cachedlocalrepo instances
296 to be handed out. If one is available, we pop it and return it,
296 to be handed out. If one is available, we pop it and return it,
297 ensuring it is up to date in the process. If one is not available,
297 ensuring it is up to date in the process. If one is not available,
298 we clone the most recently used repo instance and return it.
298 we clone the most recently used repo instance and return it.
299
299
300 It is currently possible for the stack to grow without bounds
300 It is currently possible for the stack to grow without bounds
301 if the server allows infinite threads. However, servers should
301 if the server allows infinite threads. However, servers should
302 have a thread limit, thus establishing our limit.
302 have a thread limit, thus establishing our limit.
303 """
303 """
304 if self._repos:
304 if self._repos:
305 cached = self._repos.pop()
305 cached = self._repos.pop()
306 r, created = cached.fetch()
306 r, created = cached.fetch()
307 else:
307 else:
308 cached = self._lastrepo.copy()
308 cached = self._lastrepo.copy()
309 r, created = cached.fetch()
309 r, created = cached.fetch()
310 if created:
310 if created:
311 r = self._webifyrepo(r)
311 r = self._webifyrepo(r)
312
312
313 self._lastrepo = cached
313 self._lastrepo = cached
314 self.mtime = cached.mtime
314 self.mtime = cached.mtime
315 try:
315 try:
316 yield r
316 yield r
317 finally:
317 finally:
318 self._repos.append(cached)
318 self._repos.append(cached)
319
319
320 def run(self):
320 def run(self):
321 """Start a server from CGI environment.
321 """Start a server from CGI environment.
322
322
323 Modern servers should be using WSGI and should avoid this
323 Modern servers should be using WSGI and should avoid this
324 method, if possible.
324 method, if possible.
325 """
325 """
326 if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
326 if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
327 b"CGI/1."
327 b"CGI/1."
328 ):
328 ):
329 raise RuntimeError(
329 raise RuntimeError(
330 b"This function is only intended to be "
330 b"This function is only intended to be "
331 b"called while running as a CGI script."
331 b"called while running as a CGI script."
332 )
332 )
333 wsgicgi.launch(self)
333 wsgicgi.launch(self)
334
334
335 def __call__(self, env, respond):
335 def __call__(self, env, respond):
336 """Run the WSGI application.
336 """Run the WSGI application.
337
337
338 This may be called by multiple threads.
338 This may be called by multiple threads.
339 """
339 """
340 req = requestmod.parserequestfromenv(env)
340 req = requestmod.parserequestfromenv(env)
341 res = requestmod.wsgiresponse(req, respond)
341 res = requestmod.wsgiresponse(req, respond)
342
342
343 return self.run_wsgi(req, res)
343 return self.run_wsgi(req, res)
344
344
345 def run_wsgi(self, req, res):
345 def run_wsgi(self, req, res):
346 """Internal method to run the WSGI application.
346 """Internal method to run the WSGI application.
347
347
348 This is typically only called by Mercurial. External consumers
348 This is typically only called by Mercurial. External consumers
349 should be using instances of this class as the WSGI application.
349 should be using instances of this class as the WSGI application.
350 """
350 """
351 with self._obtainrepo() as repo:
351 with self._obtainrepo() as repo:
352 profile = repo.ui.configbool(b'profiling', b'enabled')
352 profile = repo.ui.configbool(b'profiling', b'enabled')
353 with profiling.profile(repo.ui, enabled=profile):
353 with profiling.profile(repo.ui, enabled=profile):
354 for r in self._runwsgi(req, res, repo):
354 for r in self._runwsgi(req, res, repo):
355 yield r
355 yield r
356
356
357 def _runwsgi(self, req, res, repo):
357 def _runwsgi(self, req, res, repo):
358 rctx = requestcontext(self, repo, req, res)
358 rctx = requestcontext(self, repo, req, res)
359
359
360 # This state is global across all threads.
360 # This state is global across all threads.
361 encoding.encoding = rctx.config(b'web', b'encoding')
361 encoding.encoding = rctx.config(b'web', b'encoding')
362 rctx.repo.ui.environ = req.rawenv
362 rctx.repo.ui.environ = req.rawenv
363
363
364 if rctx.csp:
364 if rctx.csp:
365 # hgwebdir may have added CSP header. Since we generate our own,
365 # hgwebdir may have added CSP header. Since we generate our own,
366 # replace it.
366 # replace it.
367 res.headers[b'Content-Security-Policy'] = rctx.csp
367 res.headers[b'Content-Security-Policy'] = rctx.csp
368
368
369 # /api/* is reserved for various API implementations. Dispatch
369 # /api/* is reserved for various API implementations. Dispatch
370 # accordingly. But URL paths can conflict with subrepos and virtual
370 # accordingly. But URL paths can conflict with subrepos and virtual
371 # repos in hgwebdir. So until we have a workaround for this, only
371 # repos in hgwebdir. So until we have a workaround for this, only
372 # expose the URLs if the feature is enabled.
372 # expose the URLs if the feature is enabled.
373 apienabled = rctx.repo.ui.configbool(b'experimental', b'web.apiserver')
373 apienabled = rctx.repo.ui.configbool(b'experimental', b'web.apiserver')
374 if apienabled and req.dispatchparts and req.dispatchparts[0] == b'api':
374 if apienabled and req.dispatchparts and req.dispatchparts[0] == b'api':
375 wireprotoserver.handlewsgiapirequest(
375 wireprotoserver.handlewsgiapirequest(
376 rctx, req, res, self.check_perm
376 rctx, req, res, self.check_perm
377 )
377 )
378 return res.sendresponse()
378 return res.sendresponse()
379
379
380 handled = wireprotoserver.handlewsgirequest(
380 handled = wireprotoserver.handlewsgirequest(
381 rctx, req, res, self.check_perm
381 rctx, req, res, self.check_perm
382 )
382 )
383 if handled:
383 if handled:
384 return res.sendresponse()
384 return res.sendresponse()
385
385
386 # Old implementations of hgweb supported dispatching the request via
386 # Old implementations of hgweb supported dispatching the request via
387 # the initial query string parameter instead of using PATH_INFO.
387 # the initial query string parameter instead of using PATH_INFO.
388 # If PATH_INFO is present (signaled by ``req.dispatchpath`` having
388 # If PATH_INFO is present (signaled by ``req.dispatchpath`` having
389 # a value), we use it. Otherwise fall back to the query string.
389 # a value), we use it. Otherwise fall back to the query string.
390 if req.dispatchpath is not None:
390 if req.dispatchpath is not None:
391 query = req.dispatchpath
391 query = req.dispatchpath
392 else:
392 else:
393 query = req.querystring.partition(b'&')[0].partition(b';')[0]
393 query = req.querystring.partition(b'&')[0].partition(b';')[0]
394
394
395 # translate user-visible url structure to internal structure
395 # translate user-visible url structure to internal structure
396
396
397 args = query.split(b'/', 2)
397 args = query.split(b'/', 2)
398 if b'cmd' not in req.qsparams and args and args[0]:
398 if b'cmd' not in req.qsparams and args and args[0]:
399 cmd = args.pop(0)
399 cmd = args.pop(0)
400 style = cmd.rfind(b'-')
400 style = cmd.rfind(b'-')
401 if style != -1:
401 if style != -1:
402 req.qsparams[b'style'] = cmd[:style]
402 req.qsparams[b'style'] = cmd[:style]
403 cmd = cmd[style + 1 :]
403 cmd = cmd[style + 1 :]
404
404
405 # avoid accepting e.g. style parameter as command
405 # avoid accepting e.g. style parameter as command
406 if util.safehasattr(webcommands, cmd):
406 if util.safehasattr(webcommands, cmd):
407 req.qsparams[b'cmd'] = cmd
407 req.qsparams[b'cmd'] = cmd
408
408
409 if cmd == b'static':
409 if cmd == b'static':
410 req.qsparams[b'file'] = b'/'.join(args)
410 req.qsparams[b'file'] = b'/'.join(args)
411 else:
411 else:
412 if args and args[0]:
412 if args and args[0]:
413 node = args.pop(0).replace(b'%2F', b'/')
413 node = args.pop(0).replace(b'%2F', b'/')
414 req.qsparams[b'node'] = node
414 req.qsparams[b'node'] = node
415 if args:
415 if args:
416 if b'file' in req.qsparams:
416 if b'file' in req.qsparams:
417 del req.qsparams[b'file']
417 del req.qsparams[b'file']
418 for a in args:
418 for a in args:
419 req.qsparams.add(b'file', a)
419 req.qsparams.add(b'file', a)
420
420
421 ua = req.headers.get(b'User-Agent', b'')
421 ua = req.headers.get(b'User-Agent', b'')
422 if cmd == b'rev' and b'mercurial' in ua:
422 if cmd == b'rev' and b'mercurial' in ua:
423 req.qsparams[b'style'] = b'raw'
423 req.qsparams[b'style'] = b'raw'
424
424
425 if cmd == b'archive':
425 if cmd == b'archive':
426 fn = req.qsparams[b'node']
426 fn = req.qsparams[b'node']
427 for type_, spec in pycompat.iteritems(webutil.archivespecs):
427 for type_, spec in pycompat.iteritems(webutil.archivespecs):
428 ext = spec[2]
428 ext = spec[2]
429 if fn.endswith(ext):
429 if fn.endswith(ext):
430 req.qsparams[b'node'] = fn[: -len(ext)]
430 req.qsparams[b'node'] = fn[: -len(ext)]
431 req.qsparams[b'type'] = type_
431 req.qsparams[b'type'] = type_
432 else:
432 else:
433 cmd = req.qsparams.get(b'cmd', b'')
433 cmd = req.qsparams.get(b'cmd', b'')
434
434
435 # process the web interface request
435 # process the web interface request
436
436
437 try:
437 try:
438 rctx.tmpl = rctx.templater(req)
438 rctx.tmpl = rctx.templater(req)
439 ctype = rctx.tmpl.render(
439 ctype = rctx.tmpl.render(
440 b'mimetype', {b'encoding': encoding.encoding}
440 b'mimetype', {b'encoding': encoding.encoding}
441 )
441 )
442
442
443 # check read permissions non-static content
443 # check read permissions non-static content
444 if cmd != b'static':
444 if cmd != b'static':
445 self.check_perm(rctx, req, None)
445 self.check_perm(rctx, req, None)
446
446
447 if cmd == b'':
447 if cmd == b'':
448 req.qsparams[b'cmd'] = rctx.tmpl.render(b'default', {})
448 req.qsparams[b'cmd'] = rctx.tmpl.render(b'default', {})
449 cmd = req.qsparams[b'cmd']
449 cmd = req.qsparams[b'cmd']
450
450
451 # Don't enable caching if using a CSP nonce because then it wouldn't
451 # Don't enable caching if using a CSP nonce because then it wouldn't
452 # be a nonce.
452 # be a nonce.
453 if rctx.configbool(b'web', b'cache') and not rctx.nonce:
453 if rctx.configbool(b'web', b'cache') and not rctx.nonce:
454 tag = b'W/"%d"' % self.mtime
454 tag = b'W/"%d"' % self.mtime
455 if req.headers.get(b'If-None-Match') == tag:
455 if req.headers.get(b'If-None-Match') == tag:
456 res.status = b'304 Not Modified'
456 res.status = b'304 Not Modified'
457 # Content-Type may be defined globally. It isn't valid on a
457 # Content-Type may be defined globally. It isn't valid on a
458 # 304, so discard it.
458 # 304, so discard it.
459 try:
459 try:
460 del res.headers[b'Content-Type']
460 del res.headers[b'Content-Type']
461 except KeyError:
461 except KeyError:
462 pass
462 pass
463 # Response body not allowed on 304.
463 # Response body not allowed on 304.
464 res.setbodybytes(b'')
464 res.setbodybytes(b'')
465 return res.sendresponse()
465 return res.sendresponse()
466
466
467 res.headers[b'ETag'] = tag
467 res.headers[b'ETag'] = tag
468
468
469 if cmd not in webcommands.__all__:
469 if cmd not in webcommands.__all__:
470 msg = b'no such method: %s' % cmd
470 msg = b'no such method: %s' % cmd
471 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
471 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
472 else:
472 else:
473 # Set some globals appropriate for web handlers. Commands can
473 # Set some globals appropriate for web handlers. Commands can
474 # override easily enough.
474 # override easily enough.
475 res.status = b'200 Script output follows'
475 res.status = b'200 Script output follows'
476 res.headers[b'Content-Type'] = ctype
476 res.headers[b'Content-Type'] = ctype
477 return getattr(webcommands, cmd)(rctx)
477 return getattr(webcommands, cmd)(rctx)
478
478
479 except (error.LookupError, error.RepoLookupError) as err:
479 except (error.LookupError, error.RepoLookupError) as err:
480 msg = pycompat.bytestr(err)
480 msg = pycompat.bytestr(err)
481 if util.safehasattr(err, b'name') and not isinstance(
481 if util.safehasattr(err, b'name') and not isinstance(
482 err, error.ManifestLookupError
482 err, error.ManifestLookupError
483 ):
483 ):
484 msg = b'revision not found: %s' % err.name
484 msg = b'revision not found: %s' % err.name
485
485
486 res.status = b'404 Not Found'
486 res.status = b'404 Not Found'
487 res.headers[b'Content-Type'] = ctype
487 res.headers[b'Content-Type'] = ctype
488 return rctx.sendtemplate(b'error', error=msg)
488 return rctx.sendtemplate(b'error', error=msg)
489 except (error.RepoError, error.StorageError) as e:
489 except (error.RepoError, error.StorageError) as e:
490 res.status = b'500 Internal Server Error'
490 res.status = b'500 Internal Server Error'
491 res.headers[b'Content-Type'] = ctype
491 res.headers[b'Content-Type'] = ctype
492 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
492 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
493 except error.Abort as e:
493 except error.Abort as e:
494 res.status = b'403 Forbidden'
494 res.status = b'403 Forbidden'
495 res.headers[b'Content-Type'] = ctype
495 res.headers[b'Content-Type'] = ctype
496 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
496 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
497 except ErrorResponse as e:
497 except ErrorResponse as e:
498 for k, v in e.headers:
498 for k, v in e.headers:
499 res.headers[k] = v
499 res.headers[k] = v
500 res.status = statusmessage(e.code, pycompat.bytestr(e))
500 res.status = statusmessage(e.code, pycompat.bytestr(e))
501 res.headers[b'Content-Type'] = ctype
501 res.headers[b'Content-Type'] = ctype
502 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
502 return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
503
503
504 def check_perm(self, rctx, req, op):
504 def check_perm(self, rctx, req, op):
505 for permhook in permhooks:
505 for permhook in permhooks:
506 permhook(rctx, req, op)
506 permhook(rctx, req, op)
507
507
508
508
509 def getwebview(repo):
509 def getwebview(repo):
510 """The 'web.view' config controls changeset filter to hgweb. Possible
510 """The 'web.view' config controls changeset filter to hgweb. Possible
511 values are ``served``, ``visible`` and ``all``. Default is ``served``.
511 values are ``served``, ``visible`` and ``all``. Default is ``served``.
512 The ``served`` filter only shows changesets that can be pulled from the
512 The ``served`` filter only shows changesets that can be pulled from the
513 hgweb instance. The``visible`` filter includes secret changesets but
513 hgweb instance. The``visible`` filter includes secret changesets but
514 still excludes "hidden" one.
514 still excludes "hidden" one.
515
515
516 See the repoview module for details.
516 See the repoview module for details.
517
517
518 The option has been around undocumented since Mercurial 2.5, but no
518 The option has been around undocumented since Mercurial 2.5, but no
519 user ever asked about it. So we better keep it undocumented for now."""
519 user ever asked about it. So we better keep it undocumented for now."""
520 # experimental config: web.view
520 # experimental config: web.view
521 viewconfig = repo.ui.config(b'web', b'view', untrusted=True)
521 viewconfig = repo.ui.config(b'web', b'view', untrusted=True)
522 if viewconfig == b'all':
522 if viewconfig == b'all':
523 return repo.unfiltered()
523 return repo.unfiltered()
524 elif viewconfig in repoview.filtertable:
524 elif viewconfig in repoview.filtertable:
525 return repo.filtered(viewconfig)
525 return repo.filtered(viewconfig)
526 else:
526 else:
527 return repo.filtered(b'served')
527 return repo.filtered(b'served')
@@ -1,1085 +1,1085 b''
1 # logcmdutil.py - utility for log-like commands
1 # logcmdutil.py - utility for log-like commands
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import itertools
10 import itertools
11 import os
11 import os
12 import posixpath
12 import posixpath
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 nullid,
16 nullid,
17 wdirid,
17 wdirid,
18 wdirrev,
18 wdirrev,
19 )
19 )
20
20
21 from . import (
21 from . import (
22 dagop,
22 dagop,
23 error,
23 error,
24 formatter,
24 formatter,
25 graphmod,
25 graphmod,
26 match as matchmod,
26 match as matchmod,
27 mdiff,
27 mdiff,
28 patch,
28 patch,
29 pathutil,
29 pathutil,
30 pycompat,
30 pycompat,
31 revset,
31 revset,
32 revsetlang,
32 revsetlang,
33 scmutil,
33 scmutil,
34 smartset,
34 smartset,
35 templatekw,
35 templatekw,
36 templater,
36 templater,
37 util,
37 util,
38 )
38 )
39 from .utils import (
39 from .utils import (
40 dateutil,
40 dateutil,
41 stringutil,
41 stringutil,
42 )
42 )
43
43
44
44
45 if pycompat.TYPE_CHECKING:
45 if pycompat.TYPE_CHECKING:
46 from typing import (
46 from typing import (
47 Any,
47 Any,
48 Optional,
48 Optional,
49 Tuple,
49 Tuple,
50 )
50 )
51
51
52 for t in (Any, Optional, Tuple):
52 for t in (Any, Optional, Tuple):
53 assert t
53 assert t
54
54
55
55
def getlimit(opts):
    """Return the log limit requested via -l/--limit, or None if unset.

    Raises error.Abort when the value is not a positive integer.
    """
    raw = opts.get(b'limit')
    if not raw:
        return None
    try:
        parsed = int(raw)
    except ValueError:
        raise error.Abort(_(b'limit must be a positive integer'))
    if parsed <= 0:
        raise error.Abort(_(b'limit must be positive'))
    return parsed
69
69
70
70
def diffordiffstat(
    ui,
    repo,
    diffopts,
    ctx1,
    ctx2,
    match,
    changes=None,
    stat=False,
    fp=None,
    graphwidth=0,
    prefix=b'',
    root=b'',
    listsubrepos=False,
    hunksfilterfn=None,
):
    '''show diff or diffstat.

    Writes the diff of ``ctx2`` against ``ctx1`` (restricted to ``match``)
    to ``fp`` when given, otherwise to ``ui``.  With ``stat=True`` a
    diffstat summary is emitted instead of patch text.  ``root`` restricts
    the diff to a subdirectory, ``prefix`` is prepended to emitted paths,
    and matching subrepositories are recursed into per ``listsubrepos``.
    '''
    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = b''
    copysourcematch = None

    def compose(f, g):
        # function composition: compose(f, g)(x) == f(g(x))
        return lambda x: f(g(x))

    def pathfn(f):
        return posixpath.join(prefix, f)

    if relroot != b'':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        uirelroot = uipathfn(pathfn(relroot))
        relroot += b'/'
        # warn about match patterns that can never produce output because
        # they fall outside the requested root
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(
                    _(b'warning: %s not inside relative root %s\n')
                    % (uipathfn(pathfn(matchroot)), uirelroot)
                )

        relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
        match = matchmod.intersectmatchers(match, relrootmatch)
        copysourcematch = relrootmatch

        checkroot = repo.ui.configbool(
            b'devel', b'all-warnings'
        ) or repo.ui.configbool(b'devel', b'check-relroot')

        def relrootpathfn(f):
            # strip the root prefix from emitted paths; with the devel
            # option enabled, assert every path actually carries it
            if checkroot and not f.startswith(relroot):
                raise AssertionError(
                    b"file %s doesn't start with relroot %s" % (f, relroot)
                )
            return f[len(relroot) :]

        pathfn = compose(relrootpathfn, pathfn)

    if stat:
        # diffstat mode: context lines are not needed, and a/b prefixes
        # would confuse the stat parser
        diffopts = diffopts.copy(context=0, noprefix=False)
        width = 80
        if not ui.plain():
            width = ui.termwidth() - graphwidth
        # If an explicit --root was given, don't respect ui.relative-paths
        if not relroot:
            pathfn = compose(scmutil.getuipathfn(repo), pathfn)

    chunks = ctx2.diff(
        ctx1,
        match,
        changes,
        opts=diffopts,
        pathfn=pathfn,
        copysourcematch=copysourcematch,
        hunksfilterfn=hunksfilterfn,
    )

    if fp is not None or ui.canwritewithoutlabels():
        # fast path: raw output, no color labels need to be applied
        out = fp or ui
        if stat:
            chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
        for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
            out.write(chunk)
    else:
        if stat:
            chunks = patch.diffstatui(util.iterlines(chunks), width=width)
        else:
            chunks = patch.difflabel(
                lambda chunks, **kwargs: chunks, chunks, opts=diffopts
            )
        if ui.canbatchlabeledwrites():

            def gen():
                for chunk, label in chunks:
                    yield ui.label(chunk, label=label)

            # batch labeled chunks into larger writes for performance
            for chunk in util.filechunkiter(util.chunkbuffer(gen())):
                ui.write(chunk)
        else:
            for chunk, label in chunks:
                ui.write(chunk, label=label)

    node2 = ctx2.node()
    for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
        tempnode2 = node2
        try:
            if node2 is not None:
                tempnode2 = ctx2.substate[subpath][1]
        except KeyError:
            # A subrepo that existed in node1 was deleted between node1 and
            # node2 (inclusive). Thus, ctx2's substate won't contain that
            # subpath. The best we can do is to ignore it.
            tempnode2 = None
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        if listsubrepos or match.exact(subpath) or any(submatch.files()):
            sub.diff(
                ui,
                diffopts,
                tempnode2,
                submatch,
                changes=changes,
                stat=stat,
                fp=fp,
                prefix=subprefix,
            )
198
198
199
199
class changesetdiffer(object):
    """Generate diff of changeset with pre-configured filtering functions"""

    def _makefilematcher(self, ctx):
        # default: match every file in the repository
        return scmutil.matchall(ctx.repo())

    def _makehunksfilter(self, ctx):
        # default: no hunk-level filtering
        return None

    def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False):
        """Write the diff (or diffstat when ``stat`` is True) of ``ctx``
        against its first parent to ``ui``, honoring the configured
        file matcher and hunks filter."""
        diffordiffstat(
            ui,
            ctx.repo(),
            diffopts,
            ctx.p1(),
            ctx,
            match=self._makefilematcher(ctx),
            stat=stat,
            graphwidth=graphwidth,
            hunksfilterfn=self._makehunksfilter(ctx),
        )
221
221
222
222
def changesetlabels(ctx):
    """Return the space-separated ui label string for *ctx* in log output."""
    parts = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        parts.append(b'changeset.obsolete')
    if ctx.isunstable():
        parts.append(b'changeset.unstable')
        parts.extend(
            b'instability.%s' % instability
            for instability in ctx.instabilities()
        )
    return b' '.join(parts)
232
232
233
233
class changesetprinter(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
        self.ui = ui
        self.repo = repo
        # when buffered, _show output is collected per-rev in self.hunk
        # and written out later by flush()
        self.buffered = buffered
        self._differ = differ or changesetdiffer()
        self._diffopts = patch.diffallopts(ui, diffopts)
        self._includestat = diffopts and diffopts.get(b'stat')
        self._includediff = diffopts and diffopts.get(b'patch')
        self.header = {}  # rev -> rendered header (buffered mode)
        self.hunk = {}  # rev -> rendered changeset body (buffered mode)
        self.lastheader = None  # last header written, to avoid repeats
        self.footer = None
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        """Write any buffered header/body for ``ctx`` and drop the buffers."""
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]

    def close(self):
        """Write the footer, if any, at the end of the whole listing."""
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, **props):
        """Render ``ctx``; in buffered mode, stash the output for flush()."""
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, props)

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        graphwidth = props.get(b'graphwidth', 0)

        if self.ui.quiet:
            # quiet mode: just the changeset id
            self.ui.write(
                b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
            )
            return

        columns = self._columns
        self.ui.write(
            columns[b'changeset'] % scmutil.formatchangeid(ctx),
            label=changesetlabels(ctx),
        )

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != b'default':
            self.ui.write(columns[b'branch'] % branch, label=b'log.branch')

        for nsname, ns in pycompat.iteritems(self.repo.names):
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == b'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(
                columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
            )
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = b'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(
                columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
            )

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            if mnode is None:
                # working-directory revision has no real manifest node
                mnode = wdirid
                mrev = wdirrev
            else:
                mrev = self.repo.manifestlog.rev(mnode)
            self.ui.write(
                columns[b'manifest']
                % scmutil.formatrevnode(self.ui, mrev, mnode),
                label=b'ui.debug log.manifest',
            )
        self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
        self.ui.write(
            columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
        )

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(
                columns[b'instability'] % b', '.join(instabilities),
                label=b'log.instability',
            )

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            # debug: list modified/added/removed files in separate columns
            files = ctx.p1().status(ctx)
            for key, value in zip(
                [b'files', b'files+', b'files-'],
                [files.modified, files.added, files.removed],
            ):
                if value:
                    self.ui.write(
                        columns[key] % b" ".join(value),
                        label=b'ui.debug log.files',
                    )
        elif ctx.files() and self.ui.verbose:
            self.ui.write(
                columns[b'files'] % b" ".join(ctx.files()),
                label=b'ui.note log.files',
            )
        if copies and self.ui.verbose:
            copies = [b'%s (%s)' % c for c in copies]
            self.ui.write(
                columns[b'copies'] % b' '.join(copies),
                label=b'ui.note log.copies',
            )

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(
                    columns[b'extra'] % (key, stringutil.escapestr(value)),
                    label=b'ui.debug log.extra',
                )

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                # verbose: the full, multi-line description
                self.ui.write(
                    _(b"description:\n"), label=b'ui.note log.description'
                )
                self.ui.write(description, label=b'ui.note log.description')
                self.ui.write(b"\n\n")
            else:
                # normal: only the first line as summary
                self.ui.write(
                    columns[b'summary'] % description.splitlines()[0],
                    label=b'log.summary',
                )
        self.ui.write(b"\n")

        self._showpatch(ctx, graphwidth)

    def _showobsfate(self, ctx):
        """Write the obsolescence fate lines for an obsolete ``ctx``."""
        # TODO: do not depend on templater
        tres = formatter.templateresources(self.repo.ui, self.repo)
        t = formatter.maketemplater(
            self.repo.ui,
            b'{join(obsfate, "\n")}',
            defaults=templatekw.keywords,
            resources=tres,
        )
        obsfate = t.renderdefault({b'ctx': ctx}).splitlines()

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(
                    self._columns[b'obsolete'] % obsfateline,
                    label=b'log.obsfate',
                )

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def _showpatch(self, ctx, graphwidth=0):
        """Write diffstat and/or patch per the --stat/--patch options."""
        if self._includestat:
            self._differ.showdiff(
                self.ui, ctx, self._diffopts, graphwidth, stat=True
            )
        if self._includestat and self._includediff:
            # blank line between the stat and the patch
            self.ui.write(b"\n")
        if self._includediff:
            self._differ.showdiff(
                self.ui, ctx, self._diffopts, graphwidth, stat=False
            )
        if self._includestat or self._includediff:
            self.ui.write(b"\n")
431
431
432
432
class changesetformatter(changesetprinter):
    """Format changeset information by generic formatter"""

    def __init__(
        self, ui, repo, fm, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # formatter output always uses git-style diffs
        self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
        self._fm = fm

    def close(self):
        """Finalize the formatter (e.g. close JSON/XML output)."""
        self._fm.end()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        fm = self._fm
        fm.startitem()
        fm.context(ctx=ctx)
        fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))

        # datahint lists fields the output template explicitly requested;
        # in quiet mode with no explicit request, rev/node are enough
        datahint = fm.datahint()
        if self.ui.quiet and not datahint:
            return

        fm.data(
            branch=ctx.branch(),
            phase=ctx.phasestr(),
            user=ctx.user(),
            date=fm.formatdate(ctx.date()),
            desc=ctx.description(),
            bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
            tags=fm.formatlist(ctx.tags(), name=b'tag'),
            parents=fm.formatlist(
                [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
            ),
        )

        if self.ui.debugflag or b'manifest' in datahint:
            fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid))
        if self.ui.debugflag or b'extra' in datahint:
            fm.data(extra=fm.formatdict(ctx.extra()))

        if (
            self.ui.debugflag
            or b'modified' in datahint
            or b'added' in datahint
            or b'removed' in datahint
        ):
            # per-category file lists are computed against the first parent
            files = ctx.p1().status(ctx)
            fm.data(
                modified=fm.formatlist(files.modified, name=b'file'),
                added=fm.formatlist(files.added, name=b'file'),
                removed=fm.formatlist(files.removed, name=b'file'),
            )

        verbose = not self.ui.debugflag and self.ui.verbose
        if verbose or b'files' in datahint:
            fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
        if verbose and copies or b'copies' in datahint:
            fm.data(
                copies=fm.formatdict(copies or {}, key=b'name', value=b'source')
            )

        # diffstat/diff are rendered through the ui buffer and attached
        # as plain data fields
        if self._includestat or b'diffstat' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
            fm.data(diffstat=self.ui.popbuffer())
        if self._includediff or b'diff' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
            fm.data(diff=self.ui.popbuffer())
504
505
505
class changesettemplater(changesetprinter):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changesettemplater for common cases. See functions such as:
    maketemplater, changesetdisplayer, buildcommittemplate, or other
    functions that use changesest_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(
        self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # tres is shared with _graphnodeformatter()
        self._tresources = tres = formatter.templateresources(ui, repo)
        self.t = formatter.loadtemplater(
            ui,
            tmplspec,
            defaults=templatekw.keywords,
            resources=tres,
            cache=templatekw.defaulttempl,
        )
        # monotonically increasing index of the item being shown, used by
        # the 'separator' part below
        self._counter = itertools.count()

        self._tref = tmplspec.ref
        # names of the template parts used for each piece of the output;
        # empty string means "no such part"
        self._parts = {
            b'header': b'',
            b'footer': b'',
            tmplspec.ref: tmplspec.ref,
            b'docheader': b'',
            b'docfooter': b'',
            b'separator': b'',
        }
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, b''),
                (self.ui.verbose, b'_verbose'),
                (self.ui.quiet, b'_quiet'),
                (self.ui.debugflag, b'_debug'),
            ]
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts[b'docheader']:
            self.ui.write(self.t.render(self._parts[b'docheader'], {}))

    def close(self):
        """Render the docfooter into self.footer, then write it out."""
        if self._parts[b'docfooter']:
            if not self.footer:
                self.footer = b""
            self.footer += self.t.render(self._parts[b'docfooter'], {})
        return super(changesettemplater, self).close()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props[b'ctx'] = ctx
        props[b'index'] = index = next(self._counter)
        props[b'revcache'] = {b'copies': copies}
        graphwidth = props.get(b'graphwidth', 0)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts[b'separator'] and index > 0:
            self.ui.write(self.t.render(self._parts[b'separator'], {}))

        # write header
        if self._parts[b'header']:
            h = self.t.render(self._parts[b'header'], props)
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only write a header when it differs from the previous one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(self.t.render(key, props))
        self._exthook(ctx)
        self._showpatch(ctx, graphwidth)

        if self._parts[b'footer']:
            if not self.footer:
                self.footer = self.t.render(self._parts[b'footer'], props)
603
603
604
604
605 def templatespec(tmpl, mapfile):
605 def templatespec(tmpl, mapfile):
606 assert not (tmpl and mapfile)
606 assert not (tmpl and mapfile)
607 if mapfile:
607 if mapfile:
608 return formatter.mapfile_templatespec(b'changeset', mapfile)
608 return formatter.mapfile_templatespec(b'changeset', mapfile)
609 else:
609 else:
610 return formatter.literal_templatespec(tmpl)
610 return formatter.literal_templatespec(tmpl)
611
611
612
612
613 def _lookuptemplate(ui, tmpl, style):
613 def _lookuptemplate(ui, tmpl, style):
614 """Find the template matching the given template spec or style
614 """Find the template matching the given template spec or style
615
615
616 See formatter.lookuptemplate() for details.
616 See formatter.lookuptemplate() for details.
617 """
617 """
618
618
619 # ui settings
619 # ui settings
620 if not tmpl and not style: # template are stronger than style
620 if not tmpl and not style: # template are stronger than style
621 tmpl = ui.config(b'ui', b'logtemplate')
621 tmpl = ui.config(b'ui', b'logtemplate')
622 if tmpl:
622 if tmpl:
623 return formatter.literal_templatespec(templater.unquotestring(tmpl))
623 return formatter.literal_templatespec(templater.unquotestring(tmpl))
624 else:
624 else:
625 style = util.expandpath(ui.config(b'ui', b'style'))
625 style = util.expandpath(ui.config(b'ui', b'style'))
626
626
627 if not tmpl and style:
627 if not tmpl and style:
628 mapfile = style
628 mapfile = style
629 fp = None
629 fp = None
630 if not os.path.split(mapfile)[0]:
630 if not os.path.split(mapfile)[0]:
631 (mapname, fp) = templater.open_template(
631 (mapname, fp) = templater.try_open_template(
632 b'map-cmdline.' + mapfile
632 b'map-cmdline.' + mapfile
633 ) or templater.open_template(mapfile)
633 ) or templater.try_open_template(mapfile)
634 if mapname:
634 if mapname:
635 mapfile = mapname
635 mapfile = mapname
636 return formatter.mapfile_templatespec(b'changeset', mapfile, fp)
636 return formatter.mapfile_templatespec(b'changeset', mapfile, fp)
637
637
638 return formatter.lookuptemplate(ui, b'changeset', tmpl)
638 return formatter.lookuptemplate(ui, b'changeset', tmpl)
639
639
640
640
641 def maketemplater(ui, repo, tmpl, buffered=False):
641 def maketemplater(ui, repo, tmpl, buffered=False):
642 """Create a changesettemplater from a literal template 'tmpl'
642 """Create a changesettemplater from a literal template 'tmpl'
643 byte-string."""
643 byte-string."""
644 spec = formatter.literal_templatespec(tmpl)
644 spec = formatter.literal_templatespec(tmpl)
645 return changesettemplater(ui, repo, spec, buffered=buffered)
645 return changesettemplater(ui, repo, spec, buffered=buffered)
646
646
647
647
648 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
648 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
649 """show one changeset using template or regular display.
649 """show one changeset using template or regular display.
650
650
651 Display format will be the first non-empty hit of:
651 Display format will be the first non-empty hit of:
652 1. option 'template'
652 1. option 'template'
653 2. option 'style'
653 2. option 'style'
654 3. [ui] setting 'logtemplate'
654 3. [ui] setting 'logtemplate'
655 4. [ui] setting 'style'
655 4. [ui] setting 'style'
656 If all of these values are either the unset or the empty string,
656 If all of these values are either the unset or the empty string,
657 regular display via changesetprinter() is done.
657 regular display via changesetprinter() is done.
658 """
658 """
659 postargs = (differ, opts, buffered)
659 postargs = (differ, opts, buffered)
660 spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
660 spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
661
661
662 # machine-readable formats have slightly different keyword set than
662 # machine-readable formats have slightly different keyword set than
663 # plain templates, which are handled by changesetformatter.
663 # plain templates, which are handled by changesetformatter.
664 # note that {b'pickle', b'debug'} can also be added to the list if needed.
664 # note that {b'pickle', b'debug'} can also be added to the list if needed.
665 if spec.ref in {b'cbor', b'json'}:
665 if spec.ref in {b'cbor', b'json'}:
666 fm = ui.formatter(b'log', opts)
666 fm = ui.formatter(b'log', opts)
667 return changesetformatter(ui, repo, fm, *postargs)
667 return changesetformatter(ui, repo, fm, *postargs)
668
668
669 if not spec.ref and not spec.tmpl and not spec.mapfile:
669 if not spec.ref and not spec.tmpl and not spec.mapfile:
670 return changesetprinter(ui, repo, *postargs)
670 return changesetprinter(ui, repo, *postargs)
671
671
672 return changesettemplater(ui, repo, spec, *postargs)
672 return changesettemplater(ui, repo, spec, *postargs)
673
673
674
674
675 def _makematcher(repo, revs, pats, opts):
675 def _makematcher(repo, revs, pats, opts):
676 """Build matcher and expanded patterns from log options
676 """Build matcher and expanded patterns from log options
677
677
678 If --follow, revs are the revisions to follow from.
678 If --follow, revs are the revisions to follow from.
679
679
680 Returns (match, pats, slowpath) where
680 Returns (match, pats, slowpath) where
681 - match: a matcher built from the given pats and -I/-X opts
681 - match: a matcher built from the given pats and -I/-X opts
682 - pats: patterns used (globs are expanded on Windows)
682 - pats: patterns used (globs are expanded on Windows)
683 - slowpath: True if patterns aren't as simple as scanning filelogs
683 - slowpath: True if patterns aren't as simple as scanning filelogs
684 """
684 """
685 # pats/include/exclude are passed to match.match() directly in
685 # pats/include/exclude are passed to match.match() directly in
686 # _matchfiles() revset but walkchangerevs() builds its matcher with
686 # _matchfiles() revset but walkchangerevs() builds its matcher with
687 # scmutil.match(). The difference is input pats are globbed on
687 # scmutil.match(). The difference is input pats are globbed on
688 # platforms without shell expansion (windows).
688 # platforms without shell expansion (windows).
689 wctx = repo[None]
689 wctx = repo[None]
690 match, pats = scmutil.matchandpats(wctx, pats, opts)
690 match, pats = scmutil.matchandpats(wctx, pats, opts)
691 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
691 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
692 if not slowpath:
692 if not slowpath:
693 follow = opts.get(b'follow') or opts.get(b'follow_first')
693 follow = opts.get(b'follow') or opts.get(b'follow_first')
694 startctxs = []
694 startctxs = []
695 if follow and opts.get(b'rev'):
695 if follow and opts.get(b'rev'):
696 startctxs = [repo[r] for r in revs]
696 startctxs = [repo[r] for r in revs]
697 for f in match.files():
697 for f in match.files():
698 if follow and startctxs:
698 if follow and startctxs:
699 # No idea if the path was a directory at that revision, so
699 # No idea if the path was a directory at that revision, so
700 # take the slow path.
700 # take the slow path.
701 if any(f not in c for c in startctxs):
701 if any(f not in c for c in startctxs):
702 slowpath = True
702 slowpath = True
703 continue
703 continue
704 elif follow and f not in wctx:
704 elif follow and f not in wctx:
705 # If the file exists, it may be a directory, so let it
705 # If the file exists, it may be a directory, so let it
706 # take the slow path.
706 # take the slow path.
707 if os.path.exists(repo.wjoin(f)):
707 if os.path.exists(repo.wjoin(f)):
708 slowpath = True
708 slowpath = True
709 continue
709 continue
710 else:
710 else:
711 raise error.Abort(
711 raise error.Abort(
712 _(
712 _(
713 b'cannot follow file not in parent '
713 b'cannot follow file not in parent '
714 b'revision: "%s"'
714 b'revision: "%s"'
715 )
715 )
716 % f
716 % f
717 )
717 )
718 filelog = repo.file(f)
718 filelog = repo.file(f)
719 if not filelog:
719 if not filelog:
720 # A zero count may be a directory or deleted file, so
720 # A zero count may be a directory or deleted file, so
721 # try to find matching entries on the slow path.
721 # try to find matching entries on the slow path.
722 if follow:
722 if follow:
723 raise error.Abort(
723 raise error.Abort(
724 _(b'cannot follow nonexistent file: "%s"') % f
724 _(b'cannot follow nonexistent file: "%s"') % f
725 )
725 )
726 slowpath = True
726 slowpath = True
727
727
728 # We decided to fall back to the slowpath because at least one
728 # We decided to fall back to the slowpath because at least one
729 # of the paths was not a file. Check to see if at least one of them
729 # of the paths was not a file. Check to see if at least one of them
730 # existed in history - in that case, we'll continue down the
730 # existed in history - in that case, we'll continue down the
731 # slowpath; otherwise, we can turn off the slowpath
731 # slowpath; otherwise, we can turn off the slowpath
732 if slowpath:
732 if slowpath:
733 for path in match.files():
733 for path in match.files():
734 if path == b'.' or path in repo.store:
734 if path == b'.' or path in repo.store:
735 break
735 break
736 else:
736 else:
737 slowpath = False
737 slowpath = False
738
738
739 return match, pats, slowpath
739 return match, pats, slowpath
740
740
741
741
742 def _fileancestors(repo, revs, match, followfirst):
742 def _fileancestors(repo, revs, match, followfirst):
743 fctxs = []
743 fctxs = []
744 for r in revs:
744 for r in revs:
745 ctx = repo[r]
745 ctx = repo[r]
746 fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))
746 fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))
747
747
748 # When displaying a revision with --patch --follow FILE, we have
748 # When displaying a revision with --patch --follow FILE, we have
749 # to know which file of the revision must be diffed. With
749 # to know which file of the revision must be diffed. With
750 # --follow, we want the names of the ancestors of FILE in the
750 # --follow, we want the names of the ancestors of FILE in the
751 # revision, stored in "fcache". "fcache" is populated as a side effect
751 # revision, stored in "fcache". "fcache" is populated as a side effect
752 # of the graph traversal.
752 # of the graph traversal.
753 fcache = {}
753 fcache = {}
754
754
755 def filematcher(ctx):
755 def filematcher(ctx):
756 return scmutil.matchfiles(repo, fcache.get(ctx.rev(), []))
756 return scmutil.matchfiles(repo, fcache.get(ctx.rev(), []))
757
757
758 def revgen():
758 def revgen():
759 for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
759 for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
760 fcache[rev] = [c.path() for c in cs]
760 fcache[rev] = [c.path() for c in cs]
761 yield rev
761 yield rev
762
762
763 return smartset.generatorset(revgen(), iterasc=False), filematcher
763 return smartset.generatorset(revgen(), iterasc=False), filematcher
764
764
765
765
766 def _makenofollowfilematcher(repo, pats, opts):
766 def _makenofollowfilematcher(repo, pats, opts):
767 '''hook for extensions to override the filematcher for non-follow cases'''
767 '''hook for extensions to override the filematcher for non-follow cases'''
768 return None
768 return None
769
769
770
770
771 _opt2logrevset = {
771 _opt2logrevset = {
772 b'no_merges': (b'not merge()', None),
772 b'no_merges': (b'not merge()', None),
773 b'only_merges': (b'merge()', None),
773 b'only_merges': (b'merge()', None),
774 b'_matchfiles': (None, b'_matchfiles(%ps)'),
774 b'_matchfiles': (None, b'_matchfiles(%ps)'),
775 b'date': (b'date(%s)', None),
775 b'date': (b'date(%s)', None),
776 b'branch': (b'branch(%s)', b'%lr'),
776 b'branch': (b'branch(%s)', b'%lr'),
777 b'_patslog': (b'filelog(%s)', b'%lr'),
777 b'_patslog': (b'filelog(%s)', b'%lr'),
778 b'keyword': (b'keyword(%s)', b'%lr'),
778 b'keyword': (b'keyword(%s)', b'%lr'),
779 b'prune': (b'ancestors(%s)', b'not %lr'),
779 b'prune': (b'ancestors(%s)', b'not %lr'),
780 b'user': (b'user(%s)', b'%lr'),
780 b'user': (b'user(%s)', b'%lr'),
781 }
781 }
782
782
783
783
784 def _makerevset(repo, match, pats, slowpath, opts):
784 def _makerevset(repo, match, pats, slowpath, opts):
785 """Return a revset string built from log options and file patterns"""
785 """Return a revset string built from log options and file patterns"""
786 opts = dict(opts)
786 opts = dict(opts)
787 # follow or not follow?
787 # follow or not follow?
788 follow = opts.get(b'follow') or opts.get(b'follow_first')
788 follow = opts.get(b'follow') or opts.get(b'follow_first')
789
789
790 # branch and only_branch are really aliases and must be handled at
790 # branch and only_branch are really aliases and must be handled at
791 # the same time
791 # the same time
792 opts[b'branch'] = opts.get(b'branch', []) + opts.get(b'only_branch', [])
792 opts[b'branch'] = opts.get(b'branch', []) + opts.get(b'only_branch', [])
793 opts[b'branch'] = [repo.lookupbranch(b) for b in opts[b'branch']]
793 opts[b'branch'] = [repo.lookupbranch(b) for b in opts[b'branch']]
794
794
795 if slowpath:
795 if slowpath:
796 # See walkchangerevs() slow path.
796 # See walkchangerevs() slow path.
797 #
797 #
798 # pats/include/exclude cannot be represented as separate
798 # pats/include/exclude cannot be represented as separate
799 # revset expressions as their filtering logic applies at file
799 # revset expressions as their filtering logic applies at file
800 # level. For instance "-I a -X b" matches a revision touching
800 # level. For instance "-I a -X b" matches a revision touching
801 # "a" and "b" while "file(a) and not file(b)" does
801 # "a" and "b" while "file(a) and not file(b)" does
802 # not. Besides, filesets are evaluated against the working
802 # not. Besides, filesets are evaluated against the working
803 # directory.
803 # directory.
804 matchargs = [b'r:', b'd:relpath']
804 matchargs = [b'r:', b'd:relpath']
805 for p in pats:
805 for p in pats:
806 matchargs.append(b'p:' + p)
806 matchargs.append(b'p:' + p)
807 for p in opts.get(b'include', []):
807 for p in opts.get(b'include', []):
808 matchargs.append(b'i:' + p)
808 matchargs.append(b'i:' + p)
809 for p in opts.get(b'exclude', []):
809 for p in opts.get(b'exclude', []):
810 matchargs.append(b'x:' + p)
810 matchargs.append(b'x:' + p)
811 opts[b'_matchfiles'] = matchargs
811 opts[b'_matchfiles'] = matchargs
812 elif not follow:
812 elif not follow:
813 opts[b'_patslog'] = list(pats)
813 opts[b'_patslog'] = list(pats)
814
814
815 expr = []
815 expr = []
816 for op, val in sorted(pycompat.iteritems(opts)):
816 for op, val in sorted(pycompat.iteritems(opts)):
817 if not val:
817 if not val:
818 continue
818 continue
819 if op not in _opt2logrevset:
819 if op not in _opt2logrevset:
820 continue
820 continue
821 revop, listop = _opt2logrevset[op]
821 revop, listop = _opt2logrevset[op]
822 if revop and b'%' not in revop:
822 if revop and b'%' not in revop:
823 expr.append(revop)
823 expr.append(revop)
824 elif not listop:
824 elif not listop:
825 expr.append(revsetlang.formatspec(revop, val))
825 expr.append(revsetlang.formatspec(revop, val))
826 else:
826 else:
827 if revop:
827 if revop:
828 val = [revsetlang.formatspec(revop, v) for v in val]
828 val = [revsetlang.formatspec(revop, v) for v in val]
829 expr.append(revsetlang.formatspec(listop, val))
829 expr.append(revsetlang.formatspec(listop, val))
830
830
831 if expr:
831 if expr:
832 expr = b'(' + b' and '.join(expr) + b')'
832 expr = b'(' + b' and '.join(expr) + b')'
833 else:
833 else:
834 expr = None
834 expr = None
835 return expr
835 return expr
836
836
837
837
838 def _initialrevs(repo, opts):
838 def _initialrevs(repo, opts):
839 """Return the initial set of revisions to be filtered or followed"""
839 """Return the initial set of revisions to be filtered or followed"""
840 follow = opts.get(b'follow') or opts.get(b'follow_first')
840 follow = opts.get(b'follow') or opts.get(b'follow_first')
841 if opts.get(b'rev'):
841 if opts.get(b'rev'):
842 revs = scmutil.revrange(repo, opts[b'rev'])
842 revs = scmutil.revrange(repo, opts[b'rev'])
843 elif follow and repo.dirstate.p1() == nullid:
843 elif follow and repo.dirstate.p1() == nullid:
844 revs = smartset.baseset()
844 revs = smartset.baseset()
845 elif follow:
845 elif follow:
846 revs = repo.revs(b'.')
846 revs = repo.revs(b'.')
847 else:
847 else:
848 revs = smartset.spanset(repo)
848 revs = smartset.spanset(repo)
849 revs.reverse()
849 revs.reverse()
850 return revs
850 return revs
851
851
852
852
853 def getrevs(repo, pats, opts):
853 def getrevs(repo, pats, opts):
854 # type: (Any, Any, Any) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
854 # type: (Any, Any, Any) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
855 """Return (revs, differ) where revs is a smartset
855 """Return (revs, differ) where revs is a smartset
856
856
857 differ is a changesetdiffer with pre-configured file matcher.
857 differ is a changesetdiffer with pre-configured file matcher.
858 """
858 """
859 follow = opts.get(b'follow') or opts.get(b'follow_first')
859 follow = opts.get(b'follow') or opts.get(b'follow_first')
860 followfirst = opts.get(b'follow_first')
860 followfirst = opts.get(b'follow_first')
861 limit = getlimit(opts)
861 limit = getlimit(opts)
862 revs = _initialrevs(repo, opts)
862 revs = _initialrevs(repo, opts)
863 if not revs:
863 if not revs:
864 return smartset.baseset(), None
864 return smartset.baseset(), None
865 match, pats, slowpath = _makematcher(repo, revs, pats, opts)
865 match, pats, slowpath = _makematcher(repo, revs, pats, opts)
866 filematcher = None
866 filematcher = None
867 if follow:
867 if follow:
868 if slowpath or match.always():
868 if slowpath or match.always():
869 revs = dagop.revancestors(repo, revs, followfirst=followfirst)
869 revs = dagop.revancestors(repo, revs, followfirst=followfirst)
870 else:
870 else:
871 revs, filematcher = _fileancestors(repo, revs, match, followfirst)
871 revs, filematcher = _fileancestors(repo, revs, match, followfirst)
872 revs.reverse()
872 revs.reverse()
873 if filematcher is None:
873 if filematcher is None:
874 filematcher = _makenofollowfilematcher(repo, pats, opts)
874 filematcher = _makenofollowfilematcher(repo, pats, opts)
875 if filematcher is None:
875 if filematcher is None:
876
876
877 def filematcher(ctx):
877 def filematcher(ctx):
878 return match
878 return match
879
879
880 expr = _makerevset(repo, match, pats, slowpath, opts)
880 expr = _makerevset(repo, match, pats, slowpath, opts)
881 if opts.get(b'graph'):
881 if opts.get(b'graph'):
882 # User-specified revs might be unsorted, but don't sort before
882 # User-specified revs might be unsorted, but don't sort before
883 # _makerevset because it might depend on the order of revs
883 # _makerevset because it might depend on the order of revs
884 if repo.ui.configbool(b'experimental', b'log.topo'):
884 if repo.ui.configbool(b'experimental', b'log.topo'):
885 if not revs.istopo():
885 if not revs.istopo():
886 revs = dagop.toposort(revs, repo.changelog.parentrevs)
886 revs = dagop.toposort(revs, repo.changelog.parentrevs)
887 # TODO: try to iterate the set lazily
887 # TODO: try to iterate the set lazily
888 revs = revset.baseset(list(revs), istopo=True)
888 revs = revset.baseset(list(revs), istopo=True)
889 elif not (revs.isdescending() or revs.istopo()):
889 elif not (revs.isdescending() or revs.istopo()):
890 revs.sort(reverse=True)
890 revs.sort(reverse=True)
891 if expr:
891 if expr:
892 matcher = revset.match(None, expr)
892 matcher = revset.match(None, expr)
893 revs = matcher(repo, revs)
893 revs = matcher(repo, revs)
894 if limit is not None:
894 if limit is not None:
895 revs = revs.slice(0, limit)
895 revs = revs.slice(0, limit)
896
896
897 differ = changesetdiffer()
897 differ = changesetdiffer()
898 differ._makefilematcher = filematcher
898 differ._makefilematcher = filematcher
899 return revs, differ
899 return revs, differ
900
900
901
901
902 def _parselinerangeopt(repo, opts):
902 def _parselinerangeopt(repo, opts):
903 """Parse --line-range log option and return a list of tuples (filename,
903 """Parse --line-range log option and return a list of tuples (filename,
904 (fromline, toline)).
904 (fromline, toline)).
905 """
905 """
906 linerangebyfname = []
906 linerangebyfname = []
907 for pat in opts.get(b'line_range', []):
907 for pat in opts.get(b'line_range', []):
908 try:
908 try:
909 pat, linerange = pat.rsplit(b',', 1)
909 pat, linerange = pat.rsplit(b',', 1)
910 except ValueError:
910 except ValueError:
911 raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
911 raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
912 try:
912 try:
913 fromline, toline = map(int, linerange.split(b':'))
913 fromline, toline = map(int, linerange.split(b':'))
914 except ValueError:
914 except ValueError:
915 raise error.Abort(_(b"invalid line range for %s") % pat)
915 raise error.Abort(_(b"invalid line range for %s") % pat)
916 msg = _(b"line range pattern '%s' must match exactly one file") % pat
916 msg = _(b"line range pattern '%s' must match exactly one file") % pat
917 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
917 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
918 linerangebyfname.append(
918 linerangebyfname.append(
919 (fname, util.processlinerange(fromline, toline))
919 (fname, util.processlinerange(fromline, toline))
920 )
920 )
921 return linerangebyfname
921 return linerangebyfname
922
922
923
923
924 def getlinerangerevs(repo, userrevs, opts):
924 def getlinerangerevs(repo, userrevs, opts):
925 """Return (revs, differ).
925 """Return (revs, differ).
926
926
927 "revs" are revisions obtained by processing "line-range" log options and
927 "revs" are revisions obtained by processing "line-range" log options and
928 walking block ancestors of each specified file/line-range.
928 walking block ancestors of each specified file/line-range.
929
929
930 "differ" is a changesetdiffer with pre-configured file matcher and hunks
930 "differ" is a changesetdiffer with pre-configured file matcher and hunks
931 filter.
931 filter.
932 """
932 """
933 wctx = repo[None]
933 wctx = repo[None]
934
934
935 # Two-levels map of "rev -> file ctx -> [line range]".
935 # Two-levels map of "rev -> file ctx -> [line range]".
936 linerangesbyrev = {}
936 linerangesbyrev = {}
937 for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
937 for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
938 if fname not in wctx:
938 if fname not in wctx:
939 raise error.Abort(
939 raise error.Abort(
940 _(b'cannot follow file not in parent revision: "%s"') % fname
940 _(b'cannot follow file not in parent revision: "%s"') % fname
941 )
941 )
942 fctx = wctx.filectx(fname)
942 fctx = wctx.filectx(fname)
943 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
943 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
944 rev = fctx.introrev()
944 rev = fctx.introrev()
945 if rev is None:
945 if rev is None:
946 rev = wdirrev
946 rev = wdirrev
947 if rev not in userrevs:
947 if rev not in userrevs:
948 continue
948 continue
949 linerangesbyrev.setdefault(rev, {}).setdefault(
949 linerangesbyrev.setdefault(rev, {}).setdefault(
950 fctx.path(), []
950 fctx.path(), []
951 ).append(linerange)
951 ).append(linerange)
952
952
953 def nofilterhunksfn(fctx, hunks):
953 def nofilterhunksfn(fctx, hunks):
954 return hunks
954 return hunks
955
955
956 def hunksfilter(ctx):
956 def hunksfilter(ctx):
957 fctxlineranges = linerangesbyrev.get(scmutil.intrev(ctx))
957 fctxlineranges = linerangesbyrev.get(scmutil.intrev(ctx))
958 if fctxlineranges is None:
958 if fctxlineranges is None:
959 return nofilterhunksfn
959 return nofilterhunksfn
960
960
961 def filterfn(fctx, hunks):
961 def filterfn(fctx, hunks):
962 lineranges = fctxlineranges.get(fctx.path())
962 lineranges = fctxlineranges.get(fctx.path())
963 if lineranges is not None:
963 if lineranges is not None:
964 for hr, lines in hunks:
964 for hr, lines in hunks:
965 if hr is None: # binary
965 if hr is None: # binary
966 yield hr, lines
966 yield hr, lines
967 continue
967 continue
968 if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
968 if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
969 yield hr, lines
969 yield hr, lines
970 else:
970 else:
971 for hunk in hunks:
971 for hunk in hunks:
972 yield hunk
972 yield hunk
973
973
974 return filterfn
974 return filterfn
975
975
976 def filematcher(ctx):
976 def filematcher(ctx):
977 files = list(linerangesbyrev.get(scmutil.intrev(ctx), []))
977 files = list(linerangesbyrev.get(scmutil.intrev(ctx), []))
978 return scmutil.matchfiles(repo, files)
978 return scmutil.matchfiles(repo, files)
979
979
980 revs = sorted(linerangesbyrev, reverse=True)
980 revs = sorted(linerangesbyrev, reverse=True)
981
981
982 differ = changesetdiffer()
982 differ = changesetdiffer()
983 differ._makefilematcher = filematcher
983 differ._makefilematcher = filematcher
984 differ._makehunksfilter = hunksfilter
984 differ._makehunksfilter = hunksfilter
985 return smartset.baseset(revs), differ
985 return smartset.baseset(revs), differ
986
986
987
987
988 def _graphnodeformatter(ui, displayer):
988 def _graphnodeformatter(ui, displayer):
989 spec = ui.config(b'ui', b'graphnodetemplate')
989 spec = ui.config(b'ui', b'graphnodetemplate')
990 if not spec:
990 if not spec:
991 return templatekw.getgraphnode # fast path for "{graphnode}"
991 return templatekw.getgraphnode # fast path for "{graphnode}"
992
992
993 spec = templater.unquotestring(spec)
993 spec = templater.unquotestring(spec)
994 if isinstance(displayer, changesettemplater):
994 if isinstance(displayer, changesettemplater):
995 # reuse cache of slow templates
995 # reuse cache of slow templates
996 tres = displayer._tresources
996 tres = displayer._tresources
997 else:
997 else:
998 tres = formatter.templateresources(ui)
998 tres = formatter.templateresources(ui)
999 templ = formatter.maketemplater(
999 templ = formatter.maketemplater(
1000 ui, spec, defaults=templatekw.keywords, resources=tres
1000 ui, spec, defaults=templatekw.keywords, resources=tres
1001 )
1001 )
1002
1002
1003 def formatnode(repo, ctx, cache):
1003 def formatnode(repo, ctx, cache):
1004 props = {b'ctx': ctx, b'repo': repo}
1004 props = {b'ctx': ctx, b'repo': repo}
1005 return templ.renderdefault(props)
1005 return templ.renderdefault(props)
1006
1006
1007 return formatnode
1007 return formatnode
1008
1008
1009
1009
1010 def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
1010 def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
1011 props = props or {}
1011 props = props or {}
1012 formatnode = _graphnodeformatter(ui, displayer)
1012 formatnode = _graphnodeformatter(ui, displayer)
1013 state = graphmod.asciistate()
1013 state = graphmod.asciistate()
1014 styles = state.styles
1014 styles = state.styles
1015
1015
1016 # only set graph styling if HGPLAIN is not set.
1016 # only set graph styling if HGPLAIN is not set.
1017 if ui.plain(b'graph'):
1017 if ui.plain(b'graph'):
1018 # set all edge styles to |, the default pre-3.8 behaviour
1018 # set all edge styles to |, the default pre-3.8 behaviour
1019 styles.update(dict.fromkeys(styles, b'|'))
1019 styles.update(dict.fromkeys(styles, b'|'))
1020 else:
1020 else:
1021 edgetypes = {
1021 edgetypes = {
1022 b'parent': graphmod.PARENT,
1022 b'parent': graphmod.PARENT,
1023 b'grandparent': graphmod.GRANDPARENT,
1023 b'grandparent': graphmod.GRANDPARENT,
1024 b'missing': graphmod.MISSINGPARENT,
1024 b'missing': graphmod.MISSINGPARENT,
1025 }
1025 }
1026 for name, key in edgetypes.items():
1026 for name, key in edgetypes.items():
1027 # experimental config: experimental.graphstyle.*
1027 # experimental config: experimental.graphstyle.*
1028 styles[key] = ui.config(
1028 styles[key] = ui.config(
1029 b'experimental', b'graphstyle.%s' % name, styles[key]
1029 b'experimental', b'graphstyle.%s' % name, styles[key]
1030 )
1030 )
1031 if not styles[key]:
1031 if not styles[key]:
1032 styles[key] = None
1032 styles[key] = None
1033
1033
1034 # experimental config: experimental.graphshorten
1034 # experimental config: experimental.graphshorten
1035 state.graphshorten = ui.configbool(b'experimental', b'graphshorten')
1035 state.graphshorten = ui.configbool(b'experimental', b'graphshorten')
1036
1036
1037 formatnode_cache = {}
1037 formatnode_cache = {}
1038 for rev, type, ctx, parents in dag:
1038 for rev, type, ctx, parents in dag:
1039 char = formatnode(repo, ctx, formatnode_cache)
1039 char = formatnode(repo, ctx, formatnode_cache)
1040 copies = getcopies(ctx) if getcopies else None
1040 copies = getcopies(ctx) if getcopies else None
1041 edges = edgefn(type, char, state, rev, parents)
1041 edges = edgefn(type, char, state, rev, parents)
1042 firstedge = next(edges)
1042 firstedge = next(edges)
1043 width = firstedge[2]
1043 width = firstedge[2]
1044 displayer.show(
1044 displayer.show(
1045 ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
1045 ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
1046 )
1046 )
1047 lines = displayer.hunk.pop(rev).split(b'\n')
1047 lines = displayer.hunk.pop(rev).split(b'\n')
1048 if not lines[-1]:
1048 if not lines[-1]:
1049 del lines[-1]
1049 del lines[-1]
1050 displayer.flush(ctx)
1050 displayer.flush(ctx)
1051 for type, char, width, coldata in itertools.chain([firstedge], edges):
1051 for type, char, width, coldata in itertools.chain([firstedge], edges):
1052 graphmod.ascii(ui, state, type, char, lines, coldata)
1052 graphmod.ascii(ui, state, type, char, lines, coldata)
1053 lines = []
1053 lines = []
1054 displayer.close()
1054 displayer.close()
1055
1055
1056
1056
1057 def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
1057 def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
1058 revdag = graphmod.dagwalker(repo, revs)
1058 revdag = graphmod.dagwalker(repo, revs)
1059 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed)
1059 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed)
1060
1060
1061
1061
1062 def displayrevs(ui, repo, revs, displayer, getcopies):
1062 def displayrevs(ui, repo, revs, displayer, getcopies):
1063 for rev in revs:
1063 for rev in revs:
1064 ctx = repo[rev]
1064 ctx = repo[rev]
1065 copies = getcopies(ctx) if getcopies else None
1065 copies = getcopies(ctx) if getcopies else None
1066 displayer.show(ctx, copies=copies)
1066 displayer.show(ctx, copies=copies)
1067 displayer.flush(ctx)
1067 displayer.flush(ctx)
1068 displayer.close()
1068 displayer.close()
1069
1069
1070
1070
1071 def checkunsupportedgraphflags(pats, opts):
1071 def checkunsupportedgraphflags(pats, opts):
1072 for op in [b"newest_first"]:
1072 for op in [b"newest_first"]:
1073 if op in opts and opts[op]:
1073 if op in opts and opts[op]:
1074 raise error.Abort(
1074 raise error.Abort(
1075 _(b"-G/--graph option is incompatible with --%s")
1075 _(b"-G/--graph option is incompatible with --%s")
1076 % op.replace(b"_", b"-")
1076 % op.replace(b"_", b"-")
1077 )
1077 )
1078
1078
1079
1079
1080 def graphrevs(repo, nodes, opts):
1080 def graphrevs(repo, nodes, opts):
1081 limit = getlimit(opts)
1081 limit = getlimit(opts)
1082 nodes.reverse()
1082 nodes.reverse()
1083 if limit is not None:
1083 if limit is not None:
1084 nodes = nodes[:limit]
1084 nodes = nodes[:limit]
1085 return graphmod.nodes(repo, nodes)
1085 return graphmod.nodes(repo, nodes)
@@ -1,1111 +1,1112 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Slightly complicated template engine for commands and hgweb
8 """Slightly complicated template engine for commands and hgweb
9
9
10 This module provides low-level interface to the template engine. See the
10 This module provides low-level interface to the template engine. See the
11 formatter and cmdutil modules if you are looking for high-level functions
11 formatter and cmdutil modules if you are looking for high-level functions
12 such as ``cmdutil.rendertemplate(ctx, tmpl)``.
12 such as ``cmdutil.rendertemplate(ctx, tmpl)``.
13
13
14 Internal Data Types
14 Internal Data Types
15 -------------------
15 -------------------
16
16
17 Template keywords and functions take a dictionary of current symbols and
17 Template keywords and functions take a dictionary of current symbols and
18 resources (a "mapping") and return result. Inputs and outputs must be one
18 resources (a "mapping") and return result. Inputs and outputs must be one
19 of the following data types:
19 of the following data types:
20
20
21 bytes
21 bytes
22 a byte string, which is generally a human-readable text in local encoding.
22 a byte string, which is generally a human-readable text in local encoding.
23
23
24 generator
24 generator
25 a lazily-evaluated byte string, which is a possibly nested generator of
25 a lazily-evaluated byte string, which is a possibly nested generator of
26 values of any printable types, and will be folded by ``stringify()``
26 values of any printable types, and will be folded by ``stringify()``
27 or ``flatten()``.
27 or ``flatten()``.
28
28
29 None
29 None
30 sometimes represents an empty value, which can be stringified to ''.
30 sometimes represents an empty value, which can be stringified to ''.
31
31
32 True, False, int, float
32 True, False, int, float
33 can be stringified as such.
33 can be stringified as such.
34
34
35 wrappedbytes, wrappedvalue
35 wrappedbytes, wrappedvalue
36 a wrapper for the above printable types.
36 a wrapper for the above printable types.
37
37
38 date
38 date
39 represents a (unixtime, offset) tuple.
39 represents a (unixtime, offset) tuple.
40
40
41 hybrid
41 hybrid
42 represents a list/dict of printable values, which can also be converted
42 represents a list/dict of printable values, which can also be converted
43 to mappings by % operator.
43 to mappings by % operator.
44
44
45 hybriditem
45 hybriditem
46 represents a scalar printable value, also supports % operator.
46 represents a scalar printable value, also supports % operator.
47
47
48 revslist
48 revslist
49 represents a list of revision numbers.
49 represents a list of revision numbers.
50
50
51 mappinggenerator, mappinglist
51 mappinggenerator, mappinglist
52 represents mappings (i.e. a list of dicts), which may have default
52 represents mappings (i.e. a list of dicts), which may have default
53 output format.
53 output format.
54
54
55 mappingdict
55 mappingdict
56 represents a single mapping (i.e. a dict), which may have default output
56 represents a single mapping (i.e. a dict), which may have default output
57 format.
57 format.
58
58
59 mappingnone
59 mappingnone
60 represents None of Optional[mappable], which will be mapped to an empty
60 represents None of Optional[mappable], which will be mapped to an empty
61 string by % operation.
61 string by % operation.
62
62
63 mappedgenerator
63 mappedgenerator
64 a lazily-evaluated list of byte strings, which is e.g. a result of %
64 a lazily-evaluated list of byte strings, which is e.g. a result of %
65 operation.
65 operation.
66 """
66 """
67
67
68 from __future__ import absolute_import, print_function
68 from __future__ import absolute_import, print_function
69
69
70 import abc
70 import abc
71 import os
71 import os
72
72
73 from .i18n import _
73 from .i18n import _
74 from .pycompat import getattr
74 from .pycompat import getattr
75 from . import (
75 from . import (
76 config,
76 config,
77 encoding,
77 encoding,
78 error,
78 error,
79 parser,
79 parser,
80 pycompat,
80 pycompat,
81 templatefilters,
81 templatefilters,
82 templatefuncs,
82 templatefuncs,
83 templateutil,
83 templateutil,
84 util,
84 util,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 resourceutil,
87 resourceutil,
88 stringutil,
88 stringutil,
89 )
89 )
90
90
# template parsing

# Grammar table for the Pratt-style expression parser (parser.parser()).
# Each entry maps a token type to a 5-tuple:
#   (binding-strength, primary, prefix, infix, suffix)
# Higher binding strength binds tighter; None means the role is unsupported
# for that token.
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    b"(": (20, None, (b"group", 1, b")"), (b"func", 1, b")"), None),
    b".": (18, None, None, (b".", 18), None),
    b"%": (15, None, None, (b"%", 15), None),
    b"|": (15, None, None, (b"|", 15), None),
    b"*": (5, None, None, (b"*", 5), None),
    b"/": (5, None, None, (b"/", 5), None),
    b"+": (4, None, None, (b"+", 4), None),
    # '-' is both unary negation (prefix, binds tightly) and subtraction
    b"-": (4, None, (b"negate", 19), (b"-", 4), None),
    b"=": (3, None, None, (b"keyvalue", 3), None),
    b",": (2, None, None, (b"list", 2), None),
    b")": (0, None, None, None, None),
    b"integer": (0, b"integer", None, None, None),
    b"symbol": (0, b"symbol", None, None, None),
    b"string": (0, b"string", None, None, None),
    b"template": (0, b"template", None, None, None),
    b"end": (0, None, None, None, None),
}
112
112
113
113
def tokenize(program, start, end, term=None):
    """Parse a template expression into a stream of tokens, which must end
    with term if specified.

    Yields ``(token-type, value, position)`` triples as understood by the
    ``elements`` grammar table.
    """
    pos = start
    program = pycompat.bytestr(program)
    while pos < end:
        c = program[pos]
        if c.isspace():  # skip inter-token whitespace
            pass
        elif c in b"(=,).%|+-*/":  # handle simple operators
            yield (c, None, pos)
        elif c in b'"\'':  # handle quoted templates
            s = pos + 1
            data, pos = _parsetemplate(program, s, end, c)
            yield (b'template', data, s)
            pos -= 1
        elif c == b'r' and program[pos : pos + 2] in (b"r'", b'r"'):
            # handle quoted strings
            c = program[pos + 1]
            s = pos = pos + 2
            while pos < end:  # find closing quote
                d = program[pos]
                if d == b'\\':  # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield (b'string', program[s:pos], s)
                    break
                pos += 1
            else:
                raise error.ParseError(_(b"unterminated string"), s)
        elif c.isdigit():
            # integer literal: consume a run of digits
            s = pos
            while pos < end:
                d = program[pos]
                if not d.isdigit():
                    break
                pos += 1
            yield (b'integer', program[s:pos], s)
            pos -= 1
        elif (
            c == b'\\'
            and program[pos : pos + 2] in (br"\'", br'\"')
            or c == b'r'
            and program[pos : pos + 3] in (br"r\'", br'r\"')
        ):
            # handle escaped quoted strings for compatibility with 2.9.2-3.4,
            # where some of nested templates were preprocessed as strings and
            # then compiled. therefore, \"...\" was allowed. (issue4733)
            #
            # processing flow of _evalifliteral() at 5ab28a2e9962:
            # outer template string    -> stringify()  -> compiletemplate()
            # ------------------------    ------------    ------------------
            # {f("\\\\ {g(\"\\\"\")}"}    \\ {g("\"")}    [r'\\', {g("\"")}]
            #             ~~~~~~~~
            #             escaped quoted string
            if c == b'r':
                pos += 1
                token = b'string'
            else:
                token = b'template'
            quote = program[pos : pos + 2]
            s = pos = pos + 2
            while pos < end:  # find closing escaped quote
                if program.startswith(b'\\\\\\', pos, end):
                    pos += 4  # skip over double escaped characters
                    continue
                if program.startswith(quote, pos, end):
                    # interpret as if it were a part of an outer string
                    data = parser.unescapestr(program[s:pos])
                    if token == b'template':
                        data = _parsetemplate(data, 0, len(data))[0]
                    yield (token, data, s)
                    pos += 1
                    break
                pos += 1
            else:
                raise error.ParseError(_(b"unterminated string"), s)
        elif c.isalnum() or c in b'_':
            # symbol (identifier/keyword): letters, digits, underscores
            s = pos
            pos += 1
            while pos < end:  # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d == b"_"):
                    break
                pos += 1
            sym = program[s:pos]
            yield (b'symbol', sym, s)
            pos -= 1
        elif c == term:
            # hit the expected terminator (e.g. '}'): finish without
            # consuming past it
            yield (b'end', None, pos)
            return
        else:
            raise error.ParseError(_(b"syntax error"), pos)
        pos += 1
    if term:
        # a terminator was required but never seen
        raise error.ParseError(_(b"unterminated template expansion"), start)
    yield (b'end', None, pos)
212
212
213
213
def _parsetemplate(tmpl, start, stop, quote=b''):
    r"""
    >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"')
    ([('string', 'foo\\')], 6)
    """
    # Collect scanner output until the 'end' marker; string chunks keep
    # their type tag, template chunks are already parsed trees.
    chunks = []
    for typ, val, pos in _scantemplate(tmpl, start, stop, quote):
        if typ == b'end':
            return chunks, pos
        if typ == b'string':
            chunks.append((typ, val))
        elif typ == b'template':
            chunks.append(val)
        else:
            raise error.ProgrammingError(b'unexpected type: %s' % typ)
    raise error.ProgrammingError(b'unterminated scanning of template')
238
238
239
239
def scantemplate(tmpl, raw=False):
    r"""Scan (type, start, end) positions of outermost elements in template

    If raw=True, a backslash is not taken as an escape character just like
    r'' string in Python. Note that this is different from r'' literal in
    template in that no template fragment can appear in r'', e.g. r'{foo}'
    is a literal '{foo}', but ('{foo}', raw=True) is a template expression
    'foo'.

    >>> list(scantemplate(b'foo{bar}"baz'))
    [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)]
    >>> list(scantemplate(b'outer{"inner"}outer'))
    [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)]
    >>> list(scantemplate(b'foo\\{escaped}'))
    [('string', 0, 5), ('string', 5, 13)]
    >>> list(scantemplate(b'foo\\{escaped}', raw=True))
    [('string', 0, 4), ('template', 4, 13)]
    """
    # An element's end offset is only known when the next element begins,
    # so hold the previous (type, start) pair and emit it one step late.
    pending = None
    for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw):
        if pending is not None:
            yield pending + (pos,)
        if typ == b'end':
            return
        pending = (typ, pos)
    raise error.ProgrammingError(b'unterminated scanning of template')
267
267
268
268
def _scantemplate(tmpl, start, stop, quote=b'', raw=False):
    """Parse template string into chunks of strings and template expressions

    Yields ``(type, data, position)`` triples: ``string`` for literal text,
    ``template`` for a parsed ``{...}`` expansion, and a final ``end``.
    If *quote* is given, scanning stops at that closing quote character.
    """
    sepchars = b'{' + quote
    # in raw mode, backslashes are passed through untouched
    unescape = [parser.unescapestr, pycompat.identity][raw]
    pos = start
    p = parser.parser(elements)
    try:
        while pos < stop:
            # nearest occurrence of '{' or the closing quote; the key makes
            # find()'s -1 (not found) sort after any real offset
            n = min(
                (tmpl.find(c, pos, stop) for c in pycompat.bytestr(sepchars)),
                key=lambda n: (n < 0, n),
            )
            if n < 0:
                # no separator left: the rest is one literal string chunk
                yield (b'string', unescape(tmpl[pos:stop]), pos)
                pos = stop
                break
            c = tmpl[n : n + 1]
            bs = 0  # count leading backslashes
            if not raw:
                bs = (n - pos) - len(tmpl[pos:n].rstrip(b'\\'))
            if bs % 2 == 1:
                # escaped (e.g. '\{', '\\\{', but not '\\{')
                yield (b'string', unescape(tmpl[pos : n - 1]) + c, pos)
                pos = n + 1
                continue
            if n > pos:
                # literal text preceding the separator
                yield (b'string', unescape(tmpl[pos:n]), pos)
            if c == quote:
                yield (b'end', None, n + 1)
                return

            # '{': parse the expression up to the matching '}'
            parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, b'}'))
            if not tmpl.startswith(b'}', pos):
                raise error.ParseError(_(b"invalid token"), pos)
            yield (b'template', parseres, n)
            pos += 1

        if quote:
            # we ran out of input before seeing the closing quote
            raise error.ParseError(_(b"unterminated string"), start)
    except error.ParseError as inst:
        # attach a caret hint pointing at the failure location
        _addparseerrorhint(inst, tmpl)
        raise
    yield (b'end', None, pos)
312
312
313
313
def _addparseerrorhint(inst, tmpl):
    """Attach a caret-annotated copy of *tmpl* to ParseError *inst* as a hint."""
    if len(inst.args) <= 1:
        return  # no location
    loc = inst.args[1]
    # One-char newlines are rendered as the two-char literal r'\n' below,
    # so shift the caret right by the number of newlines before the error.
    newlines_before = tmpl[:loc].count(b'\n')
    rendered = tmpl.replace(b'\n', br'\n')
    # The hint is printed after an open paren, hence the extra +1 column
    # to line the caret up with the failing spot.
    caret_col = loc + 1 + newlines_before
    inst.hint = rendered + b'\n' + b' ' * caret_col + b'^ ' + _(b'here')
328
328
329
329
def _unnesttemplatelist(tree):
    """Expand list of templates to node tuple

    >>> def f(tree):
    ...     print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree))))
    >>> f((b'template', []))
    (string '')
    >>> f((b'template', [(b'string', b'foo')]))
    (string 'foo')
    >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')]))
    (template
      (string 'foo')
      (symbol 'rev'))
    >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str
    (template
      (symbol 'rev'))
    >>> f((b'template', [(b'template', [(b'string', b'foo')])]))
    (string 'foo')
    """
    if not isinstance(tree, tuple):
        return tree  # leaf value; nothing to rewrite
    op = tree[0]
    if op != b'template':
        # recurse into operands, keeping the operator in place
        return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])

    assert len(tree) == 2
    children = tuple(_unnesttemplatelist(x) for x in tree[1])
    if not children:
        return (b'string', b'')  # empty template ""
    if len(children) == 1 and children[0][0] == b'string':
        # fast path for string with no template fragment "x"
        return children[0]
    return (op,) + children
363
363
364
364
def parse(tmpl):
    """Parse a full (unquoted) template string into a tree."""
    nodes, consumed = _parsetemplate(tmpl, 0, len(tmpl))
    assert consumed == len(tmpl), b'unquoted template should be consumed'
    return _unnesttemplatelist((b'template', nodes))
370
370
371
371
def parseexpr(expr):
    """Parse a template expression into tree

    >>> parseexpr(b'"foo"')
    ('string', 'foo')
    >>> parseexpr(b'foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> parseexpr(b'foo(')
    Traceback (most recent call last):
      ...
    ParseError: ('not a prefix: end', 4)
    >>> parseexpr(b'"foo" "bar"')
    Traceback (most recent call last):
      ...
    ParseError: ('invalid token', 7)
    """
    try:
        tree = _parseexpr(expr)
    except error.ParseError as inst:
        # decorate the error with a caret pointing at the failure location
        _addparseerrorhint(inst, expr)
        raise
    return tree
393
393
394
394
def _parseexpr(expr):
    """Parse *expr* without error decoration; raises ParseError on junk."""
    tree, pos = parser.parser(elements).parse(tokenize(expr, 0, len(expr)))
    if pos != len(expr):
        # trailing input after a complete expression is an error
        raise error.ParseError(_(b'invalid token'), pos)
    return _unnesttemplatelist(tree)
401
401
402
402
def prettyformat(tree):
    """Return a human-readable multi-line rendering of a parsed tree."""
    leaftypes = (b'integer', b'string', b'symbol')
    return parser.prettyformat(tree, leaftypes)
405
405
406
406
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair."""
    if not exp:
        raise error.ParseError(_(b"missing argument"))
    # dispatch on the node's operator token
    return curmethods[exp[0]](exp, context)
413
413
414
414
415 # template evaluation
415 # template evaluation
416
416
417
417
def getsymbol(exp):
    """Return the symbol name carried by *exp*; ParseError if not a symbol."""
    typ = exp[0]
    if typ != b'symbol':
        raise error.ParseError(_(b"expected a symbol, got '%s'") % typ)
    return exp[1]
422
422
423
423
def getlist(x):
    """Flatten a left-leaning parsed 'list' node into a Python list.

    A falsy *x* yields []; a non-list node yields a one-element list.
    """
    if not x:
        return []
    # walk down the chain of (b'list', rest, item) nodes iteratively,
    # collecting items in reverse, then flip to source order
    items = []
    node = x
    while node and node[0] == b'list':
        items.append(node[2])
        node = node[1]
    if node:
        items.append(node)
    items.reverse()
    return items
430
430
431
431
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    typ = exp[0]
    if typ == b'symbol':
        # unlike runsymbol(), here 'symbol' is always taken as template name
        # even if it exists in mapping. this allows us to override mapping
        # by web templates, e.g. 'changelogtag' is redefined in map file.
        return context._load(exp[1])
    if typ in (b'template', b'string'):
        return compileexp(exp, context, methods)
    raise error.ParseError(_(b"expected template specifier"))
443
443
444
444
def _runrecursivesymbol(context, mapping, key):
    """Evaluator installed for a template while it is being compiled;
    reaching it means the template refers to itself."""
    msg = _(b"recursive reference '%s' in template") % key
    raise error.Abort(msg)
447
447
448
448
def buildtemplate(exp, context):
    """Compile a 'template' node: each child expression is compiled and the
    sequence is run by runtemplate."""
    compiled = [compileexp(child, context, methods) for child in exp[1:]]
    return (templateutil.runtemplate, compiled)
452
452
453
453
def buildfilter(exp, context):
    """Compile a '|' node: the right side names a filter, or a function
    applied to the single left-side argument."""
    name = getsymbol(exp[2])
    if name in context._filters:
        arg = compileexp(exp[1], context, methods)
        return (templateutil.runfilter, (arg, context._filters[name]))
    if name in context._funcs:
        func = context._funcs[name]
        args = _buildfuncargs(exp[1], context, methods, name, func._argspec)
        return (func, args)
    raise error.ParseError(_(b"unknown function '%s'") % name)
465
465
466
466
def buildmap(exp, context):
    """Compile a '%' node: apply the template on the right over each item
    produced by the expression on the left."""
    source = compileexp(exp[1], context, methods)
    target = gettemplate(exp[2], context)
    return (templateutil.runmap, (source, target))
471
471
472
472
def buildmember(exp, context):
    """Compile a '.' node: member access on the left-side expression."""
    obj = compileexp(exp[1], context, methods)
    member = getsymbol(exp[2])
    return (templateutil.runmember, (obj, member))
477
477
478
478
def buildnegate(exp, context):
    """Compile a unary-minus node."""
    operand = compileexp(exp[1], context, exprmethods)
    return (templateutil.runnegate, operand)
482
482
483
483
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic node; *func* performs the operation."""
    lhs = compileexp(exp[1], context, exprmethods)
    rhs = compileexp(exp[2], context, exprmethods)
    return (templateutil.runarithmetic, (func, lhs, rhs))
488
488
489
489
def buildfunc(exp, context):
    """Compile a function-call node: a registered function, or a filter
    invoked in call syntax with exactly one argument."""
    name = getsymbol(exp[1])
    if name in context._funcs:
        func = context._funcs[name]
        args = _buildfuncargs(exp[2], context, exprmethods, name, func._argspec)
        return (func, args)
    if name in context._filters:
        args = _buildfuncargs(exp[2], context, exprmethods, name, argspec=None)
        if len(args) != 1:
            raise error.ParseError(_(b"filter %s expects one argument") % name)
        return (templateutil.runfilter, (args[0], context._filters[name]))
    raise error.ParseError(_(b"unknown function '%s'") % name)
503
503
504
504
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (templateutil.runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
    ['l', 'k']
    >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
    >>> list(args.keys()), list(args[b'opts'].keys())
    (['opts'], ['opts', 'k'])
    """

    def todict(pairs):
        # compile each named argument, preserving insertion order
        return util.sortdict(
            (name, compileexp(tree, context, curmethods))
            for name, tree in pycompat.iteritems(pairs)
        )

    def tolist(trees):
        return [compileexp(tree, context, curmethods) for tree in trees]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return tolist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(
        getlist(exp),
        funcname,
        argspec,
        keyvaluenode=b'keyvalue',
        keynode=b'symbol',
    )
    compargs = util.sortdict()
    if varkey:
        # variadic positional arguments (e.g. '*args')
        compargs[varkey] = tolist(treeargs.pop(varkey))
    if optkey:
        # optional keyword arguments collected under '**opts'
        compargs[optkey] = todict(treeargs.pop(optkey))
    compargs.update(todict(treeargs))
    return compargs
550
550
551
551
def buildkeyvaluepair(exp, context):
    """Reject a bare key=value pair.

    Key-value pairs are only meaningful inside a function argument list
    (handled by _buildfuncargs); anywhere else they are a parse error.
    The second parameter was previously misspelled ``content``; it is
    unused but renamed ``context`` for consistency with the other
    build* compilers dispatched from ``exprmethods``.
    """
    raise error.ParseError(_(b"can't use a key-value pair in this context"))
554
554
555
555
def buildlist(exp, context):
    """Reject a bare list node; lists are only valid as call arguments."""
    hint = _(b'check place of comma and parens')
    raise error.ParseError(_(b"can't use a list in this context"), hint=hint)
561
561
562
562
# methods to interpret function arguments or inner expressions (e.g. {_(x)});
# maps a parse-tree node type to its compiler
exprmethods = {
    b"integer": lambda e, c: (templateutil.runinteger, e[1]),
    b"string": lambda e, c: (templateutil.runstring, e[1]),
    b"symbol": lambda e, c: (templateutil.runsymbol, e[1]),
    b"template": buildtemplate,
    b"group": lambda e, c: compileexp(e[1], c, exprmethods),
    b".": buildmember,
    b"|": buildfilter,
    b"%": buildmap,
    b"func": buildfunc,
    b"keyvalue": buildkeyvaluepair,
    b"list": buildlist,
    # arithmetic; '/' is floor division, matching integer-only semantics
    b"+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    b"-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    b"negate": buildnegate,
    b"*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    b"/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
}

# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
methods = exprmethods.copy()
methods[b"integer"] = exprmethods[b"symbol"]  # '{1}' as variable
586
586
587
587
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""

    _section = _(b'template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        if op == b'func' and tree[1][0] == b'symbol':
            # func(arg, ...) form: the symbol node holds the name
            return tree[1][1], getlist(tree[2])
        if op == b'|' and tree[2][0] == b'symbol':
            # arg|filter form is equivalent to filter(arg)
            return tree[2][1], [tree[1]]
        return None
602
602
603
603
def expandaliases(tree, aliases):
    """Return a new tree with the given aliases expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
608
608
609
609
610 # template engine
610 # template engine
611
611
612
612
def unquotestring(s):
    """Strip one pair of matching single or double quotes; return ``s``
    unmodified when it is not quoted."""
    if len(s) >= 2 and s[0] == s[-1] and s[0] in b"'\"":
        return s[1:-1]
    return s
618
618
619
619
class resourcemapper(object):  # pytype: disable=ignored-metaclass
    """Abstract interface for looking up internal template resources
    (data such as caches that user templates cannot reach directly)."""

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def availablekeys(self, mapping):
        """Return a set of available resource keys based on the given mapping"""

    @abc.abstractmethod
    def knownkeys(self):
        """Return a set of supported resource keys"""

    @abc.abstractmethod
    def lookup(self, mapping, key):
        """Return a resource for the key if available; otherwise None"""

    @abc.abstractmethod
    def populatemap(self, context, origmapping, newmapping):
        """Return a dict of additional mapping items which should be paired
        with the given new mapping"""
641
641
642
642
class nullresourcemapper(resourcemapper):
    """Resource mapper that exposes no resources at all; used as the
    default when an engine is built without one."""

    def availablekeys(self, mapping):
        return set()

    def knownkeys(self):
        return set()

    def lookup(self, mapping, key):
        # there are no resources, so every lookup misses
        return None

    def populatemap(self, context, origmapping, newmapping):
        return {}
655
655
656
656
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, resources=None):
        self._loader = loader
        self._filters = filters if filters is not None else {}
        self._funcs = templatefuncs.funcs  # make this a parameter if needed
        self._defaults = defaults if defaults is not None else {}
        if resources is None:
            resources = nullresourcemapper()
        self._resources = resources
        self._cache = {}  # key: (func, data)
        self._tmplcache = {}  # literal template: (func, data)

    def overlaymap(self, origmapping, newmapping):
        """Create combined mapping from the original mapping and partial
        mapping to override the original"""
        # do not copy symbols which overrides the defaults depending on
        # new resources, so the defaults will be re-evaluated (issue5612)
        knownres = self._resources.knownkeys()
        newres = self._resources.availablekeys(newmapping)
        combined = {
            k: v
            for k, v in pycompat.iteritems(origmapping)
            if (
                k in knownres  # not a symbol per self.symbol()
                or newres.isdisjoint(self._defaultrequires(k))
            )
        }
        combined.update(newmapping)
        combined.update(
            self._resources.populatemap(self, origmapping, newmapping)
        )
        return combined

    def _defaultrequires(self, key):
        """Resource keys required by the specified default symbol function"""
        value = self._defaults.get(key)
        if value is None or not callable(value):
            return ()
        return getattr(value, '_requires', ())

    def symbol(self, mapping, key):
        """Resolve symbol to value or function; None if nothing found"""
        value = None
        if key not in self._resources.knownkeys():
            # resource keys are reserved; never treat them as symbols
            value = mapping.get(key)
        if value is None:
            value = self._defaults.get(key)
        return value

    def availableresourcekeys(self, mapping):
        """Return a set of available resource keys based on the given mapping"""
        return self._resources.availablekeys(mapping)

    def knownresourcekeys(self):
        """Return a set of supported resource keys"""
        return self._resources.knownkeys()

    def resource(self, mapping, key):
        """Return internal data (e.g. cache) used for keyword/function
        evaluation"""
        value = self._resources.lookup(mapping, key)
        if value is None:
            raise templateutil.ResourceUnavailable(
                _(b'template resource not available: %s') % key
            )
        return value

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            x = self._loader(t)
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                self._cache[t] = compileexp(x, self, methods)
            except:  # re-raises
                del self._cache[t]
                raise
        return self._cache[t]

    def _parse(self, tmpl):
        """Parse and cache a literal template"""
        if tmpl not in self._tmplcache:
            x = parse(tmpl)
            self._tmplcache[tmpl] = compileexp(x, self, methods)
        return self._tmplcache[tmpl]

    def preload(self, t):
        """Load, parse, and cache the specified template if available"""
        try:
            self._load(t)
            return True
        except templateutil.TemplateNotFound:
            return False

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return self._expand(func, data, mapping)

    def expand(self, tmpl, mapping):
        """Perform expansion over a literal template

        No user aliases will be expanded since this is supposed to be called
        with an internal template string.
        """
        func, data = self._parse(tmpl)
        return self._expand(func, data, mapping)

    def _expand(self, func, data, mapping):
        # populate additional items only if they don't exist in the given
        # mapping. this is slightly different from overlaymap() because the
        # initial 'revcache' may contain pre-computed items.
        extramapping = self._resources.populatemap(self, {}, mapping)
        if extramapping:
            extramapping.update(mapping)
            mapping = extramapping
        return templateutil.flatten(self, mapping, func(self, mapping, data))
800
800
801
801
def stylelist():
    """Return a comma-separated listing of the available map-cmdline
    styles found in the template directory, or a help message when no
    template directory exists."""
    path = templatedir()
    if not path:
        return _(b'no templates found, try `hg debuginstall` for more info')
    names = []
    for filename in os.listdir(path):
        parts = filename.split(b".")
        # skip editor/merge leftovers like foo.orig and foo.rej
        if parts[-1] in (b'orig', b'rej'):
            continue
        if parts[0] == b"map-cmdline":
            names.append(parts[1])
    return b", ".join(sorted(names))
815
815
816
816
def _open_mapfile(mapfile):
    """Open a style map file for reading; abort with the list of
    available styles when it does not exist."""
    if not os.path.exists(mapfile):
        raise error.Abort(
            _(b"style '%s' not found") % mapfile,
            hint=_(b"available styles: %s") % stylelist(),
        )
    return util.posixfile(mapfile, b'rb')
824
824
825
825
def _readmapfile(fp, mapfile):
    """Load template elements from the given map file

    Returns (cache, tmap, aliases): preloaded template fragments, a map
    of template names to file paths, and template alias pairs.
    """
    base = os.path.dirname(mapfile)
    conf = config.config()

    def include(rel, remap, sections):
        # resolve an %include: first next to the map file, then as a
        # packaged resource, finally in the installed template directory
        subresource = None
        if base:
            abspath = os.path.normpath(os.path.join(base, rel))
            if os.path.isfile(abspath):
                subresource = util.posixfile(abspath, b'rb')
        if not subresource:
            if pycompat.ossep not in rel:
                abspath = rel
                subresource = resourceutil.open_resource(
                    b'mercurial.templates', rel
                )
            else:
                tdir = templatedir()
                if tdir:
                    abspath = os.path.normpath(os.path.join(tdir, rel))
                    if os.path.isfile(abspath):
                        subresource = util.posixfile(abspath, b'rb')
        if subresource:
            data = subresource.read()
            conf.parse(
                abspath, data, sections=sections, remap=remap, include=include,
            )

    data = fp.read()
    conf.parse(mapfile, data, remap={b'': b'templates'}, include=include)

    cache = {}
    tmap = {}
    aliases = []

    val = conf.get(b'templates', b'__base__')
    if val and val[0] not in b"'\"":
        # treat as a pointer to a base class for this style
        path = os.path.normpath(os.path.join(base, val))

        # fallback check in template paths
        if not os.path.exists(path):
            tdir = templatedir()
            if tdir is not None:
                p2 = os.path.normpath(os.path.join(tdir, val))
                if os.path.isfile(p2):
                    path = p2
                else:
                    p3 = os.path.normpath(os.path.join(p2, b"map"))
                    if os.path.isfile(p3):
                        path = p3

        # inherit everything from the base style, then override below
        fp = _open_mapfile(path)
        cache, tmap, aliases = _readmapfile(fp, path)

    for key, val in conf[b'templates'].items():
        if not val:
            raise error.ParseError(
                _(b'missing value'), conf.source(b'templates', key)
            )
        if val[0] in b"'\"":
            # quoted value: an inline template fragment
            if val[0] != val[-1]:
                raise error.ParseError(
                    _(b'unmatched quotes'), conf.source(b'templates', key)
                )
            cache[key] = unquotestring(val)
        elif key != b'__base__':
            # unquoted value: a path to a template file
            tmap[key] = os.path.join(base, val)
    aliases.extend(conf[b'templatealias'].items())
    return cache, tmap, aliases
897
897
898
898
class loader(object):
    """Load template fragments optionally from a map file"""

    def __init__(self, cache, aliases):
        # self.cache maps template names to raw template text
        self.cache = {} if cache is None else cache.copy()
        # self._map maps template names to file paths, loaded lazily
        self._map = {}
        self._aliasmap = _aliasrules.buildmap(aliases)

    def __contains__(self, key):
        return key in self.cache or key in self._map

    def load(self, t):
        """Get parsed tree for the given template name. Use a local cache."""
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self._map[t])
            except KeyError as err:
                raise templateutil.TemplateNotFound(
                    _(b'"%s" not in template map') % err.args[0]
                )
            except IOError as err:
                # annotate the OS error with the offending template path
                reason = _(b'template file %s: %s') % (
                    self._map[t],
                    stringutil.forcebytestr(err.args[1]),
                )
                raise IOError(err.args[0], encoding.strfromlocal(reason))
        return self._parse(self.cache[t])

    def _parse(self, tmpl):
        # parse a literal template and expand user aliases, if any
        tree = parse(tmpl)
        if self._aliasmap:
            tree = _aliasrules.expand(self._aliasmap, tree)
        return tree

    def _findsymbolsused(self, tree, syms):
        # walk the parse tree accumulating keyword names into syms[0]
        # and filter/function names into syms[1]
        if not tree:
            return
        op = tree[0]
        if op == b'symbol':
            s = tree[1]
            if s in syms[0]:
                return  # avoid recursion: s -> cache[s] -> s
            syms[0].add(s)
            if s in self.cache or s in self._map:
                # s may be a reference for named template
                self._findsymbolsused(self.load(s), syms)
            return
        if op in {b'integer', b'string'}:
            return
        # '{arg|func}' == '{func(arg)}'
        if op == b'|':
            syms[1].add(getsymbol(tree[2]))
            self._findsymbolsused(tree[1], syms)
            return
        if op == b'func':
            syms[1].add(getsymbol(tree[1]))
            self._findsymbolsused(tree[2], syms)
            return
        for subtree in tree[1:]:
            self._findsymbolsused(subtree, syms)

    def symbolsused(self, t):
        """Look up (keywords, filters/functions) referenced from the name
        template 't'

        This may load additional templates from the map file.
        """
        syms = (set(), set())
        self._findsymbolsused(self.load(t), syms)
        return syms
971
971
972
972
973 class templater(object):
973 class templater(object):
974 def __init__(
974 def __init__(
975 self,
975 self,
976 filters=None,
976 filters=None,
977 defaults=None,
977 defaults=None,
978 resources=None,
978 resources=None,
979 cache=None,
979 cache=None,
980 aliases=(),
980 aliases=(),
981 minchunk=1024,
981 minchunk=1024,
982 maxchunk=65536,
982 maxchunk=65536,
983 ):
983 ):
984 """Create template engine optionally with preloaded template fragments
984 """Create template engine optionally with preloaded template fragments
985
985
986 - ``filters``: a dict of functions to transform a value into another.
986 - ``filters``: a dict of functions to transform a value into another.
987 - ``defaults``: a dict of symbol values/functions; may be overridden
987 - ``defaults``: a dict of symbol values/functions; may be overridden
988 by a ``mapping`` dict.
988 by a ``mapping`` dict.
989 - ``resources``: a resourcemapper object to look up internal data
989 - ``resources``: a resourcemapper object to look up internal data
990 (e.g. cache), inaccessible from user template.
990 (e.g. cache), inaccessible from user template.
991 - ``cache``: a dict of preloaded template fragments.
991 - ``cache``: a dict of preloaded template fragments.
992 - ``aliases``: a list of alias (name, replacement) pairs.
992 - ``aliases``: a list of alias (name, replacement) pairs.
993
993
994 self.cache may be updated later to register additional template
994 self.cache may be updated later to register additional template
995 fragments.
995 fragments.
996 """
996 """
997 allfilters = templatefilters.filters.copy()
997 allfilters = templatefilters.filters.copy()
998 if filters:
998 if filters:
999 allfilters.update(filters)
999 allfilters.update(filters)
1000 self._loader = loader(cache, aliases)
1000 self._loader = loader(cache, aliases)
1001 self._proc = engine(self._loader.load, allfilters, defaults, resources)
1001 self._proc = engine(self._loader.load, allfilters, defaults, resources)
1002 self._minchunk, self._maxchunk = minchunk, maxchunk
1002 self._minchunk, self._maxchunk = minchunk, maxchunk
1003
1003
1004 @classmethod
1004 @classmethod
1005 def frommapfile(
1005 def frommapfile(
1006 cls,
1006 cls,
1007 mapfile,
1007 mapfile,
1008 fp=None,
1008 fp=None,
1009 filters=None,
1009 filters=None,
1010 defaults=None,
1010 defaults=None,
1011 resources=None,
1011 resources=None,
1012 cache=None,
1012 cache=None,
1013 minchunk=1024,
1013 minchunk=1024,
1014 maxchunk=65536,
1014 maxchunk=65536,
1015 ):
1015 ):
1016 """Create templater from the specified map file"""
1016 """Create templater from the specified map file"""
1017 t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
1017 t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
1018 if not fp:
1018 if not fp:
1019 fp = _open_mapfile(mapfile)
1019 fp = _open_mapfile(mapfile)
1020 cache, tmap, aliases = _readmapfile(fp, mapfile)
1020 cache, tmap, aliases = _readmapfile(fp, mapfile)
1021 t._loader.cache.update(cache)
1021 t._loader.cache.update(cache)
1022 t._loader._map = tmap
1022 t._loader._map = tmap
1023 t._loader._aliasmap = _aliasrules.buildmap(aliases)
1023 t._loader._aliasmap = _aliasrules.buildmap(aliases)
1024 return t
1024 return t
1025
1025
1026 def __contains__(self, key):
1026 def __contains__(self, key):
1027 return key in self._loader
1027 return key in self._loader
1028
1028
1029 @property
1029 @property
1030 def cache(self):
1030 def cache(self):
1031 return self._loader.cache
1031 return self._loader.cache
1032
1032
    # for highlight extension to insert one-time 'colorize' filter
    @property
    def _filters(self):
        # Filter table used by the template engine; exposed so extensions can
        # install additional filters at runtime.
        return self._proc._filters
1037
1037
    @property
    def defaults(self):
        # Default symbol table (keywords/functions) held by the engine.
        return self._proc._defaults
1041
1041
    def load(self, t):
        """Get parsed tree for the given template name. Use a local cache."""
        return self._loader.load(t)
1045
1045
    def symbolsuseddefault(self):
        """Look up (keywords, filters/functions) referenced from the default
        unnamed template

        This may load additional templates from the map file.
        """
        # The default template is registered under the empty name b''.
        return self.symbolsused(b'')
1053
1053
    def symbolsused(self, t):
        """Look up (keywords, filters/functions) referenced from the name
        template 't'

        This may load additional templates from the map file.
        """
        return self._loader.symbolsused(t)
1061
1061
    def renderdefault(self, mapping):
        """Render the default unnamed template and return result as string"""
        # b'' is the name of the default (unnamed) template.
        return self.render(b'', mapping)
1065
1065
1066 def render(self, t, mapping):
1066 def render(self, t, mapping):
1067 """Render the specified named template and return result as string"""
1067 """Render the specified named template and return result as string"""
1068 return b''.join(self.generate(t, mapping))
1068 return b''.join(self.generate(t, mapping))
1069
1069
1070 def generate(self, t, mapping):
1070 def generate(self, t, mapping):
1071 """Return a generator that renders the specified named template and
1071 """Return a generator that renders the specified named template and
1072 yields chunks"""
1072 yields chunks"""
1073 stream = self._proc.process(t, mapping)
1073 stream = self._proc.process(t, mapping)
1074 if self._minchunk:
1074 if self._minchunk:
1075 stream = util.increasingchunks(
1075 stream = util.increasingchunks(
1076 stream, min=self._minchunk, max=self._maxchunk
1076 stream, min=self._minchunk, max=self._maxchunk
1077 )
1077 )
1078 return stream
1078 return stream
1079
1079
1080
1080
def templatedir():
    '''return the directory used for template files, or None.'''
    path = os.path.join(resourceutil.datapath, b'templates')
    path = os.path.normpath(path)
    if os.path.isdir(path):
        return path
    return None
1085
1085
1086
1086
def open_template(name, templatepath=None):
    '''returns a file-like object for the given template, and its full path

    If the name is a relative path and we're in a frozen binary, the template
    will be read from the mercurial.templates package instead. The returned path
    will then be the relative path.

    Raises EnvironmentError if the file cannot be opened, or ImportError/OSError
    when a frozen resource is missing; callers wanting a non-raising variant
    should use try_open_template().
    '''
    if templatepath is None:
        templatepath = templatedir()
    if templatepath is not None or os.path.isabs(name):
        # BUG FIX: the condition above admits templatepath=None together with
        # an absolute 'name', and os.path.join(None, name) raises TypeError.
        # Fall back to the bare absolute name in that case.
        if templatepath is not None:
            f = os.path.join(templatepath, name)
        else:
            f = name
        return f, open(f, mode='rb')
    else:
        # Frozen binary: resolve the relative 'name' inside the
        # mercurial.templates package instead of the filesystem.
        name_parts = pycompat.sysstr(name).split('/')
        package_name = '.'.join(['mercurial', 'templates'] + name_parts[:-1])
        return (
            name,
            resourceutil.open_resource(package_name, name_parts[-1]),
        )
def try_open_template(name, templatepath=None):
    """Like open_template(), but returns (None, None) instead of raising

    Swallows the EnvironmentError raised for a missing/unreadable template
    file and the ImportError raised for a missing frozen resource package.
    """
    try:
        return open_template(name, templatepath)
    except (EnvironmentError, ImportError):
        return None, None
General Comments 0
You need to be logged in to leave comments. Login now