##// END OF EJS Templates
simplemerge: convert `merge_lines()` away from generator...
Martin von Zweigbergk -
r49405:ce8c82a5 default
parent child Browse files
Show More
@@ -1,4874 +1,4874 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 deltas as deltautil,
106 deltas as deltautil,
107 nodemap,
107 nodemap,
108 rewrite,
108 rewrite,
109 sidedata,
109 sidedata,
110 )
110 )
111
111
release = lockmod.release

# Start from the strip extension's command table so its commands are
# available alongside the debug* commands registered below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
117
117
118
118
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two call forms: with an explicit index file (INDEX REV1 REV2) a
    # standalone revlog is opened, otherwise the current repo's changelog
    # is used.
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
138
138
139
139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155
155
156
156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
163
163
164
164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        # merge_lines() returns a tuple; the merged lines are
                        # its first element (it is no longer a generator).
                        ml = [l.strip() for l in m3.merge_lines()[0]]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
340
340
341
341
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup, one line per delta

    With ``all`` set, every delta's metadata columns are printed for the
    changelog, the manifest and each filelog; otherwise only the node of
    each changelog delta is shown.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter(callable, sentinel): keep reading filelog headers until the
        # unbundler returns an empty dict.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
381
381
382
382
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report rather than abort so the rest of the bundle still dumps
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
405
405
406
406
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
415
415
416
416
def _quasirepr(thing):
    """return a bytes repr of ``thing`` with deterministic dict ordering

    Mapping types are rendered with sorted keys so the output is stable
    across runs; anything else falls back to ``repr()``.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))
423
423
424
424
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type acts as a filter; empty means "show every part"
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
447
447
448
448
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only prints the bundlespec, nothing else
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
471
471
472
472
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection, even if capability queries fail
        peer.close()
492
492
493
493
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the precomputed files information from changelog sidedata
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
543
543
544
544
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # report every inconsistency before aborting so the user sees them all
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
558
558
559
559
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
572
572
573
573
def _debugdisplaycolor(ui):
    """print every color label known to the (copied) ui, in its own color"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
590
590
591
591
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
605
605
606
606
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # stream bundles copy revlogs wholesale, so phase boundaries
        # (including secret) cannot be honored -- warn instead of aborting
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
628
628
629
629
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # 'n' = node event (rev, [parent revs]); 'l' = label event
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its tag names for label events
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # 'a' = annotation event, emitted on branch change
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
699
699
700
700
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the storage is implied, so the sole positional
        # argument is the revision, not a file
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: bytes as stored, without flag processing
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
716
716
717
717
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is a (unixtime, tz-offset) pair
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
736
736
737
737
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # classify the delta of ``rev`` and measure its whole chain
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parent revs
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, a delta is always against the previous
            # revision, so it is either a full text or 'prev'
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains in order of first appearance of their base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this rev is its own base
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: slice the chain into the hunks a
            # real read would perform and total up the bytes touched
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
918
918
919
919
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    # --nodates is the deprecated spelling of --no-dates
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # padded to the width of the formatted timestamp below
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
974
974
975
975
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore-patterns hash is the trailing bytes of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
990
990
991
991
992 @command(
992 @command(
993 b'debugdiscovery',
993 b'debugdiscovery',
994 [
994 [
995 (b'', b'old', None, _(b'use old-style discovery')),
995 (b'', b'old', None, _(b'use old-style discovery')),
996 (
996 (
997 b'',
997 b'',
998 b'nonheads',
998 b'nonheads',
999 None,
999 None,
1000 _(b'use old-style discovery with non-heads included'),
1000 _(b'use old-style discovery with non-heads included'),
1001 ),
1001 ),
1002 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1002 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1003 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1003 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1004 (
1004 (
1005 b'',
1005 b'',
1006 b'local-as-revs',
1006 b'local-as-revs',
1007 b"",
1007 b"",
1008 b'treat local has having these revisions only',
1008 b'treat local has having these revisions only',
1009 ),
1009 ),
1010 (
1010 (
1011 b'',
1011 b'',
1012 b'remote-as-revs',
1012 b'remote-as-revs',
1013 b"",
1013 b"",
1014 b'use local as remote, with only these these revisions',
1014 b'use local as remote, with only these these revisions',
1015 ),
1015 ),
1016 ]
1016 ]
1017 + cmdutil.remoteopts
1017 + cmdutil.remoteopts
1018 + cmdutil.formatteropts,
1018 + cmdutil.formatteropts,
1019 _(b'[--rev REV] [OTHER]'),
1019 _(b'[--rev REV] [OTHER]'),
1020 )
1020 )
1021 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1021 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1022 """runs the changeset discovery protocol in isolation
1022 """runs the changeset discovery protocol in isolation
1023
1023
1024 The local peer can be "replaced" by a subset of the local repository by
1024 The local peer can be "replaced" by a subset of the local repository by
1025 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1025 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1026 be "replaced" by a subset of the local repository using the
1026 be "replaced" by a subset of the local repository using the
1027 `--local-as-revs` flag. This is useful to efficiently debug pathological
1027 `--local-as-revs` flag. This is useful to efficiently debug pathological
1028 discovery situation.
1028 discovery situation.
1029
1029
1030 The following developer oriented config are relevant for people playing with this command:
1030 The following developer oriented config are relevant for people playing with this command:
1031
1031
1032 * devel.discovery.exchange-heads=True
1032 * devel.discovery.exchange-heads=True
1033
1033
1034 If False, the discovery will not start with
1034 If False, the discovery will not start with
1035 remote head fetching and local head querying.
1035 remote head fetching and local head querying.
1036
1036
1037 * devel.discovery.grow-sample=True
1037 * devel.discovery.grow-sample=True
1038
1038
1039 If False, the sample size used in set discovery will not be increased
1039 If False, the sample size used in set discovery will not be increased
1040 through the process
1040 through the process
1041
1041
1042 * devel.discovery.grow-sample.dynamic=True
1042 * devel.discovery.grow-sample.dynamic=True
1043
1043
1044 When discovery.grow-sample.dynamic is True, the default, the sample size is
1044 When discovery.grow-sample.dynamic is True, the default, the sample size is
1045 adapted to the shape of the undecided set (it is set to the max of:
1045 adapted to the shape of the undecided set (it is set to the max of:
1046 <target-size>, len(roots(undecided)), len(heads(undecided)
1046 <target-size>, len(roots(undecided)), len(heads(undecided)
1047
1047
1048 * devel.discovery.grow-sample.rate=1.05
1048 * devel.discovery.grow-sample.rate=1.05
1049
1049
1050 the rate at which the sample grow
1050 the rate at which the sample grow
1051
1051
1052 * devel.discovery.randomize=True
1052 * devel.discovery.randomize=True
1053
1053
1054 If andom sampling during discovery are deterministic. It is meant for
1054 If andom sampling during discovery are deterministic. It is meant for
1055 integration tests.
1055 integration tests.
1056
1056
1057 * devel.discovery.sample-size=200
1057 * devel.discovery.sample-size=200
1058
1058
1059 Control the initial size of the discovery sample
1059 Control the initial size of the discovery sample
1060
1060
1061 * devel.discovery.sample-size.initial=100
1061 * devel.discovery.sample-size.initial=100
1062
1062
1063 Control the initial size of the discovery for initial change
1063 Control the initial size of the discovery for initial change
1064 """
1064 """
1065 opts = pycompat.byteskwargs(opts)
1065 opts = pycompat.byteskwargs(opts)
1066 unfi = repo.unfiltered()
1066 unfi = repo.unfiltered()
1067
1067
1068 # setup potential extra filtering
1068 # setup potential extra filtering
1069 local_revs = opts[b"local_as_revs"]
1069 local_revs = opts[b"local_as_revs"]
1070 remote_revs = opts[b"remote_as_revs"]
1070 remote_revs = opts[b"remote_as_revs"]
1071
1071
1072 # make sure tests are repeatable
1072 # make sure tests are repeatable
1073 random.seed(int(opts[b'seed']))
1073 random.seed(int(opts[b'seed']))
1074
1074
1075 if not remote_revs:
1075 if not remote_revs:
1076
1076
1077 remoteurl, branches = urlutil.get_unique_pull_path(
1077 remoteurl, branches = urlutil.get_unique_pull_path(
1078 b'debugdiscovery', repo, ui, remoteurl
1078 b'debugdiscovery', repo, ui, remoteurl
1079 )
1079 )
1080 remote = hg.peer(repo, opts, remoteurl)
1080 remote = hg.peer(repo, opts, remoteurl)
1081 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1081 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1082 else:
1082 else:
1083 branches = (None, [])
1083 branches = (None, [])
1084 remote_filtered_revs = logcmdutil.revrange(
1084 remote_filtered_revs = logcmdutil.revrange(
1085 unfi, [b"not (::(%s))" % remote_revs]
1085 unfi, [b"not (::(%s))" % remote_revs]
1086 )
1086 )
1087 remote_filtered_revs = frozenset(remote_filtered_revs)
1087 remote_filtered_revs = frozenset(remote_filtered_revs)
1088
1088
1089 def remote_func(x):
1089 def remote_func(x):
1090 return remote_filtered_revs
1090 return remote_filtered_revs
1091
1091
1092 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1092 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1093
1093
1094 remote = repo.peer()
1094 remote = repo.peer()
1095 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1095 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1096
1096
1097 if local_revs:
1097 if local_revs:
1098 local_filtered_revs = logcmdutil.revrange(
1098 local_filtered_revs = logcmdutil.revrange(
1099 unfi, [b"not (::(%s))" % local_revs]
1099 unfi, [b"not (::(%s))" % local_revs]
1100 )
1100 )
1101 local_filtered_revs = frozenset(local_filtered_revs)
1101 local_filtered_revs = frozenset(local_filtered_revs)
1102
1102
1103 def local_func(x):
1103 def local_func(x):
1104 return local_filtered_revs
1104 return local_filtered_revs
1105
1105
1106 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1106 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1107 repo = repo.filtered(b'debug-discovery-local-filter')
1107 repo = repo.filtered(b'debug-discovery-local-filter')
1108
1108
1109 data = {}
1109 data = {}
1110 if opts.get(b'old'):
1110 if opts.get(b'old'):
1111
1111
1112 def doit(pushedrevs, remoteheads, remote=remote):
1112 def doit(pushedrevs, remoteheads, remote=remote):
1113 if not util.safehasattr(remote, b'branches'):
1113 if not util.safehasattr(remote, b'branches'):
1114 # enable in-client legacy support
1114 # enable in-client legacy support
1115 remote = localrepo.locallegacypeer(remote.local())
1115 remote = localrepo.locallegacypeer(remote.local())
1116 common, _in, hds = treediscovery.findcommonincoming(
1116 common, _in, hds = treediscovery.findcommonincoming(
1117 repo, remote, force=True, audit=data
1117 repo, remote, force=True, audit=data
1118 )
1118 )
1119 common = set(common)
1119 common = set(common)
1120 if not opts.get(b'nonheads'):
1120 if not opts.get(b'nonheads'):
1121 ui.writenoi18n(
1121 ui.writenoi18n(
1122 b"unpruned common: %s\n"
1122 b"unpruned common: %s\n"
1123 % b" ".join(sorted(short(n) for n in common))
1123 % b" ".join(sorted(short(n) for n in common))
1124 )
1124 )
1125
1125
1126 clnode = repo.changelog.node
1126 clnode = repo.changelog.node
1127 common = repo.revs(b'heads(::%ln)', common)
1127 common = repo.revs(b'heads(::%ln)', common)
1128 common = {clnode(r) for r in common}
1128 common = {clnode(r) for r in common}
1129 return common, hds
1129 return common, hds
1130
1130
1131 else:
1131 else:
1132
1132
1133 def doit(pushedrevs, remoteheads, remote=remote):
1133 def doit(pushedrevs, remoteheads, remote=remote):
1134 nodes = None
1134 nodes = None
1135 if pushedrevs:
1135 if pushedrevs:
1136 revs = logcmdutil.revrange(repo, pushedrevs)
1136 revs = logcmdutil.revrange(repo, pushedrevs)
1137 nodes = [repo[r].node() for r in revs]
1137 nodes = [repo[r].node() for r in revs]
1138 common, any, hds = setdiscovery.findcommonheads(
1138 common, any, hds = setdiscovery.findcommonheads(
1139 ui, repo, remote, ancestorsof=nodes, audit=data
1139 ui, repo, remote, ancestorsof=nodes, audit=data
1140 )
1140 )
1141 return common, hds
1141 return common, hds
1142
1142
1143 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1143 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1144 localrevs = opts[b'rev']
1144 localrevs = opts[b'rev']
1145
1145
1146 fm = ui.formatter(b'debugdiscovery', opts)
1146 fm = ui.formatter(b'debugdiscovery', opts)
1147 if fm.strict_format:
1147 if fm.strict_format:
1148
1148
1149 @contextlib.contextmanager
1149 @contextlib.contextmanager
1150 def may_capture_output():
1150 def may_capture_output():
1151 ui.pushbuffer()
1151 ui.pushbuffer()
1152 yield
1152 yield
1153 data[b'output'] = ui.popbuffer()
1153 data[b'output'] = ui.popbuffer()
1154
1154
1155 else:
1155 else:
1156 may_capture_output = util.nullcontextmanager
1156 may_capture_output = util.nullcontextmanager
1157 with may_capture_output():
1157 with may_capture_output():
1158 with util.timedcm('debug-discovery') as t:
1158 with util.timedcm('debug-discovery') as t:
1159 common, hds = doit(localrevs, remoterevs)
1159 common, hds = doit(localrevs, remoterevs)
1160
1160
1161 # compute all statistics
1161 # compute all statistics
1162 heads_common = set(common)
1162 heads_common = set(common)
1163 heads_remote = set(hds)
1163 heads_remote = set(hds)
1164 heads_local = set(repo.heads())
1164 heads_local = set(repo.heads())
1165 # note: they cannot be a local or remote head that is in common and not
1165 # note: they cannot be a local or remote head that is in common and not
1166 # itself a head of common.
1166 # itself a head of common.
1167 heads_common_local = heads_common & heads_local
1167 heads_common_local = heads_common & heads_local
1168 heads_common_remote = heads_common & heads_remote
1168 heads_common_remote = heads_common & heads_remote
1169 heads_common_both = heads_common & heads_remote & heads_local
1169 heads_common_both = heads_common & heads_remote & heads_local
1170
1170
1171 all = repo.revs(b'all()')
1171 all = repo.revs(b'all()')
1172 common = repo.revs(b'::%ln', common)
1172 common = repo.revs(b'::%ln', common)
1173 roots_common = repo.revs(b'roots(::%ld)', common)
1173 roots_common = repo.revs(b'roots(::%ld)', common)
1174 missing = repo.revs(b'not ::%ld', common)
1174 missing = repo.revs(b'not ::%ld', common)
1175 heads_missing = repo.revs(b'heads(%ld)', missing)
1175 heads_missing = repo.revs(b'heads(%ld)', missing)
1176 roots_missing = repo.revs(b'roots(%ld)', missing)
1176 roots_missing = repo.revs(b'roots(%ld)', missing)
1177 assert len(common) + len(missing) == len(all)
1177 assert len(common) + len(missing) == len(all)
1178
1178
1179 initial_undecided = repo.revs(
1179 initial_undecided = repo.revs(
1180 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1180 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1181 )
1181 )
1182 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1182 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1183 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1183 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1184 common_initial_undecided = initial_undecided & common
1184 common_initial_undecided = initial_undecided & common
1185 missing_initial_undecided = initial_undecided & missing
1185 missing_initial_undecided = initial_undecided & missing
1186
1186
1187 data[b'elapsed'] = t.elapsed
1187 data[b'elapsed'] = t.elapsed
1188 data[b'nb-common-heads'] = len(heads_common)
1188 data[b'nb-common-heads'] = len(heads_common)
1189 data[b'nb-common-heads-local'] = len(heads_common_local)
1189 data[b'nb-common-heads-local'] = len(heads_common_local)
1190 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1190 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1191 data[b'nb-common-heads-both'] = len(heads_common_both)
1191 data[b'nb-common-heads-both'] = len(heads_common_both)
1192 data[b'nb-common-roots'] = len(roots_common)
1192 data[b'nb-common-roots'] = len(roots_common)
1193 data[b'nb-head-local'] = len(heads_local)
1193 data[b'nb-head-local'] = len(heads_local)
1194 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1194 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1195 data[b'nb-head-remote'] = len(heads_remote)
1195 data[b'nb-head-remote'] = len(heads_remote)
1196 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1196 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1197 heads_common_remote
1197 heads_common_remote
1198 )
1198 )
1199 data[b'nb-revs'] = len(all)
1199 data[b'nb-revs'] = len(all)
1200 data[b'nb-revs-common'] = len(common)
1200 data[b'nb-revs-common'] = len(common)
1201 data[b'nb-revs-missing'] = len(missing)
1201 data[b'nb-revs-missing'] = len(missing)
1202 data[b'nb-missing-heads'] = len(heads_missing)
1202 data[b'nb-missing-heads'] = len(heads_missing)
1203 data[b'nb-missing-roots'] = len(roots_missing)
1203 data[b'nb-missing-roots'] = len(roots_missing)
1204 data[b'nb-ini_und'] = len(initial_undecided)
1204 data[b'nb-ini_und'] = len(initial_undecided)
1205 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1205 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1206 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1206 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1207 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1207 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1208 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1208 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1209
1209
1210 fm.startitem()
1210 fm.startitem()
1211 fm.data(**pycompat.strkwargs(data))
1211 fm.data(**pycompat.strkwargs(data))
1212 # display discovery summary
1212 # display discovery summary
1213 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1213 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1214 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1214 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1215 fm.plain(b"heads summary:\n")
1215 fm.plain(b"heads summary:\n")
1216 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1216 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1217 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1217 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1218 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1218 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1219 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1219 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1220 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1220 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1221 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1221 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1222 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1222 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1223 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1223 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1224 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1224 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1225 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1225 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1226 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1226 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1227 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1227 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1228 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1228 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1229 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1229 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1230 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1230 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1231 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1231 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1232 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1232 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1233 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1233 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1234 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1234 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1235 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1235 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1236 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1236 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1237 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1237 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1238
1238
1239 if ui.verbose:
1239 if ui.verbose:
1240 fm.plain(
1240 fm.plain(
1241 b"common heads: %s\n"
1241 b"common heads: %s\n"
1242 % b" ".join(sorted(short(n) for n in heads_common))
1242 % b" ".join(sorted(short(n) for n in heads_common))
1243 )
1243 )
1244 fm.end()
1244 fm.end()
1245
1245
1246
1246
# Chunk size (4 KiB) used when streaming data, e.g. by `hg debugdownload`
# below, both as the read size and as the output file's buffer size.
_chunksize = 4 << 10
1248
1248
1249
1249
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    src = urlmod.open(ui, url, output)

    # Stream to the ui by default; only open a real file when an explicit
    # output path was requested (and make sure we close it afterwards).
    sink = ui
    if output:
        sink = open(output, b"wb", _chunksize)
    try:
        while True:
            chunk = src.read(_chunksize)
            if not chunk:
                break
            sink.write(chunk)
    finally:
        if output:
            sink.close()
1272
1272
1273
1273
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # One formatter item per loaded extension, sorted by extension name.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Best-effort location of the extension's source: the module's
        # __file__ when available; in an oxidized (single-binary) build
        # modules have no __file__, so point at the executable instead.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # In quiet/verbose mode the name stands alone; otherwise append a
        # compatibility note ("untested" or last tested version) inline.
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        # The detail fields below are only rendered in verbose mode.
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1335
1335
1336
1336
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # Processing pipeline for the fileset expression; each stage name can be
    # selected with --show-stage to dump the tree at that point.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # Determine which stages should be printed.
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, dumping the tree after each requested stage.
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # suppress the header for the legacy --verbose 'parsed' dump
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file names to test the matcher against: every
    # file touched in history (--all-files), the working directory, or the
    # files of the selected revision.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    # Build the matcher and print every candidate file it accepts.
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1432
1432
1433
1433
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (building a report) makes no sense combined with
    # --from-report (consuming one) or --dry-run.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # The actual detection/repair logic lives in mercurial.rewrite.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1506
1506
1507
1507
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Width of the name column: the longest variant name, but at least as
    # wide as the b'format-variant' header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # %s format padded so all names line up in one column
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values (duck-typed via startswith) pass through;
            # everything else is rendered as yes/no for plain output
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template, ...) get the raw value
        formatvalue = pycompat.identity

    # Header row; the config/default columns only appear in verbose mode.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so the output can highlight where the repo disagrees
        # with the current config or with Mercurial's default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1578
1578
1579
1579
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability probe as b'yes'/b'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway temp file; if the probe file
    # cannot be created we simply report b'(unknown)'.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1602
1602
1603
1603
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # Translate the hex node ids from the command line into the wire
    # protocol's getbundle arguments.
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name onto an internal bundle type.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1650
1650
1651
1651
1652 @command(b'debugignore', [], b'[FILE]')
1652 @command(b'debugignore', [], b'[FILE]')
1653 def debugignore(ui, repo, *files, **opts):
1653 def debugignore(ui, repo, *files, **opts):
1654 """display the combined ignore pattern and information about ignored files
1654 """display the combined ignore pattern and information about ignored files
1655
1655
1656 With no argument display the combined ignore pattern.
1656 With no argument display the combined ignore pattern.
1657
1657
1658 Given space separated file names, shows if the given file is ignored and
1658 Given space separated file names, shows if the given file is ignored and
1659 if so, show the ignore rule (file and line number) that matched it.
1659 if so, show the ignore rule (file and line number) that matched it.
1660 """
1660 """
1661 ignore = repo.dirstate._ignore
1661 ignore = repo.dirstate._ignore
1662 if not files:
1662 if not files:
1663 # Show all the patterns
1663 # Show all the patterns
1664 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1664 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1665 else:
1665 else:
1666 m = scmutil.match(repo[None], pats=files)
1666 m = scmutil.match(repo[None], pats=files)
1667 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1667 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1668 for f in m.files():
1668 for f in m.files():
1669 nf = util.normpath(f)
1669 nf = util.normpath(f)
1670 ignored = None
1670 ignored = None
1671 ignoredata = None
1671 ignoredata = None
1672 if nf != b'.':
1672 if nf != b'.':
1673 if ignore(nf):
1673 if ignore(nf):
1674 ignored = nf
1674 ignored = nf
1675 ignoredata = repo.dirstate._ignorefileandline(nf)
1675 ignoredata = repo.dirstate._ignorefileandline(nf)
1676 else:
1676 else:
1677 for p in pathutil.finddirs(nf):
1677 for p in pathutil.finddirs(nf):
1678 if ignore(p):
1678 if ignore(p):
1679 ignored = p
1679 ignored = p
1680 ignoredata = repo.dirstate._ignorefileandline(p)
1680 ignoredata = repo.dirstate._ignorefileandline(p)
1681 break
1681 break
1682 if ignored:
1682 if ignored:
1683 if ignored == nf:
1683 if ignored == nf:
1684 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1684 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1685 else:
1685 else:
1686 ui.write(
1686 ui.write(
1687 _(
1687 _(
1688 b"%s is ignored because of "
1688 b"%s is ignored because of "
1689 b"containing directory %s\n"
1689 b"containing directory %s\n"
1690 )
1690 )
1691 % (uipathfn(f), ignored)
1691 % (uipathfn(f), ignored)
1692 )
1692 )
1693 ignorefile, lineno, line = ignoredata
1693 ignorefile, lineno, line = ignoredata
1694 ui.write(
1694 ui.write(
1695 _(b"(ignore rule in %s, line %d: '%s')\n")
1695 _(b"(ignore rule in %s, line %d: '%s')\n")
1696 % (ignorefile, lineno, line)
1696 % (ignorefile, lineno, line)
1697 )
1697 )
1698 else:
1698 else:
1699 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1699 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1700
1700
1701
1701
1702 @command(
1702 @command(
1703 b'debugindex',
1703 b'debugindex',
1704 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1704 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1705 _(b'-c|-m|FILE'),
1705 _(b'-c|-m|FILE'),
1706 )
1706 )
1707 def debugindex(ui, repo, file_=None, **opts):
1707 def debugindex(ui, repo, file_=None, **opts):
1708 """dump index data for a storage primitive"""
1708 """dump index data for a storage primitive"""
1709 opts = pycompat.byteskwargs(opts)
1709 opts = pycompat.byteskwargs(opts)
1710 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1710 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1711
1711
1712 if ui.debugflag:
1712 if ui.debugflag:
1713 shortfn = hex
1713 shortfn = hex
1714 else:
1714 else:
1715 shortfn = short
1715 shortfn = short
1716
1716
1717 idlen = 12
1717 idlen = 12
1718 for i in store:
1718 for i in store:
1719 idlen = len(shortfn(store.node(i)))
1719 idlen = len(shortfn(store.node(i)))
1720 break
1720 break
1721
1721
1722 fm = ui.formatter(b'debugindex', opts)
1722 fm = ui.formatter(b'debugindex', opts)
1723 fm.plain(
1723 fm.plain(
1724 b' rev linkrev %s %s p2\n'
1724 b' rev linkrev %s %s p2\n'
1725 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1725 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1726 )
1726 )
1727
1727
1728 for rev in store:
1728 for rev in store:
1729 node = store.node(rev)
1729 node = store.node(rev)
1730 parents = store.parents(node)
1730 parents = store.parents(node)
1731
1731
1732 fm.startitem()
1732 fm.startitem()
1733 fm.write(b'rev', b'%6d ', rev)
1733 fm.write(b'rev', b'%6d ', rev)
1734 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1734 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1735 fm.write(b'node', b'%s ', shortfn(node))
1735 fm.write(b'node', b'%s ', shortfn(node))
1736 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1736 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1737 fm.write(b'p2', b'%s', shortfn(parents[1]))
1737 fm.write(b'p2', b'%s', shortfn(parents[1]))
1738 fm.plain(b'\n')
1738 fm.plain(b'\n')
1739
1739
1740 fm.end()
1740 fm.end()
1741
1741
1742
1742
1743 @command(
1743 @command(
1744 b'debugindexdot',
1744 b'debugindexdot',
1745 cmdutil.debugrevlogopts,
1745 cmdutil.debugrevlogopts,
1746 _(b'-c|-m|FILE'),
1746 _(b'-c|-m|FILE'),
1747 optionalrepo=True,
1747 optionalrepo=True,
1748 )
1748 )
1749 def debugindexdot(ui, repo, file_=None, **opts):
1749 def debugindexdot(ui, repo, file_=None, **opts):
1750 """dump an index DAG as a graphviz dot file"""
1750 """dump an index DAG as a graphviz dot file"""
1751 opts = pycompat.byteskwargs(opts)
1751 opts = pycompat.byteskwargs(opts)
1752 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1752 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1753 ui.writenoi18n(b"digraph G {\n")
1753 ui.writenoi18n(b"digraph G {\n")
1754 for i in r:
1754 for i in r:
1755 node = r.node(i)
1755 node = r.node(i)
1756 pp = r.parents(node)
1756 pp = r.parents(node)
1757 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1757 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1758 if pp[1] != repo.nullid:
1758 if pp[1] != repo.nullid:
1759 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1759 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1760 ui.write(b"}\n")
1760 ui.write(b"}\n")
1761
1761
1762
1762
1763 @command(b'debugindexstats', [])
1763 @command(b'debugindexstats', [])
1764 def debugindexstats(ui, repo):
1764 def debugindexstats(ui, repo):
1765 """show stats related to the changelog index"""
1765 """show stats related to the changelog index"""
1766 repo.changelog.shortest(repo.nullid, 1)
1766 repo.changelog.shortest(repo.nullid, 1)
1767 index = repo.changelog.index
1767 index = repo.changelog.index
1768 if not util.safehasattr(index, b'stats'):
1768 if not util.safehasattr(index, b'stats'):
1769 raise error.Abort(_(b'debugindexstats only works with native code'))
1769 raise error.Abort(_(b'debugindexstats only works with native code'))
1770 for k, v in sorted(index.stats().items()):
1770 for k, v in sorted(index.stats().items()):
1771 ui.write(b'%s: %d\n' % (k, v))
1771 ui.write(b'%s: %d\n' % (k, v))
1772
1772
1773
1773
1774 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1774 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1775 def debuginstall(ui, **opts):
1775 def debuginstall(ui, **opts):
1776 """test Mercurial installation
1776 """test Mercurial installation
1777
1777
1778 Returns 0 on success.
1778 Returns 0 on success.
1779 """
1779 """
1780 opts = pycompat.byteskwargs(opts)
1780 opts = pycompat.byteskwargs(opts)
1781
1781
1782 problems = 0
1782 problems = 0
1783
1783
1784 fm = ui.formatter(b'debuginstall', opts)
1784 fm = ui.formatter(b'debuginstall', opts)
1785 fm.startitem()
1785 fm.startitem()
1786
1786
1787 # encoding might be unknown or wrong. don't translate these messages.
1787 # encoding might be unknown or wrong. don't translate these messages.
1788 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1788 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1789 err = None
1789 err = None
1790 try:
1790 try:
1791 codecs.lookup(pycompat.sysstr(encoding.encoding))
1791 codecs.lookup(pycompat.sysstr(encoding.encoding))
1792 except LookupError as inst:
1792 except LookupError as inst:
1793 err = stringutil.forcebytestr(inst)
1793 err = stringutil.forcebytestr(inst)
1794 problems += 1
1794 problems += 1
1795 fm.condwrite(
1795 fm.condwrite(
1796 err,
1796 err,
1797 b'encodingerror',
1797 b'encodingerror',
1798 b" %s\n (check that your locale is properly set)\n",
1798 b" %s\n (check that your locale is properly set)\n",
1799 err,
1799 err,
1800 )
1800 )
1801
1801
1802 # Python
1802 # Python
1803 pythonlib = None
1803 pythonlib = None
1804 if util.safehasattr(os, '__file__'):
1804 if util.safehasattr(os, '__file__'):
1805 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1805 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1806 elif getattr(sys, 'oxidized', False):
1806 elif getattr(sys, 'oxidized', False):
1807 pythonlib = pycompat.sysexecutable
1807 pythonlib = pycompat.sysexecutable
1808
1808
1809 fm.write(
1809 fm.write(
1810 b'pythonexe',
1810 b'pythonexe',
1811 _(b"checking Python executable (%s)\n"),
1811 _(b"checking Python executable (%s)\n"),
1812 pycompat.sysexecutable or _(b"unknown"),
1812 pycompat.sysexecutable or _(b"unknown"),
1813 )
1813 )
1814 fm.write(
1814 fm.write(
1815 b'pythonimplementation',
1815 b'pythonimplementation',
1816 _(b"checking Python implementation (%s)\n"),
1816 _(b"checking Python implementation (%s)\n"),
1817 pycompat.sysbytes(platform.python_implementation()),
1817 pycompat.sysbytes(platform.python_implementation()),
1818 )
1818 )
1819 fm.write(
1819 fm.write(
1820 b'pythonver',
1820 b'pythonver',
1821 _(b"checking Python version (%s)\n"),
1821 _(b"checking Python version (%s)\n"),
1822 (b"%d.%d.%d" % sys.version_info[:3]),
1822 (b"%d.%d.%d" % sys.version_info[:3]),
1823 )
1823 )
1824 fm.write(
1824 fm.write(
1825 b'pythonlib',
1825 b'pythonlib',
1826 _(b"checking Python lib (%s)...\n"),
1826 _(b"checking Python lib (%s)...\n"),
1827 pythonlib or _(b"unknown"),
1827 pythonlib or _(b"unknown"),
1828 )
1828 )
1829
1829
1830 try:
1830 try:
1831 from . import rustext # pytype: disable=import-error
1831 from . import rustext # pytype: disable=import-error
1832
1832
1833 rustext.__doc__ # trigger lazy import
1833 rustext.__doc__ # trigger lazy import
1834 except ImportError:
1834 except ImportError:
1835 rustext = None
1835 rustext = None
1836
1836
1837 security = set(sslutil.supportedprotocols)
1837 security = set(sslutil.supportedprotocols)
1838 if sslutil.hassni:
1838 if sslutil.hassni:
1839 security.add(b'sni')
1839 security.add(b'sni')
1840
1840
1841 fm.write(
1841 fm.write(
1842 b'pythonsecurity',
1842 b'pythonsecurity',
1843 _(b"checking Python security support (%s)\n"),
1843 _(b"checking Python security support (%s)\n"),
1844 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1844 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1845 )
1845 )
1846
1846
1847 # These are warnings, not errors. So don't increment problem count. This
1847 # These are warnings, not errors. So don't increment problem count. This
1848 # may change in the future.
1848 # may change in the future.
1849 if b'tls1.2' not in security:
1849 if b'tls1.2' not in security:
1850 fm.plain(
1850 fm.plain(
1851 _(
1851 _(
1852 b' TLS 1.2 not supported by Python install; '
1852 b' TLS 1.2 not supported by Python install; '
1853 b'network connections lack modern security\n'
1853 b'network connections lack modern security\n'
1854 )
1854 )
1855 )
1855 )
1856 if b'sni' not in security:
1856 if b'sni' not in security:
1857 fm.plain(
1857 fm.plain(
1858 _(
1858 _(
1859 b' SNI not supported by Python install; may have '
1859 b' SNI not supported by Python install; may have '
1860 b'connectivity issues with some servers\n'
1860 b'connectivity issues with some servers\n'
1861 )
1861 )
1862 )
1862 )
1863
1863
1864 fm.plain(
1864 fm.plain(
1865 _(
1865 _(
1866 b"checking Rust extensions (%s)\n"
1866 b"checking Rust extensions (%s)\n"
1867 % (b'missing' if rustext is None else b'installed')
1867 % (b'missing' if rustext is None else b'installed')
1868 ),
1868 ),
1869 )
1869 )
1870
1870
1871 # TODO print CA cert info
1871 # TODO print CA cert info
1872
1872
1873 # hg version
1873 # hg version
1874 hgver = util.version()
1874 hgver = util.version()
1875 fm.write(
1875 fm.write(
1876 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1876 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1877 )
1877 )
1878 fm.write(
1878 fm.write(
1879 b'hgverextra',
1879 b'hgverextra',
1880 _(b"checking Mercurial custom build (%s)\n"),
1880 _(b"checking Mercurial custom build (%s)\n"),
1881 b'+'.join(hgver.split(b'+')[1:]),
1881 b'+'.join(hgver.split(b'+')[1:]),
1882 )
1882 )
1883
1883
1884 # compiled modules
1884 # compiled modules
1885 hgmodules = None
1885 hgmodules = None
1886 if util.safehasattr(sys.modules[__name__], '__file__'):
1886 if util.safehasattr(sys.modules[__name__], '__file__'):
1887 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1887 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1888 elif getattr(sys, 'oxidized', False):
1888 elif getattr(sys, 'oxidized', False):
1889 hgmodules = pycompat.sysexecutable
1889 hgmodules = pycompat.sysexecutable
1890
1890
1891 fm.write(
1891 fm.write(
1892 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1892 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1893 )
1893 )
1894 fm.write(
1894 fm.write(
1895 b'hgmodules',
1895 b'hgmodules',
1896 _(b"checking installed modules (%s)...\n"),
1896 _(b"checking installed modules (%s)...\n"),
1897 hgmodules or _(b"unknown"),
1897 hgmodules or _(b"unknown"),
1898 )
1898 )
1899
1899
1900 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1900 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1901 rustext = rustandc # for now, that's the only case
1901 rustext = rustandc # for now, that's the only case
1902 cext = policy.policy in (b'c', b'allow') or rustandc
1902 cext = policy.policy in (b'c', b'allow') or rustandc
1903 nopure = cext or rustext
1903 nopure = cext or rustext
1904 if nopure:
1904 if nopure:
1905 err = None
1905 err = None
1906 try:
1906 try:
1907 if cext:
1907 if cext:
1908 from .cext import ( # pytype: disable=import-error
1908 from .cext import ( # pytype: disable=import-error
1909 base85,
1909 base85,
1910 bdiff,
1910 bdiff,
1911 mpatch,
1911 mpatch,
1912 osutil,
1912 osutil,
1913 )
1913 )
1914
1914
1915 # quiet pyflakes
1915 # quiet pyflakes
1916 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1916 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1917 if rustext:
1917 if rustext:
1918 from .rustext import ( # pytype: disable=import-error
1918 from .rustext import ( # pytype: disable=import-error
1919 ancestor,
1919 ancestor,
1920 dirstate,
1920 dirstate,
1921 )
1921 )
1922
1922
1923 dir(ancestor), dir(dirstate) # quiet pyflakes
1923 dir(ancestor), dir(dirstate) # quiet pyflakes
1924 except Exception as inst:
1924 except Exception as inst:
1925 err = stringutil.forcebytestr(inst)
1925 err = stringutil.forcebytestr(inst)
1926 problems += 1
1926 problems += 1
1927 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1927 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1928
1928
1929 compengines = util.compengines._engines.values()
1929 compengines = util.compengines._engines.values()
1930 fm.write(
1930 fm.write(
1931 b'compengines',
1931 b'compengines',
1932 _(b'checking registered compression engines (%s)\n'),
1932 _(b'checking registered compression engines (%s)\n'),
1933 fm.formatlist(
1933 fm.formatlist(
1934 sorted(e.name() for e in compengines),
1934 sorted(e.name() for e in compengines),
1935 name=b'compengine',
1935 name=b'compengine',
1936 fmt=b'%s',
1936 fmt=b'%s',
1937 sep=b', ',
1937 sep=b', ',
1938 ),
1938 ),
1939 )
1939 )
1940 fm.write(
1940 fm.write(
1941 b'compenginesavail',
1941 b'compenginesavail',
1942 _(b'checking available compression engines (%s)\n'),
1942 _(b'checking available compression engines (%s)\n'),
1943 fm.formatlist(
1943 fm.formatlist(
1944 sorted(e.name() for e in compengines if e.available()),
1944 sorted(e.name() for e in compengines if e.available()),
1945 name=b'compengine',
1945 name=b'compengine',
1946 fmt=b'%s',
1946 fmt=b'%s',
1947 sep=b', ',
1947 sep=b', ',
1948 ),
1948 ),
1949 )
1949 )
1950 wirecompengines = compression.compengines.supportedwireengines(
1950 wirecompengines = compression.compengines.supportedwireengines(
1951 compression.SERVERROLE
1951 compression.SERVERROLE
1952 )
1952 )
1953 fm.write(
1953 fm.write(
1954 b'compenginesserver',
1954 b'compenginesserver',
1955 _(
1955 _(
1956 b'checking available compression engines '
1956 b'checking available compression engines '
1957 b'for wire protocol (%s)\n'
1957 b'for wire protocol (%s)\n'
1958 ),
1958 ),
1959 fm.formatlist(
1959 fm.formatlist(
1960 [e.name() for e in wirecompengines if e.wireprotosupport()],
1960 [e.name() for e in wirecompengines if e.wireprotosupport()],
1961 name=b'compengine',
1961 name=b'compengine',
1962 fmt=b'%s',
1962 fmt=b'%s',
1963 sep=b', ',
1963 sep=b', ',
1964 ),
1964 ),
1965 )
1965 )
1966 re2 = b'missing'
1966 re2 = b'missing'
1967 if util._re2:
1967 if util._re2:
1968 re2 = b'available'
1968 re2 = b'available'
1969 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1969 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1970 fm.data(re2=bool(util._re2))
1970 fm.data(re2=bool(util._re2))
1971
1971
1972 # templates
1972 # templates
1973 p = templater.templatedir()
1973 p = templater.templatedir()
1974 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1974 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1975 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1975 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1976 if p:
1976 if p:
1977 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1977 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1978 if m:
1978 if m:
1979 # template found, check if it is working
1979 # template found, check if it is working
1980 err = None
1980 err = None
1981 try:
1981 try:
1982 templater.templater.frommapfile(m)
1982 templater.templater.frommapfile(m)
1983 except Exception as inst:
1983 except Exception as inst:
1984 err = stringutil.forcebytestr(inst)
1984 err = stringutil.forcebytestr(inst)
1985 p = None
1985 p = None
1986 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1986 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1987 else:
1987 else:
1988 p = None
1988 p = None
1989 fm.condwrite(
1989 fm.condwrite(
1990 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1990 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1991 )
1991 )
1992 fm.condwrite(
1992 fm.condwrite(
1993 not m,
1993 not m,
1994 b'defaulttemplatenotfound',
1994 b'defaulttemplatenotfound',
1995 _(b" template '%s' not found\n"),
1995 _(b" template '%s' not found\n"),
1996 b"default",
1996 b"default",
1997 )
1997 )
1998 if not p:
1998 if not p:
1999 problems += 1
1999 problems += 1
2000 fm.condwrite(
2000 fm.condwrite(
2001 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2001 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2002 )
2002 )
2003
2003
2004 # editor
2004 # editor
2005 editor = ui.geteditor()
2005 editor = ui.geteditor()
2006 editor = util.expandpath(editor)
2006 editor = util.expandpath(editor)
2007 editorbin = procutil.shellsplit(editor)[0]
2007 editorbin = procutil.shellsplit(editor)[0]
2008 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2008 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2009 cmdpath = procutil.findexe(editorbin)
2009 cmdpath = procutil.findexe(editorbin)
2010 fm.condwrite(
2010 fm.condwrite(
2011 not cmdpath and editor == b'vi',
2011 not cmdpath and editor == b'vi',
2012 b'vinotfound',
2012 b'vinotfound',
2013 _(
2013 _(
2014 b" No commit editor set and can't find %s in PATH\n"
2014 b" No commit editor set and can't find %s in PATH\n"
2015 b" (specify a commit editor in your configuration"
2015 b" (specify a commit editor in your configuration"
2016 b" file)\n"
2016 b" file)\n"
2017 ),
2017 ),
2018 not cmdpath and editor == b'vi' and editorbin,
2018 not cmdpath and editor == b'vi' and editorbin,
2019 )
2019 )
2020 fm.condwrite(
2020 fm.condwrite(
2021 not cmdpath and editor != b'vi',
2021 not cmdpath and editor != b'vi',
2022 b'editornotfound',
2022 b'editornotfound',
2023 _(
2023 _(
2024 b" Can't find editor '%s' in PATH\n"
2024 b" Can't find editor '%s' in PATH\n"
2025 b" (specify a commit editor in your configuration"
2025 b" (specify a commit editor in your configuration"
2026 b" file)\n"
2026 b" file)\n"
2027 ),
2027 ),
2028 not cmdpath and editorbin,
2028 not cmdpath and editorbin,
2029 )
2029 )
2030 if not cmdpath and editor != b'vi':
2030 if not cmdpath and editor != b'vi':
2031 problems += 1
2031 problems += 1
2032
2032
2033 # check username
2033 # check username
2034 username = None
2034 username = None
2035 err = None
2035 err = None
2036 try:
2036 try:
2037 username = ui.username()
2037 username = ui.username()
2038 except error.Abort as e:
2038 except error.Abort as e:
2039 err = e.message
2039 err = e.message
2040 problems += 1
2040 problems += 1
2041
2041
2042 fm.condwrite(
2042 fm.condwrite(
2043 username, b'username', _(b"checking username (%s)\n"), username
2043 username, b'username', _(b"checking username (%s)\n"), username
2044 )
2044 )
2045 fm.condwrite(
2045 fm.condwrite(
2046 err,
2046 err,
2047 b'usernameerror',
2047 b'usernameerror',
2048 _(
2048 _(
2049 b"checking username...\n %s\n"
2049 b"checking username...\n %s\n"
2050 b" (specify a username in your configuration file)\n"
2050 b" (specify a username in your configuration file)\n"
2051 ),
2051 ),
2052 err,
2052 err,
2053 )
2053 )
2054
2054
2055 for name, mod in extensions.extensions():
2055 for name, mod in extensions.extensions():
2056 handler = getattr(mod, 'debuginstall', None)
2056 handler = getattr(mod, 'debuginstall', None)
2057 if handler is not None:
2057 if handler is not None:
2058 problems += handler(ui, fm)
2058 problems += handler(ui, fm)
2059
2059
2060 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2060 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2061 if not problems:
2061 if not problems:
2062 fm.data(problems=problems)
2062 fm.data(problems=problems)
2063 fm.condwrite(
2063 fm.condwrite(
2064 problems,
2064 problems,
2065 b'problems',
2065 b'problems',
2066 _(b"%d problems detected, please check your install!\n"),
2066 _(b"%d problems detected, please check your install!\n"),
2067 problems,
2067 problems,
2068 )
2068 )
2069 fm.end()
2069 fm.end()
2070
2070
2071 return problems
2071 return problems
2072
2072
2073
2073
2074 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2074 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2075 def debugknown(ui, repopath, *ids, **opts):
2075 def debugknown(ui, repopath, *ids, **opts):
2076 """test whether node ids are known to a repo
2076 """test whether node ids are known to a repo
2077
2077
2078 Every ID must be a full-length hex node id string. Returns a list of 0s
2078 Every ID must be a full-length hex node id string. Returns a list of 0s
2079 and 1s indicating unknown/known.
2079 and 1s indicating unknown/known.
2080 """
2080 """
2081 opts = pycompat.byteskwargs(opts)
2081 opts = pycompat.byteskwargs(opts)
2082 repo = hg.peer(ui, opts, repopath)
2082 repo = hg.peer(ui, opts, repopath)
2083 if not repo.capable(b'known'):
2083 if not repo.capable(b'known'):
2084 raise error.Abort(b"known() not supported by target repository")
2084 raise error.Abort(b"known() not supported by target repository")
2085 flags = repo.known([bin(s) for s in ids])
2085 flags = repo.known([bin(s) for s in ids])
2086 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2086 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2087
2087
2088
2088
2089 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2089 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2090 def debuglabelcomplete(ui, repo, *args):
2090 def debuglabelcomplete(ui, repo, *args):
2091 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2091 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2092 debugnamecomplete(ui, repo, *args)
2092 debugnamecomplete(ui, repo, *args)
2093
2093
2094
2094
2095 @command(
2095 @command(
2096 b'debuglocks',
2096 b'debuglocks',
2097 [
2097 [
2098 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2098 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2099 (
2099 (
2100 b'W',
2100 b'W',
2101 b'force-free-wlock',
2101 b'force-free-wlock',
2102 None,
2102 None,
2103 _(b'free the working state lock (DANGEROUS)'),
2103 _(b'free the working state lock (DANGEROUS)'),
2104 ),
2104 ),
2105 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2105 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2106 (
2106 (
2107 b'S',
2107 b'S',
2108 b'set-wlock',
2108 b'set-wlock',
2109 None,
2109 None,
2110 _(b'set the working state lock until stopped'),
2110 _(b'set the working state lock until stopped'),
2111 ),
2111 ),
2112 ],
2112 ],
2113 _(b'[OPTION]...'),
2113 _(b'[OPTION]...'),
2114 )
2114 )
2115 def debuglocks(ui, repo, **opts):
2115 def debuglocks(ui, repo, **opts):
2116 """show or modify state of locks
2116 """show or modify state of locks
2117
2117
2118 By default, this command will show which locks are held. This
2118 By default, this command will show which locks are held. This
2119 includes the user and process holding the lock, the amount of time
2119 includes the user and process holding the lock, the amount of time
2120 the lock has been held, and the machine name where the process is
2120 the lock has been held, and the machine name where the process is
2121 running if it's not local.
2121 running if it's not local.
2122
2122
2123 Locks protect the integrity of Mercurial's data, so should be
2123 Locks protect the integrity of Mercurial's data, so should be
2124 treated with care. System crashes or other interruptions may cause
2124 treated with care. System crashes or other interruptions may cause
2125 locks to not be properly released, though Mercurial will usually
2125 locks to not be properly released, though Mercurial will usually
2126 detect and remove such stale locks automatically.
2126 detect and remove such stale locks automatically.
2127
2127
2128 However, detecting stale locks may not always be possible (for
2128 However, detecting stale locks may not always be possible (for
2129 instance, on a shared filesystem). Removing locks may also be
2129 instance, on a shared filesystem). Removing locks may also be
2130 blocked by filesystem permissions.
2130 blocked by filesystem permissions.
2131
2131
2132 Setting a lock will prevent other commands from changing the data.
2132 Setting a lock will prevent other commands from changing the data.
2133 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2133 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2134 The set locks are removed when the command exits.
2134 The set locks are removed when the command exits.
2135
2135
2136 Returns 0 if no locks are held.
2136 Returns 0 if no locks are held.
2137
2137
2138 """
2138 """
2139
2139
2140 if opts.get('force_free_lock'):
2140 if opts.get('force_free_lock'):
2141 repo.svfs.unlink(b'lock')
2141 repo.svfs.unlink(b'lock')
2142 if opts.get('force_free_wlock'):
2142 if opts.get('force_free_wlock'):
2143 repo.vfs.unlink(b'wlock')
2143 repo.vfs.unlink(b'wlock')
2144 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2144 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2145 return 0
2145 return 0
2146
2146
2147 locks = []
2147 locks = []
2148 try:
2148 try:
2149 if opts.get('set_wlock'):
2149 if opts.get('set_wlock'):
2150 try:
2150 try:
2151 locks.append(repo.wlock(False))
2151 locks.append(repo.wlock(False))
2152 except error.LockHeld:
2152 except error.LockHeld:
2153 raise error.Abort(_(b'wlock is already held'))
2153 raise error.Abort(_(b'wlock is already held'))
2154 if opts.get('set_lock'):
2154 if opts.get('set_lock'):
2155 try:
2155 try:
2156 locks.append(repo.lock(False))
2156 locks.append(repo.lock(False))
2157 except error.LockHeld:
2157 except error.LockHeld:
2158 raise error.Abort(_(b'lock is already held'))
2158 raise error.Abort(_(b'lock is already held'))
2159 if len(locks):
2159 if len(locks):
2160 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2160 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2161 return 0
2161 return 0
2162 finally:
2162 finally:
2163 release(*locks)
2163 release(*locks)
2164
2164
2165 now = time.time()
2165 now = time.time()
2166 held = 0
2166 held = 0
2167
2167
2168 def report(vfs, name, method):
2168 def report(vfs, name, method):
2169 # this causes stale locks to get reaped for more accurate reporting
2169 # this causes stale locks to get reaped for more accurate reporting
2170 try:
2170 try:
2171 l = method(False)
2171 l = method(False)
2172 except error.LockHeld:
2172 except error.LockHeld:
2173 l = None
2173 l = None
2174
2174
2175 if l:
2175 if l:
2176 l.release()
2176 l.release()
2177 else:
2177 else:
2178 try:
2178 try:
2179 st = vfs.lstat(name)
2179 st = vfs.lstat(name)
2180 age = now - st[stat.ST_MTIME]
2180 age = now - st[stat.ST_MTIME]
2181 user = util.username(st.st_uid)
2181 user = util.username(st.st_uid)
2182 locker = vfs.readlock(name)
2182 locker = vfs.readlock(name)
2183 if b":" in locker:
2183 if b":" in locker:
2184 host, pid = locker.split(b':')
2184 host, pid = locker.split(b':')
2185 if host == socket.gethostname():
2185 if host == socket.gethostname():
2186 locker = b'user %s, process %s' % (user or b'None', pid)
2186 locker = b'user %s, process %s' % (user or b'None', pid)
2187 else:
2187 else:
2188 locker = b'user %s, process %s, host %s' % (
2188 locker = b'user %s, process %s, host %s' % (
2189 user or b'None',
2189 user or b'None',
2190 pid,
2190 pid,
2191 host,
2191 host,
2192 )
2192 )
2193 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2193 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2194 return 1
2194 return 1
2195 except OSError as e:
2195 except OSError as e:
2196 if e.errno != errno.ENOENT:
2196 if e.errno != errno.ENOENT:
2197 raise
2197 raise
2198
2198
2199 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2199 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2200 return 0
2200 return 0
2201
2201
2202 held += report(repo.svfs, b"lock", repo.lock)
2202 held += report(repo.svfs, b"lock", repo.lock)
2203 held += report(repo.vfs, b"wlock", repo.wlock)
2203 held += report(repo.vfs, b"wlock", repo.wlock)
2204
2204
2205 return held
2205 return held
2206
2206
2207
2207
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation carries a fulltext cache; turn
        # the AttributeError into a user-facing abort message.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Drop both the in-memory entries and the persisted file.
        with repo.wlock():
            getcache().clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            mlog = repo.manifestlog
            store = mlog.getstorage(b'')
            for node in add:
                try:
                    manifest = mlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # reading also stores the revision in the cache
            return

    # No mutation requested: dump the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # cache.peek() does not update the LRU ordering
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2281
2281
2282
2282
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mimics the historical plain-text output.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local and other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the tuple layout depends on the record kind.
    fm_files = fm.nested(b'files')
    if ms.active():
        for path in ms:
            fm_files.startitem()
            fm_files.data(path=path)
            state = ms._state[path]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for key, value in sorted(ms.extras(path).items()):
                fm_extras.startitem()
                fm_extras.data(key=key)
                fm_extras.data(value=value)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that no longer have a merge record.
    fm_extras = fm.nested(b'extras')
    for path, extra_map in sorted(pycompat.iteritems(ms.allextras())):
        if path in ms:
            # extras for files in the mergestate were emitted above already
            continue
        for key, value in pycompat.iteritems(extra_map):
            fm_extras.startitem()
            fm_extras.data(file=path)
            fm_extras.data(key=key)
            fm_extras.data(value=value)
    fm_extras.end()

    fm.end()
2390
2390
2391
2391
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # Branches are handled separately below, because historically only the
    # open ones were listed here.
    for namespace, ns in pycompat.iteritems(repo.names):
        if namespace != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )

    # An empty argument list means "complete everything".
    prefixes = args if args else [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(name for name in names if name.startswith(prefix))

    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2414
2414
2415
2415
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Every mode operates on the unfiltered changelog; compute it once
    # instead of repeating the boilerplate in each branch.
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        # Serialize a fresh nodemap, preferring the native index support
        # when available.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Emit the raw persisted data (without the docket).
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # Guard against a zero-length data file to avoid
            # ZeroDivisionError when computing the unused percentage.
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2477
2477
2478
2478
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(hexstr):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            node = bin(hexstr)
            if len(node) != repo.nodeconstants.nodelen:
                raise TypeError()
            return node
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # Deletion mode: parse the requested marker indices up front so a
        # bad value aborts before the obsstore is touched.
        indices = []
        for value in opts.get(b'delete'):
            try:
                indices.append(int(value))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % value,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            deleted = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % deleted)

        return

    if precursor is not None:
        # Creation mode: build one marker from the command line.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        lck = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as err:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(err)
                )
            finally:
                tr.release()
        finally:
            lck.release()
    else:
        # Display mode.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so when both
            # --rev and --index are given we must walk all markers and
            # filter down to the relevant ones while displaying.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for idx, marker in enumerate(markerstoiter):
            if not isrelevant(marker):
                # Skip markers that were only iterated to keep the index
                # numbering correct (see the --rev/--index comment above).
                continue
            fm.startitem()
            ind = idx if opts.get(b'index') else None
            cmdutil.showmarker(fm, marker, index=ind)
        fm.end()
2628
2628
2629
2629
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per recorded copy against p1.
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2642
2642
2643
2643
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Fix: this function was previously (mis)named debugp1copies, which
    # rebound the module-level name and shadowed the real p1 variant
    # defined just above. The command registration (b'debugp2copies' via
    # the decorator) is unchanged, so callers of the command are
    # unaffected.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per recorded copy against p2.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2656
2656
2657
2657
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Anything outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # The dirstate always stores '/'-separated paths; translate on
        # platforms with a different separator.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for fname, entry in pycompat.iteritems(dirstate):
            if fname.startswith(spec) and entry.state in acceptable:
                if fixpaths:
                    fname = fname.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(fname)
                    continue
                # Without --full, stop at the next path separator.
                sep_idx = fname.find(pycompat.ossep, speclen)
                if sep_idx >= 0:
                    adddir(fname[:sep_idx])
                else:
                    addfile(fname)
        return files, dirs

    # Translate the filter flags into dirstate state characters.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2726
2726
2727
2727
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    # One "source -> destination" line per copy, in sorted order.
    for dest, source in sorted(copies.pathcopies(ctx1, ctx2, matcher).items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2741
2741
2742
2742
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here; it is only shown when
    # --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    def yesno(flag):
        return _(b'yes') if flag else _(b'no')

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(local))
        ui.write(_(b'pushable: %s\n') % yesno(canpush))
    finally:
        # Always release the connection, even if a query above failed.
        peer.close()
2766
2766
2767
2767
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool has the highest precedence; surface it to the user when set.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other non-pattern tool sources (HGMERGE, ui.merge)
        # so the user can tell why pattern matching might be skipped.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Without --debug, suppress the noisy messages produced
            # while _picktool matches against merge-patterns.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2852
2852
2853
2853
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-arg form: attempt a compare-and-set of one key.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                pushed = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(pushed) + b'\n')
            # Exit status 0 on success, 1 on failure.
            return not pushed
        else:
            # Two-arg form: dump every key/value in the namespace.
            listing = target.listkeys(namespace)
            for key, value in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
    finally:
        target.close()
2889
2889
2890
2890
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Resolve both revisions and compute their parent vectors.
    ctx_a = scmutil.revsingle(repo, a)
    ctx_b = scmutil.revsingle(repo, b)
    vec_a = pvec.ctxpvec(ctx_a)
    vec_b = pvec.ctxpvec(ctx_b)
    # Classify the relationship between the two vectors.
    if vec_a == vec_b:
        rel = b"="
    elif vec_a > vec_b:
        rel = b">"
    elif vec_a < vec_b:
        rel = b"<"
    elif vec_a | vec_b:
        rel = b"|"
    ui.write(_(b"a: %s\n") % vec_a)
    ui.write(_(b"b: %s\n") % vec_b)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (vec_a._depth, vec_b._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(vec_a._depth - vec_b._depth),
            pvec._hamming(vec_a._vec, vec_b._vec),
            vec_a.distance(vec_b),
            rel,
        )
    )
2917
2917
2918
2918
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Files the manifest knows about but the dirstate does not,
            # plus dirstate files absent from the manifest that are not
            # pending adds.
            manifest_only = in_manifest - in_dirstate
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2966
2966
2967
2967
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Convert str-keyed kwargs to bytes keys before reading them.
    byte_opts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byte_opts.get(b"only_data"))
2984
2984
2985
2985
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    # NOTE: the loop variable was previously named `abs`, shadowing the
    # builtin; renamed to `fname` (repo-relative path from the walk).
    for fname in ctx.walk(m):
        fctx = ctx[fname]
        # renamed() returns (source path, source filenode) or None/False
        # when the file was not copied/renamed in this revision.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(fname)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3005
3005
3006
3006
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3012
3012
3013
3013
3014 @command(
3014 @command(
3015 b'debugrevlog',
3015 b'debugrevlog',
3016 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3016 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3017 _(b'-c|-m|FILE'),
3017 _(b'-c|-m|FILE'),
3018 optionalrepo=True,
3018 optionalrepo=True,
3019 )
3019 )
3020 def debugrevlog(ui, repo, file_=None, **opts):
3020 def debugrevlog(ui, repo, file_=None, **opts):
3021 """show data and statistics about a revlog"""
3021 """show data and statistics about a revlog"""
3022 opts = pycompat.byteskwargs(opts)
3022 opts = pycompat.byteskwargs(opts)
3023 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3023 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3024
3024
3025 if opts.get(b"dump"):
3025 if opts.get(b"dump"):
3026 numrevs = len(r)
3026 numrevs = len(r)
3027 ui.write(
3027 ui.write(
3028 (
3028 (
3029 b"# rev p1rev p2rev start end deltastart base p1 p2"
3029 b"# rev p1rev p2rev start end deltastart base p1 p2"
3030 b" rawsize totalsize compression heads chainlen\n"
3030 b" rawsize totalsize compression heads chainlen\n"
3031 )
3031 )
3032 )
3032 )
3033 ts = 0
3033 ts = 0
3034 heads = set()
3034 heads = set()
3035
3035
3036 for rev in pycompat.xrange(numrevs):
3036 for rev in pycompat.xrange(numrevs):
3037 dbase = r.deltaparent(rev)
3037 dbase = r.deltaparent(rev)
3038 if dbase == -1:
3038 if dbase == -1:
3039 dbase = rev
3039 dbase = rev
3040 cbase = r.chainbase(rev)
3040 cbase = r.chainbase(rev)
3041 clen = r.chainlen(rev)
3041 clen = r.chainlen(rev)
3042 p1, p2 = r.parentrevs(rev)
3042 p1, p2 = r.parentrevs(rev)
3043 rs = r.rawsize(rev)
3043 rs = r.rawsize(rev)
3044 ts = ts + rs
3044 ts = ts + rs
3045 heads -= set(r.parentrevs(rev))
3045 heads -= set(r.parentrevs(rev))
3046 heads.add(rev)
3046 heads.add(rev)
3047 try:
3047 try:
3048 compression = ts / r.end(rev)
3048 compression = ts / r.end(rev)
3049 except ZeroDivisionError:
3049 except ZeroDivisionError:
3050 compression = 0
3050 compression = 0
3051 ui.write(
3051 ui.write(
3052 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3052 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3053 b"%11d %5d %8d\n"
3053 b"%11d %5d %8d\n"
3054 % (
3054 % (
3055 rev,
3055 rev,
3056 p1,
3056 p1,
3057 p2,
3057 p2,
3058 r.start(rev),
3058 r.start(rev),
3059 r.end(rev),
3059 r.end(rev),
3060 r.start(dbase),
3060 r.start(dbase),
3061 r.start(cbase),
3061 r.start(cbase),
3062 r.start(p1),
3062 r.start(p1),
3063 r.start(p2),
3063 r.start(p2),
3064 rs,
3064 rs,
3065 ts,
3065 ts,
3066 compression,
3066 compression,
3067 len(heads),
3067 len(heads),
3068 clen,
3068 clen,
3069 )
3069 )
3070 )
3070 )
3071 return 0
3071 return 0
3072
3072
3073 format = r._format_version
3073 format = r._format_version
3074 v = r._format_flags
3074 v = r._format_flags
3075 flags = []
3075 flags = []
3076 gdelta = False
3076 gdelta = False
3077 if v & revlog.FLAG_INLINE_DATA:
3077 if v & revlog.FLAG_INLINE_DATA:
3078 flags.append(b'inline')
3078 flags.append(b'inline')
3079 if v & revlog.FLAG_GENERALDELTA:
3079 if v & revlog.FLAG_GENERALDELTA:
3080 gdelta = True
3080 gdelta = True
3081 flags.append(b'generaldelta')
3081 flags.append(b'generaldelta')
3082 if not flags:
3082 if not flags:
3083 flags = [b'(none)']
3083 flags = [b'(none)']
3084
3084
3085 ### tracks merge vs single parent
3085 ### tracks merge vs single parent
3086 nummerges = 0
3086 nummerges = 0
3087
3087
3088 ### tracks ways the "delta" are build
3088 ### tracks ways the "delta" are build
3089 # nodelta
3089 # nodelta
3090 numempty = 0
3090 numempty = 0
3091 numemptytext = 0
3091 numemptytext = 0
3092 numemptydelta = 0
3092 numemptydelta = 0
3093 # full file content
3093 # full file content
3094 numfull = 0
3094 numfull = 0
3095 # intermediate snapshot against a prior snapshot
3095 # intermediate snapshot against a prior snapshot
3096 numsemi = 0
3096 numsemi = 0
3097 # snapshot count per depth
3097 # snapshot count per depth
3098 numsnapdepth = collections.defaultdict(lambda: 0)
3098 numsnapdepth = collections.defaultdict(lambda: 0)
3099 # delta against previous revision
3099 # delta against previous revision
3100 numprev = 0
3100 numprev = 0
3101 # delta against first or second parent (not prev)
3101 # delta against first or second parent (not prev)
3102 nump1 = 0
3102 nump1 = 0
3103 nump2 = 0
3103 nump2 = 0
3104 # delta against neither prev nor parents
3104 # delta against neither prev nor parents
3105 numother = 0
3105 numother = 0
3106 # delta against prev that are also first or second parent
3106 # delta against prev that are also first or second parent
3107 # (details of `numprev`)
3107 # (details of `numprev`)
3108 nump1prev = 0
3108 nump1prev = 0
3109 nump2prev = 0
3109 nump2prev = 0
3110
3110
3111 # data about delta chain of each revs
3111 # data about delta chain of each revs
3112 chainlengths = []
3112 chainlengths = []
3113 chainbases = []
3113 chainbases = []
3114 chainspans = []
3114 chainspans = []
3115
3115
3116 # data about each revision
3116 # data about each revision
3117 datasize = [None, 0, 0]
3117 datasize = [None, 0, 0]
3118 fullsize = [None, 0, 0]
3118 fullsize = [None, 0, 0]
3119 semisize = [None, 0, 0]
3119 semisize = [None, 0, 0]
3120 # snapshot count per depth
3120 # snapshot count per depth
3121 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3121 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3122 deltasize = [None, 0, 0]
3122 deltasize = [None, 0, 0]
3123 chunktypecounts = {}
3123 chunktypecounts = {}
3124 chunktypesizes = {}
3124 chunktypesizes = {}
3125
3125
3126 def addsize(size, l):
3126 def addsize(size, l):
3127 if l[0] is None or size < l[0]:
3127 if l[0] is None or size < l[0]:
3128 l[0] = size
3128 l[0] = size
3129 if size > l[1]:
3129 if size > l[1]:
3130 l[1] = size
3130 l[1] = size
3131 l[2] += size
3131 l[2] += size
3132
3132
3133 numrevs = len(r)
3133 numrevs = len(r)
3134 for rev in pycompat.xrange(numrevs):
3134 for rev in pycompat.xrange(numrevs):
3135 p1, p2 = r.parentrevs(rev)
3135 p1, p2 = r.parentrevs(rev)
3136 delta = r.deltaparent(rev)
3136 delta = r.deltaparent(rev)
3137 if format > 0:
3137 if format > 0:
3138 addsize(r.rawsize(rev), datasize)
3138 addsize(r.rawsize(rev), datasize)
3139 if p2 != nullrev:
3139 if p2 != nullrev:
3140 nummerges += 1
3140 nummerges += 1
3141 size = r.length(rev)
3141 size = r.length(rev)
3142 if delta == nullrev:
3142 if delta == nullrev:
3143 chainlengths.append(0)
3143 chainlengths.append(0)
3144 chainbases.append(r.start(rev))
3144 chainbases.append(r.start(rev))
3145 chainspans.append(size)
3145 chainspans.append(size)
3146 if size == 0:
3146 if size == 0:
3147 numempty += 1
3147 numempty += 1
3148 numemptytext += 1
3148 numemptytext += 1
3149 else:
3149 else:
3150 numfull += 1
3150 numfull += 1
3151 numsnapdepth[0] += 1
3151 numsnapdepth[0] += 1
3152 addsize(size, fullsize)
3152 addsize(size, fullsize)
3153 addsize(size, snapsizedepth[0])
3153 addsize(size, snapsizedepth[0])
3154 else:
3154 else:
3155 chainlengths.append(chainlengths[delta] + 1)
3155 chainlengths.append(chainlengths[delta] + 1)
3156 baseaddr = chainbases[delta]
3156 baseaddr = chainbases[delta]
3157 revaddr = r.start(rev)
3157 revaddr = r.start(rev)
3158 chainbases.append(baseaddr)
3158 chainbases.append(baseaddr)
3159 chainspans.append((revaddr - baseaddr) + size)
3159 chainspans.append((revaddr - baseaddr) + size)
3160 if size == 0:
3160 if size == 0:
3161 numempty += 1
3161 numempty += 1
3162 numemptydelta += 1
3162 numemptydelta += 1
3163 elif r.issnapshot(rev):
3163 elif r.issnapshot(rev):
3164 addsize(size, semisize)
3164 addsize(size, semisize)
3165 numsemi += 1
3165 numsemi += 1
3166 depth = r.snapshotdepth(rev)
3166 depth = r.snapshotdepth(rev)
3167 numsnapdepth[depth] += 1
3167 numsnapdepth[depth] += 1
3168 addsize(size, snapsizedepth[depth])
3168 addsize(size, snapsizedepth[depth])
3169 else:
3169 else:
3170 addsize(size, deltasize)
3170 addsize(size, deltasize)
3171 if delta == rev - 1:
3171 if delta == rev - 1:
3172 numprev += 1
3172 numprev += 1
3173 if delta == p1:
3173 if delta == p1:
3174 nump1prev += 1
3174 nump1prev += 1
3175 elif delta == p2:
3175 elif delta == p2:
3176 nump2prev += 1
3176 nump2prev += 1
3177 elif delta == p1:
3177 elif delta == p1:
3178 nump1 += 1
3178 nump1 += 1
3179 elif delta == p2:
3179 elif delta == p2:
3180 nump2 += 1
3180 nump2 += 1
3181 elif delta != nullrev:
3181 elif delta != nullrev:
3182 numother += 1
3182 numother += 1
3183
3183
3184 # Obtain data on the raw chunks in the revlog.
3184 # Obtain data on the raw chunks in the revlog.
3185 if util.safehasattr(r, b'_getsegmentforrevs'):
3185 if util.safehasattr(r, b'_getsegmentforrevs'):
3186 segment = r._getsegmentforrevs(rev, rev)[1]
3186 segment = r._getsegmentforrevs(rev, rev)[1]
3187 else:
3187 else:
3188 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3188 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3189 if segment:
3189 if segment:
3190 chunktype = bytes(segment[0:1])
3190 chunktype = bytes(segment[0:1])
3191 else:
3191 else:
3192 chunktype = b'empty'
3192 chunktype = b'empty'
3193
3193
3194 if chunktype not in chunktypecounts:
3194 if chunktype not in chunktypecounts:
3195 chunktypecounts[chunktype] = 0
3195 chunktypecounts[chunktype] = 0
3196 chunktypesizes[chunktype] = 0
3196 chunktypesizes[chunktype] = 0
3197
3197
3198 chunktypecounts[chunktype] += 1
3198 chunktypecounts[chunktype] += 1
3199 chunktypesizes[chunktype] += size
3199 chunktypesizes[chunktype] += size
3200
3200
3201 # Adjust size min value for empty cases
3201 # Adjust size min value for empty cases
3202 for size in (datasize, fullsize, semisize, deltasize):
3202 for size in (datasize, fullsize, semisize, deltasize):
3203 if size[0] is None:
3203 if size[0] is None:
3204 size[0] = 0
3204 size[0] = 0
3205
3205
3206 numdeltas = numrevs - numfull - numempty - numsemi
3206 numdeltas = numrevs - numfull - numempty - numsemi
3207 numoprev = numprev - nump1prev - nump2prev
3207 numoprev = numprev - nump1prev - nump2prev
3208 totalrawsize = datasize[2]
3208 totalrawsize = datasize[2]
3209 datasize[2] /= numrevs
3209 datasize[2] /= numrevs
3210 fulltotal = fullsize[2]
3210 fulltotal = fullsize[2]
3211 if numfull == 0:
3211 if numfull == 0:
3212 fullsize[2] = 0
3212 fullsize[2] = 0
3213 else:
3213 else:
3214 fullsize[2] /= numfull
3214 fullsize[2] /= numfull
3215 semitotal = semisize[2]
3215 semitotal = semisize[2]
3216 snaptotal = {}
3216 snaptotal = {}
3217 if numsemi > 0:
3217 if numsemi > 0:
3218 semisize[2] /= numsemi
3218 semisize[2] /= numsemi
3219 for depth in snapsizedepth:
3219 for depth in snapsizedepth:
3220 snaptotal[depth] = snapsizedepth[depth][2]
3220 snaptotal[depth] = snapsizedepth[depth][2]
3221 snapsizedepth[depth][2] /= numsnapdepth[depth]
3221 snapsizedepth[depth][2] /= numsnapdepth[depth]
3222
3222
3223 deltatotal = deltasize[2]
3223 deltatotal = deltasize[2]
3224 if numdeltas > 0:
3224 if numdeltas > 0:
3225 deltasize[2] /= numdeltas
3225 deltasize[2] /= numdeltas
3226 totalsize = fulltotal + semitotal + deltatotal
3226 totalsize = fulltotal + semitotal + deltatotal
3227 avgchainlen = sum(chainlengths) / numrevs
3227 avgchainlen = sum(chainlengths) / numrevs
3228 maxchainlen = max(chainlengths)
3228 maxchainlen = max(chainlengths)
3229 maxchainspan = max(chainspans)
3229 maxchainspan = max(chainspans)
3230 compratio = 1
3230 compratio = 1
3231 if totalsize:
3231 if totalsize:
3232 compratio = totalrawsize / totalsize
3232 compratio = totalrawsize / totalsize
3233
3233
3234 basedfmtstr = b'%%%dd\n'
3234 basedfmtstr = b'%%%dd\n'
3235 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3235 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3236
3236
3237 def dfmtstr(max):
3237 def dfmtstr(max):
3238 return basedfmtstr % len(str(max))
3238 return basedfmtstr % len(str(max))
3239
3239
3240 def pcfmtstr(max, padding=0):
3240 def pcfmtstr(max, padding=0):
3241 return basepcfmtstr % (len(str(max)), b' ' * padding)
3241 return basepcfmtstr % (len(str(max)), b' ' * padding)
3242
3242
3243 def pcfmt(value, total):
3243 def pcfmt(value, total):
3244 if total:
3244 if total:
3245 return (value, 100 * float(value) / total)
3245 return (value, 100 * float(value) / total)
3246 else:
3246 else:
3247 return value, 100.0
3247 return value, 100.0
3248
3248
3249 ui.writenoi18n(b'format : %d\n' % format)
3249 ui.writenoi18n(b'format : %d\n' % format)
3250 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3250 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3251
3251
3252 ui.write(b'\n')
3252 ui.write(b'\n')
3253 fmt = pcfmtstr(totalsize)
3253 fmt = pcfmtstr(totalsize)
3254 fmt2 = dfmtstr(totalsize)
3254 fmt2 = dfmtstr(totalsize)
3255 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3255 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3256 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3256 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3257 ui.writenoi18n(
3257 ui.writenoi18n(
3258 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3258 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3259 )
3259 )
3260 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3260 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3261 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3261 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3262 ui.writenoi18n(
3262 ui.writenoi18n(
3263 b' text : '
3263 b' text : '
3264 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3264 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3265 )
3265 )
3266 ui.writenoi18n(
3266 ui.writenoi18n(
3267 b' delta : '
3267 b' delta : '
3268 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3268 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3269 )
3269 )
3270 ui.writenoi18n(
3270 ui.writenoi18n(
3271 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3271 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3272 )
3272 )
3273 for depth in sorted(numsnapdepth):
3273 for depth in sorted(numsnapdepth):
3274 ui.write(
3274 ui.write(
3275 (b' lvl-%-3d : ' % depth)
3275 (b' lvl-%-3d : ' % depth)
3276 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3276 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3277 )
3277 )
3278 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3278 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3279 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3279 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3280 ui.writenoi18n(
3280 ui.writenoi18n(
3281 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3281 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3282 )
3282 )
3283 for depth in sorted(numsnapdepth):
3283 for depth in sorted(numsnapdepth):
3284 ui.write(
3284 ui.write(
3285 (b' lvl-%-3d : ' % depth)
3285 (b' lvl-%-3d : ' % depth)
3286 + fmt % pcfmt(snaptotal[depth], totalsize)
3286 + fmt % pcfmt(snaptotal[depth], totalsize)
3287 )
3287 )
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3289
3289
3290 def fmtchunktype(chunktype):
3290 def fmtchunktype(chunktype):
3291 if chunktype == b'empty':
3291 if chunktype == b'empty':
3292 return b' %s : ' % chunktype
3292 return b' %s : ' % chunktype
3293 elif chunktype in pycompat.bytestr(string.ascii_letters):
3293 elif chunktype in pycompat.bytestr(string.ascii_letters):
3294 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3294 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3295 else:
3295 else:
3296 return b' 0x%s : ' % hex(chunktype)
3296 return b' 0x%s : ' % hex(chunktype)
3297
3297
3298 ui.write(b'\n')
3298 ui.write(b'\n')
3299 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3299 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3300 for chunktype in sorted(chunktypecounts):
3300 for chunktype in sorted(chunktypecounts):
3301 ui.write(fmtchunktype(chunktype))
3301 ui.write(fmtchunktype(chunktype))
3302 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3302 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3303 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3303 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3304 for chunktype in sorted(chunktypecounts):
3304 for chunktype in sorted(chunktypecounts):
3305 ui.write(fmtchunktype(chunktype))
3305 ui.write(fmtchunktype(chunktype))
3306 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3306 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3307
3307
3308 ui.write(b'\n')
3308 ui.write(b'\n')
3309 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3309 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3310 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3310 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3311 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3311 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3312 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3312 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3313 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3313 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3314
3314
3315 if format > 0:
3315 if format > 0:
3316 ui.write(b'\n')
3316 ui.write(b'\n')
3317 ui.writenoi18n(
3317 ui.writenoi18n(
3318 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3318 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3319 % tuple(datasize)
3319 % tuple(datasize)
3320 )
3320 )
3321 ui.writenoi18n(
3321 ui.writenoi18n(
3322 b'full revision size (min/max/avg) : %d / %d / %d\n'
3322 b'full revision size (min/max/avg) : %d / %d / %d\n'
3323 % tuple(fullsize)
3323 % tuple(fullsize)
3324 )
3324 )
3325 ui.writenoi18n(
3325 ui.writenoi18n(
3326 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3326 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3327 % tuple(semisize)
3327 % tuple(semisize)
3328 )
3328 )
3329 for depth in sorted(snapsizedepth):
3329 for depth in sorted(snapsizedepth):
3330 if depth == 0:
3330 if depth == 0:
3331 continue
3331 continue
3332 ui.writenoi18n(
3332 ui.writenoi18n(
3333 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3333 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3334 % ((depth,) + tuple(snapsizedepth[depth]))
3334 % ((depth,) + tuple(snapsizedepth[depth]))
3335 )
3335 )
3336 ui.writenoi18n(
3336 ui.writenoi18n(
3337 b'delta size (min/max/avg) : %d / %d / %d\n'
3337 b'delta size (min/max/avg) : %d / %d / %d\n'
3338 % tuple(deltasize)
3338 % tuple(deltasize)
3339 )
3339 )
3340
3340
3341 if numdeltas > 0:
3341 if numdeltas > 0:
3342 ui.write(b'\n')
3342 ui.write(b'\n')
3343 fmt = pcfmtstr(numdeltas)
3343 fmt = pcfmtstr(numdeltas)
3344 fmt2 = pcfmtstr(numdeltas, 4)
3344 fmt2 = pcfmtstr(numdeltas, 4)
3345 ui.writenoi18n(
3345 ui.writenoi18n(
3346 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3346 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3347 )
3347 )
3348 if numprev > 0:
3348 if numprev > 0:
3349 ui.writenoi18n(
3349 ui.writenoi18n(
3350 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3350 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3351 )
3351 )
3352 ui.writenoi18n(
3352 ui.writenoi18n(
3353 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3353 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3354 )
3354 )
3355 ui.writenoi18n(
3355 ui.writenoi18n(
3356 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3356 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3357 )
3357 )
3358 if gdelta:
3358 if gdelta:
3359 ui.writenoi18n(
3359 ui.writenoi18n(
3360 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3360 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3361 )
3361 )
3362 ui.writenoi18n(
3362 ui.writenoi18n(
3363 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3363 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3364 )
3364 )
3365 ui.writenoi18n(
3365 ui.writenoi18n(
3366 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3366 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3367 )
3367 )
3368
3368
3369
3369
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only the two historical dump layouts (0 and 1) are supported.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full-length hex node ids under --debug, short forms otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # Probe the first revision to learn how wide the node id columns
    # must be. There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the header row matching the selected format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision; layout mirrors the header printed above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # A damaged entry still gets a row, with null parents.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node ids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3483
3483
3484
3484
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset pipeline: each stage transforms the tree produced by the
    # previous one. Order matters; names are the user-visible -p values.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimize) stage when asked not to optimize.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: printed only when
    # the stage actually changed the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping each intermediate tree so
    # --verify-optimized can re-evaluate specific stages later.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff
        # the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Mismatch: print a unified-style diff of the two revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the result.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3616
3616
3617
3617
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented so far.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    fdopt = opts[b'logiofd']
    fileopt = opts[b'logiofile']
    # The two logging destinations are mutually exclusive.
    if fdopt and fileopt:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if fdopt:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(fdopt)
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif fileopt:
        logfh = open(fileopt, b'ab', 0)

    # Hand control to the wire protocol server; this blocks until the
    # client disconnects.
    wireprotoserver.sshserver(ui, repo, logfh=logfh).serve_forever()
3666
3666
3667
3667
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This can be useful for writing repository conversion tools,
    but should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions up front; rev2 defaults to the null revision
    # when omitted, making the result a single-parent state.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; the working directory
    # contents are deliberately left alone (see the warning above).
    with repo.wlock():
        repo.setparents(node1, node2)
3695
3695
3696
3696
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional argument is the revision, not a file.
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Reach through any wrapper to the underlying revlog when present.
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries in stable (key-sorted) order.
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3723
3723
3724
3724
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    # The chain-building machinery relies on the Windows certificate
    # store, so bail out everywhere else.
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # Without an explicit SOURCE, fall back to the repo's 'default'
        # path -- which requires being inside a repository.
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known default port are accepted.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    # Imported lazily: the win32 module only loads on Windows, and the
    # platform check above guarantees we are there.
    from . import win32

    # Verification is disabled (CERT_NONE) on purpose: we only need the
    # peer's raw certificate to inspect its chain, not a trusted session.
    # NOTE(review): ssl.wrap_socket() is deprecated upstream and removed
    # in Python 3.12 -- confirm against the project's supported Python
    # range before modernizing to ssl.SSLContext.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes rather than a dict.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only checks; build=False avoids touching the store.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second pass (build=True by default) attempts the repair.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3796
3796
3797
3797
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle, most recently modified first, so
    # the newest backups are listed (and searched for --recover) first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize bundle/force before handing opts to getremotechanges below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # standard log options --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # A bundle whose parent revision is missing locally cannot be
            # opened; warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence progress/status chatter while computing incoming changes.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        # Bundle2 and bundle1 have different application APIs.
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # First bundle containing the node wins; stop looking.
                        break
            else:
                # Listing mode: print the backup's mtime header, then either
                # just the path (--verbose) or a one-line-per-changeset view.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always remove the temporary bundle repository state.
            cleanupfn()
3938
3938
3939
3939
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, revision) recorded for
    # the given revision (or the working directory parent when no revision
    # is specified), sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    substate = ctx.substate
    for path in sorted(substate):
        state = substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3951
3951
3952
3952
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily so the interpreter machinery is only loaded when
    # this debug command is actually invoked.
    import code

    # repo may be None because of optionalrepo=True.
    local_namespace = dict(ui=ui, repo=repo)
    code.interact(local=local_namespace)
3968
3968
3969
3969
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    fmt_ctx = bytes
    fmt_node = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % fmt_ctx(ctx))
        succ_sets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succ_sets:
            # An empty set (pruned changeset) still produces a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(fmt_node(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(fmt_node(node))
            ui.write(b'\n')
4024
4024
4025
4025
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what the cache already holds.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode is None:
            tagsnodedisplay = b'missing'
        elif not tagsnode:
            # Present but falsy (e.g. empty) cache entry.
            tagsnodedisplay = b'invalid'
        else:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4044
4044
4045
4045
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository (optionalrepo=True means we may not
        # have one), so fail early with the standard "no repo" error.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.  A missing
    # '=', an empty key, or the reserved key 'ui' are all rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree and, if alias expansion changed it,
        # the expanded tree as well.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the defined properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4109
4109
4110
4110
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may return None (no response available); substitute a
    # placeholder so the echoed output is always well-formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4125
4125
4126
4126
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returned so tests can observe the answer.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4139
4139
4140
4140
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while the
    # caches are rebuilt.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4146
4146
4147
4147
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin CLI wrapper: all of the analysis/upgrade logic lives in the
    # upgrade module.  --optimize may repeat, hence the set() dedup.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4197
4197
4198
4198
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize path separators to '/' for display (only
    # relevant on platforms where os.sep is not '/').
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = util.normpath
    else:
        f = lambda fn: fn
    # Column widths sized to the longest repo-relative and cwd-relative
    # paths, so the output lines up.  Generator expressions avoid building
    # two throwaway lists.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            f(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4225
4225
4226
4226
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Render divergent nodes (if any) as "hex (phase)" pairs followed
        # by a trailing space separating them from the reason text.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            pieces = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(pieces) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4244
4244
4245
4245
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the command-specific
        # flags with non-empty values are forwarded over the wire.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        # Always release the peer connection.
        repo.close()
4276
4276
4277
4277
4278 def _parsewirelangblocks(fh):
4278 def _parsewirelangblocks(fh):
4279 activeaction = None
4279 activeaction = None
4280 blocklines = []
4280 blocklines = []
4281 lastindent = 0
4281 lastindent = 0
4282
4282
4283 for line in fh:
4283 for line in fh:
4284 line = line.rstrip()
4284 line = line.rstrip()
4285 if not line:
4285 if not line:
4286 continue
4286 continue
4287
4287
4288 if line.startswith(b'#'):
4288 if line.startswith(b'#'):
4289 continue
4289 continue
4290
4290
4291 if not line.startswith(b' '):
4291 if not line.startswith(b' '):
4292 # New block. Flush previous one.
4292 # New block. Flush previous one.
4293 if activeaction:
4293 if activeaction:
4294 yield activeaction, blocklines
4294 yield activeaction, blocklines
4295
4295
4296 activeaction = line
4296 activeaction = line
4297 blocklines = []
4297 blocklines = []
4298 lastindent = 0
4298 lastindent = 0
4299 continue
4299 continue
4300
4300
4301 # Else we start with an indent.
4301 # Else we start with an indent.
4302
4302
4303 if not activeaction:
4303 if not activeaction:
4304 raise error.Abort(_(b'indented line outside of block'))
4304 raise error.Abort(_(b'indented line outside of block'))
4305
4305
4306 indent = len(line) - len(line.lstrip())
4306 indent = len(line) - len(line.lstrip())
4307
4307
4308 # If this line is indented more than the last line, concatenate it.
4308 # If this line is indented more than the last line, concatenate it.
4309 if indent > lastindent and blocklines:
4309 if indent > lastindent and blocklines:
4310 blocklines[-1] += line.lstrip()
4310 blocklines[-1] += line.lstrip()
4311 else:
4311 else:
4312 blocklines.append(line)
4312 blocklines.append(line)
4313 lastindent = indent
4313 lastindent = indent
4314
4314
4315 # Flush last block.
4315 # Flush last block.
4316 if activeaction:
4316 if activeaction:
4317 yield activeaction, blocklines
4317 yield activeaction, blocklines
4318
4318
4319
4319
4320 @command(
4320 @command(
4321 b'debugwireproto',
4321 b'debugwireproto',
4322 [
4322 [
4323 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4323 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4324 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4324 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4325 (
4325 (
4326 b'',
4326 b'',
4327 b'noreadstderr',
4327 b'noreadstderr',
4328 False,
4328 False,
4329 _(b'do not read from stderr of the remote'),
4329 _(b'do not read from stderr of the remote'),
4330 ),
4330 ),
4331 (
4331 (
4332 b'',
4332 b'',
4333 b'nologhandshake',
4333 b'nologhandshake',
4334 False,
4334 False,
4335 _(b'do not log I/O related to the peer handshake'),
4335 _(b'do not log I/O related to the peer handshake'),
4336 ),
4336 ),
4337 ]
4337 ]
4338 + cmdutil.remoteopts,
4338 + cmdutil.remoteopts,
4339 _(b'[PATH]'),
4339 _(b'[PATH]'),
4340 optionalrepo=True,
4340 optionalrepo=True,
4341 )
4341 )
4342 def debugwireproto(ui, repo, path=None, **opts):
4342 def debugwireproto(ui, repo, path=None, **opts):
4343 """send wire protocol commands to a server
4343 """send wire protocol commands to a server
4344
4344
4345 This command can be used to issue wire protocol commands to remote
4345 This command can be used to issue wire protocol commands to remote
4346 peers and to debug the raw data being exchanged.
4346 peers and to debug the raw data being exchanged.
4347
4347
4348 ``--localssh`` will start an SSH server against the current repository
4348 ``--localssh`` will start an SSH server against the current repository
4349 and connect to that. By default, the connection will perform a handshake
4349 and connect to that. By default, the connection will perform a handshake
4350 and establish an appropriate peer instance.
4350 and establish an appropriate peer instance.
4351
4351
4352 ``--peer`` can be used to bypass the handshake protocol and construct a
4352 ``--peer`` can be used to bypass the handshake protocol and construct a
4353 peer instance using the specified class type. Valid values are ``raw``,
4353 peer instance using the specified class type. Valid values are ``raw``,
4354 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4354 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4355 don't support higher-level command actions.
4355 don't support higher-level command actions.
4356
4356
4357 ``--noreadstderr`` can be used to disable automatic reading from stderr
4357 ``--noreadstderr`` can be used to disable automatic reading from stderr
4358 of the peer (for SSH connections only). Disabling automatic reading of
4358 of the peer (for SSH connections only). Disabling automatic reading of
4359 stderr is useful for making output more deterministic.
4359 stderr is useful for making output more deterministic.
4360
4360
4361 Commands are issued via a mini language which is specified via stdin.
4361 Commands are issued via a mini language which is specified via stdin.
4362 The language consists of individual actions to perform. An action is
4362 The language consists of individual actions to perform. An action is
4363 defined by a block. A block is defined as a line with no leading
4363 defined by a block. A block is defined as a line with no leading
4364 space followed by 0 or more lines with leading space. Blocks are
4364 space followed by 0 or more lines with leading space. Blocks are
4365 effectively a high-level command with additional metadata.
4365 effectively a high-level command with additional metadata.
4366
4366
4367 Lines beginning with ``#`` are ignored.
4367 Lines beginning with ``#`` are ignored.
4368
4368
4369 The following sections denote available actions.
4369 The following sections denote available actions.
4370
4370
4371 raw
4371 raw
4372 ---
4372 ---
4373
4373
4374 Send raw data to the server.
4374 Send raw data to the server.
4375
4375
4376 The block payload contains the raw data to send as one atomic send
4376 The block payload contains the raw data to send as one atomic send
4377 operation. The data may not actually be delivered in a single system
4377 operation. The data may not actually be delivered in a single system
4378 call: it depends on the abilities of the transport being used.
4378 call: it depends on the abilities of the transport being used.
4379
4379
4380 Each line in the block is de-indented and concatenated. Then, that
4380 Each line in the block is de-indented and concatenated. Then, that
4381 value is evaluated as a Python b'' literal. This allows the use of
4381 value is evaluated as a Python b'' literal. This allows the use of
4382 backslash escaping, etc.
4382 backslash escaping, etc.
4383
4383
4384 raw+
4384 raw+
4385 ----
4385 ----
4386
4386
4387 Behaves like ``raw`` except flushes output afterwards.
4387 Behaves like ``raw`` except flushes output afterwards.
4388
4388
4389 command <X>
4389 command <X>
4390 -----------
4390 -----------
4391
4391
4392 Send a request to run a named command, whose name follows the ``command``
4392 Send a request to run a named command, whose name follows the ``command``
4393 string.
4393 string.
4394
4394
4395 Arguments to the command are defined as lines in this block. The format of
4395 Arguments to the command are defined as lines in this block. The format of
4396 each line is ``<key> <value>``. e.g.::
4396 each line is ``<key> <value>``. e.g.::
4397
4397
4398 command listkeys
4398 command listkeys
4399 namespace bookmarks
4399 namespace bookmarks
4400
4400
4401 If the value begins with ``eval:``, it will be interpreted as a Python
4401 If the value begins with ``eval:``, it will be interpreted as a Python
4402 literal expression. Otherwise values are interpreted as Python b'' literals.
4402 literal expression. Otherwise values are interpreted as Python b'' literals.
4403 This allows sending complex types and encoding special byte sequences via
4403 This allows sending complex types and encoding special byte sequences via
4404 backslash escaping.
4404 backslash escaping.
4405
4405
4406 The following arguments have special meaning:
4406 The following arguments have special meaning:
4407
4407
4408 ``PUSHFILE``
4408 ``PUSHFILE``
4409 When defined, the *push* mechanism of the peer will be used instead
4409 When defined, the *push* mechanism of the peer will be used instead
4410 of the static request-response mechanism and the content of the
4410 of the static request-response mechanism and the content of the
4411 file specified in the value of this argument will be sent as the
4411 file specified in the value of this argument will be sent as the
4412 command payload.
4412 command payload.
4413
4413
4414 This can be used to submit a local bundle file to the remote.
4414 This can be used to submit a local bundle file to the remote.
4415
4415
4416 batchbegin
4416 batchbegin
4417 ----------
4417 ----------
4418
4418
4419 Instruct the peer to begin a batched send.
4419 Instruct the peer to begin a batched send.
4420
4420
4421 All ``command`` blocks are queued for execution until the next
4421 All ``command`` blocks are queued for execution until the next
4422 ``batchsubmit`` block.
4422 ``batchsubmit`` block.
4423
4423
4424 batchsubmit
4424 batchsubmit
4425 -----------
4425 -----------
4426
4426
4427 Submit previously queued ``command`` blocks as a batch request.
4427 Submit previously queued ``command`` blocks as a batch request.
4428
4428
4429 This action MUST be paired with a ``batchbegin`` action.
4429 This action MUST be paired with a ``batchbegin`` action.
4430
4430
4431 httprequest <method> <path>
4431 httprequest <method> <path>
4432 ---------------------------
4432 ---------------------------
4433
4433
4434 (HTTP peer only)
4434 (HTTP peer only)
4435
4435
4436 Send an HTTP request to the peer.
4436 Send an HTTP request to the peer.
4437
4437
4438 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4438 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4439
4439
4440 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4440 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4441 headers to add to the request. e.g. ``Accept: foo``.
4441 headers to add to the request. e.g. ``Accept: foo``.
4442
4442
4443 The following arguments are special:
4443 The following arguments are special:
4444
4444
4445 ``BODYFILE``
4445 ``BODYFILE``
4446 The content of the file defined as the value to this argument will be
4446 The content of the file defined as the value to this argument will be
4447 transferred verbatim as the HTTP request body.
4447 transferred verbatim as the HTTP request body.
4448
4448
4449 ``frame <type> <flags> <payload>``
4449 ``frame <type> <flags> <payload>``
4450 Send a unified protocol frame as part of the request body.
4450 Send a unified protocol frame as part of the request body.
4451
4451
4452 All frames will be collected and sent as the body to the HTTP
4452 All frames will be collected and sent as the body to the HTTP
4453 request.
4453 request.
4454
4454
4455 close
4455 close
4456 -----
4456 -----
4457
4457
4458 Close the connection to the server.
4458 Close the connection to the server.
4459
4459
4460 flush
4460 flush
4461 -----
4461 -----
4462
4462
4463 Flush data written to the server.
4463 Flush data written to the server.
4464
4464
4465 readavailable
4465 readavailable
4466 -------------
4466 -------------
4467
4467
4468 Close the write end of the connection and read all available data from
4468 Close the write end of the connection and read all available data from
4469 the server.
4469 the server.
4470
4470
4471 If the connection to the server encompasses multiple pipes, we poll both
4471 If the connection to the server encompasses multiple pipes, we poll both
4472 pipes and read available data.
4472 pipes and read available data.
4473
4473
4474 readline
4474 readline
4475 --------
4475 --------
4476
4476
4477 Read a line of output from the server. If there are multiple output
4477 Read a line of output from the server. If there are multiple output
4478 pipes, reads only the main pipe.
4478 pipes, reads only the main pipe.
4479
4479
4480 ereadline
4480 ereadline
4481 ---------
4481 ---------
4482
4482
4483 Like ``readline``, but read from the stderr pipe, if available.
4483 Like ``readline``, but read from the stderr pipe, if available.
4484
4484
4485 read <X>
4485 read <X>
4486 --------
4486 --------
4487
4487
4488 ``read()`` N bytes from the server's main output pipe.
4488 ``read()`` N bytes from the server's main output pipe.
4489
4489
4490 eread <X>
4490 eread <X>
4491 ---------
4491 ---------
4492
4492
4493 ``read()`` N bytes from the server's stderr pipe, if available.
4493 ``read()`` N bytes from the server's stderr pipe, if available.
4494
4494
4495 Specifying Unified Frame-Based Protocol Frames
4495 Specifying Unified Frame-Based Protocol Frames
4496 ----------------------------------------------
4496 ----------------------------------------------
4497
4497
4498 It is possible to emit a *Unified Frame-Based Protocol* by using special
4498 It is possible to emit a *Unified Frame-Based Protocol* by using special
4499 syntax.
4499 syntax.
4500
4500
4501 A frame is composed as a type, flags, and payload. These can be parsed
4501 A frame is composed as a type, flags, and payload. These can be parsed
4502 from a string of the form:
4502 from a string of the form:
4503
4503
4504 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4504 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4505
4505
4506 ``request-id`` and ``stream-id`` are integers defining the request and
4506 ``request-id`` and ``stream-id`` are integers defining the request and
4507 stream identifiers.
4507 stream identifiers.
4508
4508
4509 ``type`` can be an integer value for the frame type or the string name
4509 ``type`` can be an integer value for the frame type or the string name
4510 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4510 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4511 ``command-name``.
4511 ``command-name``.
4512
4512
4513 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4513 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4514 components. Each component (and there can be just one) can be an integer
4514 components. Each component (and there can be just one) can be an integer
4515 or a flag name for stream flags or frame flags, respectively. Values are
4515 or a flag name for stream flags or frame flags, respectively. Values are
4516 resolved to integers and then bitwise OR'd together.
4516 resolved to integers and then bitwise OR'd together.
4517
4517
4518 ``payload`` represents the raw frame payload. If it begins with
4518 ``payload`` represents the raw frame payload. If it begins with
4519 ``cbor:``, the following string is evaluated as Python code and the
4519 ``cbor:``, the following string is evaluated as Python code and the
4520 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4520 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4521 as a Python byte string literal.
4521 as a Python byte string literal.
4522 """
4522 """
4523 opts = pycompat.byteskwargs(opts)
4523 opts = pycompat.byteskwargs(opts)
4524
4524
4525 if opts[b'localssh'] and not repo:
4525 if opts[b'localssh'] and not repo:
4526 raise error.Abort(_(b'--localssh requires a repository'))
4526 raise error.Abort(_(b'--localssh requires a repository'))
4527
4527
4528 if opts[b'peer'] and opts[b'peer'] not in (
4528 if opts[b'peer'] and opts[b'peer'] not in (
4529 b'raw',
4529 b'raw',
4530 b'ssh1',
4530 b'ssh1',
4531 ):
4531 ):
4532 raise error.Abort(
4532 raise error.Abort(
4533 _(b'invalid value for --peer'),
4533 _(b'invalid value for --peer'),
4534 hint=_(b'valid values are "raw" and "ssh1"'),
4534 hint=_(b'valid values are "raw" and "ssh1"'),
4535 )
4535 )
4536
4536
4537 if path and opts[b'localssh']:
4537 if path and opts[b'localssh']:
4538 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4538 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4539
4539
4540 if ui.interactive():
4540 if ui.interactive():
4541 ui.write(_(b'(waiting for commands on stdin)\n'))
4541 ui.write(_(b'(waiting for commands on stdin)\n'))
4542
4542
4543 blocks = list(_parsewirelangblocks(ui.fin))
4543 blocks = list(_parsewirelangblocks(ui.fin))
4544
4544
4545 proc = None
4545 proc = None
4546 stdin = None
4546 stdin = None
4547 stdout = None
4547 stdout = None
4548 stderr = None
4548 stderr = None
4549 opener = None
4549 opener = None
4550
4550
4551 if opts[b'localssh']:
4551 if opts[b'localssh']:
4552 # We start the SSH server in its own process so there is process
4552 # We start the SSH server in its own process so there is process
4553 # separation. This prevents a whole class of potential bugs around
4553 # separation. This prevents a whole class of potential bugs around
4554 # shared state from interfering with server operation.
4554 # shared state from interfering with server operation.
4555 args = procutil.hgcmd() + [
4555 args = procutil.hgcmd() + [
4556 b'-R',
4556 b'-R',
4557 repo.root,
4557 repo.root,
4558 b'debugserve',
4558 b'debugserve',
4559 b'--sshstdio',
4559 b'--sshstdio',
4560 ]
4560 ]
4561 proc = subprocess.Popen(
4561 proc = subprocess.Popen(
4562 pycompat.rapply(procutil.tonativestr, args),
4562 pycompat.rapply(procutil.tonativestr, args),
4563 stdin=subprocess.PIPE,
4563 stdin=subprocess.PIPE,
4564 stdout=subprocess.PIPE,
4564 stdout=subprocess.PIPE,
4565 stderr=subprocess.PIPE,
4565 stderr=subprocess.PIPE,
4566 bufsize=0,
4566 bufsize=0,
4567 )
4567 )
4568
4568
4569 stdin = proc.stdin
4569 stdin = proc.stdin
4570 stdout = proc.stdout
4570 stdout = proc.stdout
4571 stderr = proc.stderr
4571 stderr = proc.stderr
4572
4572
4573 # We turn the pipes into observers so we can log I/O.
4573 # We turn the pipes into observers so we can log I/O.
4574 if ui.verbose or opts[b'peer'] == b'raw':
4574 if ui.verbose or opts[b'peer'] == b'raw':
4575 stdin = util.makeloggingfileobject(
4575 stdin = util.makeloggingfileobject(
4576 ui, proc.stdin, b'i', logdata=True
4576 ui, proc.stdin, b'i', logdata=True
4577 )
4577 )
4578 stdout = util.makeloggingfileobject(
4578 stdout = util.makeloggingfileobject(
4579 ui, proc.stdout, b'o', logdata=True
4579 ui, proc.stdout, b'o', logdata=True
4580 )
4580 )
4581 stderr = util.makeloggingfileobject(
4581 stderr = util.makeloggingfileobject(
4582 ui, proc.stderr, b'e', logdata=True
4582 ui, proc.stderr, b'e', logdata=True
4583 )
4583 )
4584
4584
4585 # --localssh also implies the peer connection settings.
4585 # --localssh also implies the peer connection settings.
4586
4586
4587 url = b'ssh://localserver'
4587 url = b'ssh://localserver'
4588 autoreadstderr = not opts[b'noreadstderr']
4588 autoreadstderr = not opts[b'noreadstderr']
4589
4589
4590 if opts[b'peer'] == b'ssh1':
4590 if opts[b'peer'] == b'ssh1':
4591 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4591 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4592 peer = sshpeer.sshv1peer(
4592 peer = sshpeer.sshv1peer(
4593 ui,
4593 ui,
4594 url,
4594 url,
4595 proc,
4595 proc,
4596 stdin,
4596 stdin,
4597 stdout,
4597 stdout,
4598 stderr,
4598 stderr,
4599 None,
4599 None,
4600 autoreadstderr=autoreadstderr,
4600 autoreadstderr=autoreadstderr,
4601 )
4601 )
4602 elif opts[b'peer'] == b'raw':
4602 elif opts[b'peer'] == b'raw':
4603 ui.write(_(b'using raw connection to peer\n'))
4603 ui.write(_(b'using raw connection to peer\n'))
4604 peer = None
4604 peer = None
4605 else:
4605 else:
4606 ui.write(_(b'creating ssh peer from handshake results\n'))
4606 ui.write(_(b'creating ssh peer from handshake results\n'))
4607 peer = sshpeer.makepeer(
4607 peer = sshpeer.makepeer(
4608 ui,
4608 ui,
4609 url,
4609 url,
4610 proc,
4610 proc,
4611 stdin,
4611 stdin,
4612 stdout,
4612 stdout,
4613 stderr,
4613 stderr,
4614 autoreadstderr=autoreadstderr,
4614 autoreadstderr=autoreadstderr,
4615 )
4615 )
4616
4616
4617 elif path:
4617 elif path:
4618 # We bypass hg.peer() so we can proxy the sockets.
4618 # We bypass hg.peer() so we can proxy the sockets.
4619 # TODO consider not doing this because we skip
4619 # TODO consider not doing this because we skip
4620 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4620 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4621 u = urlutil.url(path)
4621 u = urlutil.url(path)
4622 if u.scheme != b'http':
4622 if u.scheme != b'http':
4623 raise error.Abort(_(b'only http:// paths are currently supported'))
4623 raise error.Abort(_(b'only http:// paths are currently supported'))
4624
4624
4625 url, authinfo = u.authinfo()
4625 url, authinfo = u.authinfo()
4626 openerargs = {
4626 openerargs = {
4627 'useragent': b'Mercurial debugwireproto',
4627 'useragent': b'Mercurial debugwireproto',
4628 }
4628 }
4629
4629
4630 # Turn pipes/sockets into observers so we can log I/O.
4630 # Turn pipes/sockets into observers so we can log I/O.
4631 if ui.verbose:
4631 if ui.verbose:
4632 openerargs.update(
4632 openerargs.update(
4633 {
4633 {
4634 'loggingfh': ui,
4634 'loggingfh': ui,
4635 'loggingname': b's',
4635 'loggingname': b's',
4636 'loggingopts': {
4636 'loggingopts': {
4637 'logdata': True,
4637 'logdata': True,
4638 'logdataapis': False,
4638 'logdataapis': False,
4639 },
4639 },
4640 }
4640 }
4641 )
4641 )
4642
4642
4643 if ui.debugflag:
4643 if ui.debugflag:
4644 openerargs['loggingopts']['logdataapis'] = True
4644 openerargs['loggingopts']['logdataapis'] = True
4645
4645
4646 # Don't send default headers when in raw mode. This allows us to
4646 # Don't send default headers when in raw mode. This allows us to
4647 # bypass most of the behavior of our URL handling code so we can
4647 # bypass most of the behavior of our URL handling code so we can
4648 # have near complete control over what's sent on the wire.
4648 # have near complete control over what's sent on the wire.
4649 if opts[b'peer'] == b'raw':
4649 if opts[b'peer'] == b'raw':
4650 openerargs['sendaccept'] = False
4650 openerargs['sendaccept'] = False
4651
4651
4652 opener = urlmod.opener(ui, authinfo, **openerargs)
4652 opener = urlmod.opener(ui, authinfo, **openerargs)
4653
4653
4654 if opts[b'peer'] == b'raw':
4654 if opts[b'peer'] == b'raw':
4655 ui.write(_(b'using raw connection to peer\n'))
4655 ui.write(_(b'using raw connection to peer\n'))
4656 peer = None
4656 peer = None
4657 elif opts[b'peer']:
4657 elif opts[b'peer']:
4658 raise error.Abort(
4658 raise error.Abort(
4659 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4659 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4660 )
4660 )
4661 else:
4661 else:
4662 peer = httppeer.makepeer(ui, path, opener=opener)
4662 peer = httppeer.makepeer(ui, path, opener=opener)
4663
4663
4664 # We /could/ populate stdin/stdout with sock.makefile()...
4664 # We /could/ populate stdin/stdout with sock.makefile()...
4665 else:
4665 else:
4666 raise error.Abort(_(b'unsupported connection configuration'))
4666 raise error.Abort(_(b'unsupported connection configuration'))
4667
4667
4668 batchedcommands = None
4668 batchedcommands = None
4669
4669
4670 # Now perform actions based on the parsed wire language instructions.
4670 # Now perform actions based on the parsed wire language instructions.
4671 for action, lines in blocks:
4671 for action, lines in blocks:
4672 if action in (b'raw', b'raw+'):
4672 if action in (b'raw', b'raw+'):
4673 if not stdin:
4673 if not stdin:
4674 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4674 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4675
4675
4676 # Concatenate the data together.
4676 # Concatenate the data together.
4677 data = b''.join(l.lstrip() for l in lines)
4677 data = b''.join(l.lstrip() for l in lines)
4678 data = stringutil.unescapestr(data)
4678 data = stringutil.unescapestr(data)
4679 stdin.write(data)
4679 stdin.write(data)
4680
4680
4681 if action == b'raw+':
4681 if action == b'raw+':
4682 stdin.flush()
4682 stdin.flush()
4683 elif action == b'flush':
4683 elif action == b'flush':
4684 if not stdin:
4684 if not stdin:
4685 raise error.Abort(_(b'cannot call flush on this peer'))
4685 raise error.Abort(_(b'cannot call flush on this peer'))
4686 stdin.flush()
4686 stdin.flush()
4687 elif action.startswith(b'command'):
4687 elif action.startswith(b'command'):
4688 if not peer:
4688 if not peer:
4689 raise error.Abort(
4689 raise error.Abort(
4690 _(
4690 _(
4691 b'cannot send commands unless peer instance '
4691 b'cannot send commands unless peer instance '
4692 b'is available'
4692 b'is available'
4693 )
4693 )
4694 )
4694 )
4695
4695
4696 command = action.split(b' ', 1)[1]
4696 command = action.split(b' ', 1)[1]
4697
4697
4698 args = {}
4698 args = {}
4699 for line in lines:
4699 for line in lines:
4700 # We need to allow empty values.
4700 # We need to allow empty values.
4701 fields = line.lstrip().split(b' ', 1)
4701 fields = line.lstrip().split(b' ', 1)
4702 if len(fields) == 1:
4702 if len(fields) == 1:
4703 key = fields[0]
4703 key = fields[0]
4704 value = b''
4704 value = b''
4705 else:
4705 else:
4706 key, value = fields
4706 key, value = fields
4707
4707
4708 if value.startswith(b'eval:'):
4708 if value.startswith(b'eval:'):
4709 value = stringutil.evalpythonliteral(value[5:])
4709 value = stringutil.evalpythonliteral(value[5:])
4710 else:
4710 else:
4711 value = stringutil.unescapestr(value)
4711 value = stringutil.unescapestr(value)
4712
4712
4713 args[key] = value
4713 args[key] = value
4714
4714
4715 if batchedcommands is not None:
4715 if batchedcommands is not None:
4716 batchedcommands.append((command, args))
4716 batchedcommands.append((command, args))
4717 continue
4717 continue
4718
4718
4719 ui.status(_(b'sending %s command\n') % command)
4719 ui.status(_(b'sending %s command\n') % command)
4720
4720
4721 if b'PUSHFILE' in args:
4721 if b'PUSHFILE' in args:
4722 with open(args[b'PUSHFILE'], 'rb') as fh:
4722 with open(args[b'PUSHFILE'], 'rb') as fh:
4723 del args[b'PUSHFILE']
4723 del args[b'PUSHFILE']
4724 res, output = peer._callpush(
4724 res, output = peer._callpush(
4725 command, fh, **pycompat.strkwargs(args)
4725 command, fh, **pycompat.strkwargs(args)
4726 )
4726 )
4727 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4727 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4728 ui.status(
4728 ui.status(
4729 _(b'remote output: %s\n') % stringutil.escapestr(output)
4729 _(b'remote output: %s\n') % stringutil.escapestr(output)
4730 )
4730 )
4731 else:
4731 else:
4732 with peer.commandexecutor() as e:
4732 with peer.commandexecutor() as e:
4733 res = e.callcommand(command, args).result()
4733 res = e.callcommand(command, args).result()
4734
4734
4735 ui.status(
4735 ui.status(
4736 _(b'response: %s\n')
4736 _(b'response: %s\n')
4737 % stringutil.pprint(res, bprefix=True, indent=2)
4737 % stringutil.pprint(res, bprefix=True, indent=2)
4738 )
4738 )
4739
4739
4740 elif action == b'batchbegin':
4740 elif action == b'batchbegin':
4741 if batchedcommands is not None:
4741 if batchedcommands is not None:
4742 raise error.Abort(_(b'nested batchbegin not allowed'))
4742 raise error.Abort(_(b'nested batchbegin not allowed'))
4743
4743
4744 batchedcommands = []
4744 batchedcommands = []
4745 elif action == b'batchsubmit':
4745 elif action == b'batchsubmit':
4746 # There is a batching API we could go through. But it would be
4746 # There is a batching API we could go through. But it would be
4747 # difficult to normalize requests into function calls. It is easier
4747 # difficult to normalize requests into function calls. It is easier
4748 # to bypass this layer and normalize to commands + args.
4748 # to bypass this layer and normalize to commands + args.
4749 ui.status(
4749 ui.status(
4750 _(b'sending batch with %d sub-commands\n')
4750 _(b'sending batch with %d sub-commands\n')
4751 % len(batchedcommands)
4751 % len(batchedcommands)
4752 )
4752 )
4753 assert peer is not None
4753 assert peer is not None
4754 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4754 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4755 ui.status(
4755 ui.status(
4756 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4756 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4757 )
4757 )
4758
4758
4759 batchedcommands = None
4759 batchedcommands = None
4760
4760
4761 elif action.startswith(b'httprequest '):
4761 elif action.startswith(b'httprequest '):
4762 if not opener:
4762 if not opener:
4763 raise error.Abort(
4763 raise error.Abort(
4764 _(b'cannot use httprequest without an HTTP peer')
4764 _(b'cannot use httprequest without an HTTP peer')
4765 )
4765 )
4766
4766
4767 request = action.split(b' ', 2)
4767 request = action.split(b' ', 2)
4768 if len(request) != 3:
4768 if len(request) != 3:
4769 raise error.Abort(
4769 raise error.Abort(
4770 _(
4770 _(
4771 b'invalid httprequest: expected format is '
4771 b'invalid httprequest: expected format is '
4772 b'"httprequest <method> <path>'
4772 b'"httprequest <method> <path>'
4773 )
4773 )
4774 )
4774 )
4775
4775
4776 method, httppath = request[1:]
4776 method, httppath = request[1:]
4777 headers = {}
4777 headers = {}
4778 body = None
4778 body = None
4779 frames = []
4779 frames = []
4780 for line in lines:
4780 for line in lines:
4781 line = line.lstrip()
4781 line = line.lstrip()
4782 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4782 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4783 if m:
4783 if m:
4784 # Headers need to use native strings.
4784 # Headers need to use native strings.
4785 key = pycompat.strurl(m.group(1))
4785 key = pycompat.strurl(m.group(1))
4786 value = pycompat.strurl(m.group(2))
4786 value = pycompat.strurl(m.group(2))
4787 headers[key] = value
4787 headers[key] = value
4788 continue
4788 continue
4789
4789
4790 if line.startswith(b'BODYFILE '):
4790 if line.startswith(b'BODYFILE '):
4791 with open(line.split(b' ', 1), b'rb') as fh:
4791 with open(line.split(b' ', 1), b'rb') as fh:
4792 body = fh.read()
4792 body = fh.read()
4793 elif line.startswith(b'frame '):
4793 elif line.startswith(b'frame '):
4794 frame = wireprotoframing.makeframefromhumanstring(
4794 frame = wireprotoframing.makeframefromhumanstring(
4795 line[len(b'frame ') :]
4795 line[len(b'frame ') :]
4796 )
4796 )
4797
4797
4798 frames.append(frame)
4798 frames.append(frame)
4799 else:
4799 else:
4800 raise error.Abort(
4800 raise error.Abort(
4801 _(b'unknown argument to httprequest: %s') % line
4801 _(b'unknown argument to httprequest: %s') % line
4802 )
4802 )
4803
4803
4804 url = path + httppath
4804 url = path + httppath
4805
4805
4806 if frames:
4806 if frames:
4807 body = b''.join(bytes(f) for f in frames)
4807 body = b''.join(bytes(f) for f in frames)
4808
4808
4809 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4809 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4810
4810
4811 # urllib.Request insists on using has_data() as a proxy for
4811 # urllib.Request insists on using has_data() as a proxy for
4812 # determining the request method. Override that to use our
4812 # determining the request method. Override that to use our
4813 # explicitly requested method.
4813 # explicitly requested method.
4814 req.get_method = lambda: pycompat.sysstr(method)
4814 req.get_method = lambda: pycompat.sysstr(method)
4815
4815
4816 try:
4816 try:
4817 res = opener.open(req)
4817 res = opener.open(req)
4818 body = res.read()
4818 body = res.read()
4819 except util.urlerr.urlerror as e:
4819 except util.urlerr.urlerror as e:
4820 # read() method must be called, but only exists in Python 2
4820 # read() method must be called, but only exists in Python 2
4821 getattr(e, 'read', lambda: None)()
4821 getattr(e, 'read', lambda: None)()
4822 continue
4822 continue
4823
4823
4824 ct = res.headers.get('Content-Type')
4824 ct = res.headers.get('Content-Type')
4825 if ct == 'application/mercurial-cbor':
4825 if ct == 'application/mercurial-cbor':
4826 ui.write(
4826 ui.write(
4827 _(b'cbor> %s\n')
4827 _(b'cbor> %s\n')
4828 % stringutil.pprint(
4828 % stringutil.pprint(
4829 cborutil.decodeall(body), bprefix=True, indent=2
4829 cborutil.decodeall(body), bprefix=True, indent=2
4830 )
4830 )
4831 )
4831 )
4832
4832
4833 elif action == b'close':
4833 elif action == b'close':
4834 assert peer is not None
4834 assert peer is not None
4835 peer.close()
4835 peer.close()
4836 elif action == b'readavailable':
4836 elif action == b'readavailable':
4837 if not stdout or not stderr:
4837 if not stdout or not stderr:
4838 raise error.Abort(
4838 raise error.Abort(
4839 _(b'readavailable not available on this peer')
4839 _(b'readavailable not available on this peer')
4840 )
4840 )
4841
4841
4842 stdin.close()
4842 stdin.close()
4843 stdout.read()
4843 stdout.read()
4844 stderr.read()
4844 stderr.read()
4845
4845
4846 elif action == b'readline':
4846 elif action == b'readline':
4847 if not stdout:
4847 if not stdout:
4848 raise error.Abort(_(b'readline not available on this peer'))
4848 raise error.Abort(_(b'readline not available on this peer'))
4849 stdout.readline()
4849 stdout.readline()
4850 elif action == b'ereadline':
4850 elif action == b'ereadline':
4851 if not stderr:
4851 if not stderr:
4852 raise error.Abort(_(b'ereadline not available on this peer'))
4852 raise error.Abort(_(b'ereadline not available on this peer'))
4853 stderr.readline()
4853 stderr.readline()
4854 elif action.startswith(b'read '):
4854 elif action.startswith(b'read '):
4855 count = int(action.split(b' ', 1)[1])
4855 count = int(action.split(b' ', 1)[1])
4856 if not stdout:
4856 if not stdout:
4857 raise error.Abort(_(b'read not available on this peer'))
4857 raise error.Abort(_(b'read not available on this peer'))
4858 stdout.read(count)
4858 stdout.read(count)
4859 elif action.startswith(b'eread '):
4859 elif action.startswith(b'eread '):
4860 count = int(action.split(b' ', 1)[1])
4860 count = int(action.split(b' ', 1)[1])
4861 if not stderr:
4861 if not stderr:
4862 raise error.Abort(_(b'eread not available on this peer'))
4862 raise error.Abort(_(b'eread not available on this peer'))
4863 stderr.read(count)
4863 stderr.read(count)
4864 else:
4864 else:
4865 raise error.Abort(_(b'unknown action: %s') % action)
4865 raise error.Abort(_(b'unknown action: %s') % action)
4866
4866
4867 if batchedcommands is not None:
4867 if batchedcommands is not None:
4868 raise error.Abort(_(b'unclosed "batchbegin" request'))
4868 raise error.Abort(_(b'unclosed "batchbegin" request'))
4869
4869
4870 if peer:
4870 if peer:
4871 peer.close()
4871 peer.close()
4872
4872
4873 if proc:
4873 if proc:
4874 proc.kill()
4874 proc.kill()
@@ -1,527 +1,524 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15
15
16 # mbp: "you know that thing where cvs gives you conflict markers?"
16 # mbp: "you know that thing where cvs gives you conflict markers?"
17 # s: "i hate that."
17 # s: "i hate that."
18
18
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 from .i18n import _
21 from .i18n import _
22 from . import (
22 from . import (
23 error,
23 error,
24 mdiff,
24 mdiff,
25 pycompat,
25 pycompat,
26 )
26 )
27 from .utils import stringutil
27 from .utils import stringutil
28
28
29
29
30 class CantReprocessAndShowBase(Exception):
30 class CantReprocessAndShowBase(Exception):
31 pass
31 pass
32
32
33
33
34 def intersect(ra, rb):
34 def intersect(ra, rb):
35 """Given two ranges return the range where they intersect or None.
35 """Given two ranges return the range where they intersect or None.
36
36
37 >>> intersect((0, 10), (0, 6))
37 >>> intersect((0, 10), (0, 6))
38 (0, 6)
38 (0, 6)
39 >>> intersect((0, 10), (5, 15))
39 >>> intersect((0, 10), (5, 15))
40 (5, 10)
40 (5, 10)
41 >>> intersect((0, 10), (10, 15))
41 >>> intersect((0, 10), (10, 15))
42 >>> intersect((0, 9), (10, 15))
42 >>> intersect((0, 9), (10, 15))
43 >>> intersect((0, 9), (7, 15))
43 >>> intersect((0, 9), (7, 15))
44 (7, 9)
44 (7, 9)
45 """
45 """
46 assert ra[0] <= ra[1]
46 assert ra[0] <= ra[1]
47 assert rb[0] <= rb[1]
47 assert rb[0] <= rb[1]
48
48
49 sa = max(ra[0], rb[0])
49 sa = max(ra[0], rb[0])
50 sb = min(ra[1], rb[1])
50 sb = min(ra[1], rb[1])
51 if sa < sb:
51 if sa < sb:
52 return sa, sb
52 return sa, sb
53 else:
53 else:
54 return None
54 return None
55
55
56
56
57 def compare_range(a, astart, aend, b, bstart, bend):
57 def compare_range(a, astart, aend, b, bstart, bend):
58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
59 if (aend - astart) != (bend - bstart):
59 if (aend - astart) != (bend - bstart):
60 return False
60 return False
61 for ia, ib in zip(
61 for ia, ib in zip(
62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
63 ):
63 ):
64 if a[ia] != b[ib]:
64 if a[ia] != b[ib]:
65 return False
65 return False
66 else:
66 else:
67 return True
67 return True
68
68
69
69
70 class Merge3Text(object):
70 class Merge3Text(object):
71 """3-way merge of texts.
71 """3-way merge of texts.
72
72
73 Given strings BASE, OTHER, THIS, tries to produce a combined text
73 Given strings BASE, OTHER, THIS, tries to produce a combined text
74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
75
75
76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
77 self.basetext = basetext
77 self.basetext = basetext
78 self.atext = atext
78 self.atext = atext
79 self.btext = btext
79 self.btext = btext
80 if base is None:
80 if base is None:
81 base = mdiff.splitnewlines(basetext)
81 base = mdiff.splitnewlines(basetext)
82 if a is None:
82 if a is None:
83 a = mdiff.splitnewlines(atext)
83 a = mdiff.splitnewlines(atext)
84 if b is None:
84 if b is None:
85 b = mdiff.splitnewlines(btext)
85 b = mdiff.splitnewlines(btext)
86 self.base = base
86 self.base = base
87 self.a = a
87 self.a = a
88 self.b = b
88 self.b = b
89
89
90 def merge_lines(
90 def merge_lines(
91 self,
91 self,
92 name_a=None,
92 name_a=None,
93 name_b=None,
93 name_b=None,
94 name_base=None,
94 name_base=None,
95 start_marker=b'<<<<<<<',
95 start_marker=b'<<<<<<<',
96 mid_marker=b'=======',
96 mid_marker=b'=======',
97 end_marker=b'>>>>>>>',
97 end_marker=b'>>>>>>>',
98 base_marker=None,
98 base_marker=None,
99 minimize=False,
99 minimize=False,
100 ):
100 ):
101 """Return merge in cvs-like form."""
101 """Return merge in cvs-like form."""
102 self.conflicts = False
102 conflicts = False
103 newline = b'\n'
103 newline = b'\n'
104 if len(self.a) > 0:
104 if len(self.a) > 0:
105 if self.a[0].endswith(b'\r\n'):
105 if self.a[0].endswith(b'\r\n'):
106 newline = b'\r\n'
106 newline = b'\r\n'
107 elif self.a[0].endswith(b'\r'):
107 elif self.a[0].endswith(b'\r'):
108 newline = b'\r'
108 newline = b'\r'
109 if name_a and start_marker:
109 if name_a and start_marker:
110 start_marker = start_marker + b' ' + name_a
110 start_marker = start_marker + b' ' + name_a
111 if name_b and end_marker:
111 if name_b and end_marker:
112 end_marker = end_marker + b' ' + name_b
112 end_marker = end_marker + b' ' + name_b
113 if name_base and base_marker:
113 if name_base and base_marker:
114 base_marker = base_marker + b' ' + name_base
114 base_marker = base_marker + b' ' + name_base
115 merge_groups = self.merge_groups()
115 merge_groups = self.merge_groups()
116 if minimize:
116 if minimize:
117 merge_groups = self.minimize(merge_groups)
117 merge_groups = self.minimize(merge_groups)
118 for what, lines in merge_groups:
118 lines = []
119 for what, group_lines in merge_groups:
119 if what == b'conflict':
120 if what == b'conflict':
120 base_lines, a_lines, b_lines = lines
121 base_lines, a_lines, b_lines = group_lines
121 self.conflicts = True
122 conflicts = True
122 if start_marker is not None:
123 if start_marker is not None:
123 yield start_marker + newline
124 lines.append(start_marker + newline)
124 for line in a_lines:
125 lines.extend(a_lines)
125 yield line
126 if base_marker is not None:
126 if base_marker is not None:
127 yield base_marker + newline
127 lines.append(base_marker + newline)
128 for line in base_lines:
128 lines.extend(base_lines)
129 yield line
130 if mid_marker is not None:
129 if mid_marker is not None:
131 yield mid_marker + newline
130 lines.append(mid_marker + newline)
132 for line in b_lines:
131 lines.extend(b_lines)
133 yield line
134 if end_marker is not None:
132 if end_marker is not None:
135 yield end_marker + newline
133 lines.append(end_marker + newline)
136 else:
134 else:
137 for line in lines:
135 lines.extend(group_lines)
138 yield line
136 return lines, conflicts
139
137
140 def merge_groups(self):
138 def merge_groups(self):
141 """Yield sequence of line groups. Each one is a tuple:
139 """Yield sequence of line groups. Each one is a tuple:
142
140
143 'unchanged', lines
141 'unchanged', lines
144 Lines unchanged from base
142 Lines unchanged from base
145
143
146 'a', lines
144 'a', lines
147 Lines taken from a
145 Lines taken from a
148
146
149 'same', lines
147 'same', lines
150 Lines taken from a (and equal to b)
148 Lines taken from a (and equal to b)
151
149
152 'b', lines
150 'b', lines
153 Lines taken from b
151 Lines taken from b
154
152
155 'conflict', (base_lines, a_lines, b_lines)
153 'conflict', (base_lines, a_lines, b_lines)
156 Lines from base were changed to either a or b and conflict.
154 Lines from base were changed to either a or b and conflict.
157 """
155 """
158 for t in self.merge_regions():
156 for t in self.merge_regions():
159 what = t[0]
157 what = t[0]
160 if what == b'unchanged':
158 if what == b'unchanged':
161 yield what, self.base[t[1] : t[2]]
159 yield what, self.base[t[1] : t[2]]
162 elif what == b'a' or what == b'same':
160 elif what == b'a' or what == b'same':
163 yield what, self.a[t[1] : t[2]]
161 yield what, self.a[t[1] : t[2]]
164 elif what == b'b':
162 elif what == b'b':
165 yield what, self.b[t[1] : t[2]]
163 yield what, self.b[t[1] : t[2]]
166 elif what == b'conflict':
164 elif what == b'conflict':
167 yield (
165 yield (
168 what,
166 what,
169 (
167 (
170 self.base[t[1] : t[2]],
168 self.base[t[1] : t[2]],
171 self.a[t[3] : t[4]],
169 self.a[t[3] : t[4]],
172 self.b[t[5] : t[6]],
170 self.b[t[5] : t[6]],
173 ),
171 ),
174 )
172 )
175 else:
173 else:
176 raise ValueError(what)
174 raise ValueError(what)
177
175
178 def merge_regions(self):
176 def merge_regions(self):
179 """Return sequences of matching and conflicting regions.
177 """Return sequences of matching and conflicting regions.
180
178
181 This returns tuples, where the first value says what kind we
179 This returns tuples, where the first value says what kind we
182 have:
180 have:
183
181
184 'unchanged', start, end
182 'unchanged', start, end
185 Take a region of base[start:end]
183 Take a region of base[start:end]
186
184
187 'same', astart, aend
185 'same', astart, aend
188 b and a are different from base but give the same result
186 b and a are different from base but give the same result
189
187
190 'a', start, end
188 'a', start, end
191 Non-clashing insertion from a[start:end]
189 Non-clashing insertion from a[start:end]
192
190
193 'conflict', zstart, zend, astart, aend, bstart, bend
191 'conflict', zstart, zend, astart, aend, bstart, bend
194 Conflict between a and b, with z as common ancestor
192 Conflict between a and b, with z as common ancestor
195
193
196 Method is as follows:
194 Method is as follows:
197
195
198 The two sequences align only on regions which match the base
196 The two sequences align only on regions which match the base
199 and both descendants. These are found by doing a two-way diff
197 and both descendants. These are found by doing a two-way diff
200 of each one against the base, and then finding the
198 of each one against the base, and then finding the
201 intersections between those regions. These "sync regions"
199 intersections between those regions. These "sync regions"
202 are by definition unchanged in both and easily dealt with.
200 are by definition unchanged in both and easily dealt with.
203
201
204 The regions in between can be in any of three cases:
202 The regions in between can be in any of three cases:
205 conflicted, or changed on only one side.
203 conflicted, or changed on only one side.
206 """
204 """
207
205
208 # section a[0:ia] has been disposed of, etc
206 # section a[0:ia] has been disposed of, etc
209 iz = ia = ib = 0
207 iz = ia = ib = 0
210
208
211 for region in self.find_sync_regions():
209 for region in self.find_sync_regions():
212 zmatch, zend, amatch, aend, bmatch, bend = region
210 zmatch, zend, amatch, aend, bmatch, bend = region
213 # print 'match base [%d:%d]' % (zmatch, zend)
211 # print 'match base [%d:%d]' % (zmatch, zend)
214
212
215 matchlen = zend - zmatch
213 matchlen = zend - zmatch
216 assert matchlen >= 0
214 assert matchlen >= 0
217 assert matchlen == (aend - amatch)
215 assert matchlen == (aend - amatch)
218 assert matchlen == (bend - bmatch)
216 assert matchlen == (bend - bmatch)
219
217
220 len_a = amatch - ia
218 len_a = amatch - ia
221 len_b = bmatch - ib
219 len_b = bmatch - ib
222 len_base = zmatch - iz
220 len_base = zmatch - iz
223 assert len_a >= 0
221 assert len_a >= 0
224 assert len_b >= 0
222 assert len_b >= 0
225 assert len_base >= 0
223 assert len_base >= 0
226
224
227 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
225 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
228
226
229 if len_a or len_b:
227 if len_a or len_b:
230 # try to avoid actually slicing the lists
228 # try to avoid actually slicing the lists
231 equal_a = compare_range(
229 equal_a = compare_range(
232 self.a, ia, amatch, self.base, iz, zmatch
230 self.a, ia, amatch, self.base, iz, zmatch
233 )
231 )
234 equal_b = compare_range(
232 equal_b = compare_range(
235 self.b, ib, bmatch, self.base, iz, zmatch
233 self.b, ib, bmatch, self.base, iz, zmatch
236 )
234 )
237 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
235 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
238
236
239 if same:
237 if same:
240 yield b'same', ia, amatch
238 yield b'same', ia, amatch
241 elif equal_a and not equal_b:
239 elif equal_a and not equal_b:
242 yield b'b', ib, bmatch
240 yield b'b', ib, bmatch
243 elif equal_b and not equal_a:
241 elif equal_b and not equal_a:
244 yield b'a', ia, amatch
242 yield b'a', ia, amatch
245 elif not equal_a and not equal_b:
243 elif not equal_a and not equal_b:
246 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
244 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
247 else:
245 else:
248 raise AssertionError(b"can't handle a=b=base but unmatched")
246 raise AssertionError(b"can't handle a=b=base but unmatched")
249
247
250 ia = amatch
248 ia = amatch
251 ib = bmatch
249 ib = bmatch
252 iz = zmatch
250 iz = zmatch
253
251
254 # if the same part of the base was deleted on both sides
252 # if the same part of the base was deleted on both sides
255 # that's OK, we can just skip it.
253 # that's OK, we can just skip it.
256
254
257 if matchlen > 0:
255 if matchlen > 0:
258 assert ia == amatch
256 assert ia == amatch
259 assert ib == bmatch
257 assert ib == bmatch
260 assert iz == zmatch
258 assert iz == zmatch
261
259
262 yield b'unchanged', zmatch, zend
260 yield b'unchanged', zmatch, zend
263 iz = zend
261 iz = zend
264 ia = aend
262 ia = aend
265 ib = bend
263 ib = bend
266
264
267 def minimize(self, merge_groups):
265 def minimize(self, merge_groups):
268 """Trim conflict regions of lines where A and B sides match.
266 """Trim conflict regions of lines where A and B sides match.
269
267
270 Lines where both A and B have made the same changes at the beginning
268 Lines where both A and B have made the same changes at the beginning
271 or the end of each merge region are eliminated from the conflict
269 or the end of each merge region are eliminated from the conflict
272 region and are instead considered the same.
270 region and are instead considered the same.
273 """
271 """
274 for what, lines in merge_groups:
272 for what, lines in merge_groups:
275 if what != b"conflict":
273 if what != b"conflict":
276 yield what, lines
274 yield what, lines
277 continue
275 continue
278 base_lines, a_lines, b_lines = lines
276 base_lines, a_lines, b_lines = lines
279 alen = len(a_lines)
277 alen = len(a_lines)
280 blen = len(b_lines)
278 blen = len(b_lines)
281
279
282 # find matches at the front
280 # find matches at the front
283 ii = 0
281 ii = 0
284 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
282 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
285 ii += 1
283 ii += 1
286 startmatches = ii
284 startmatches = ii
287
285
288 # find matches at the end
286 # find matches at the end
289 ii = 0
287 ii = 0
290 while (
288 while (
291 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
289 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
292 ):
290 ):
293 ii += 1
291 ii += 1
294 endmatches = ii
292 endmatches = ii
295
293
296 if startmatches > 0:
294 if startmatches > 0:
297 yield b'same', a_lines[:startmatches]
295 yield b'same', a_lines[:startmatches]
298
296
299 yield (
297 yield (
300 b'conflict',
298 b'conflict',
301 (
299 (
302 base_lines,
300 base_lines,
303 a_lines[startmatches : alen - endmatches],
301 a_lines[startmatches : alen - endmatches],
304 b_lines[startmatches : blen - endmatches],
302 b_lines[startmatches : blen - endmatches],
305 ),
303 ),
306 )
304 )
307
305
308 if endmatches > 0:
306 if endmatches > 0:
309 yield b'same', a_lines[alen - endmatches :]
307 yield b'same', a_lines[alen - endmatches :]
310
308
311 def find_sync_regions(self):
309 def find_sync_regions(self):
312 """Return a list of sync regions, where both descendants match the base.
310 """Return a list of sync regions, where both descendants match the base.
313
311
314 Generates a list of (base1, base2, a1, a2, b1, b2). There is
312 Generates a list of (base1, base2, a1, a2, b1, b2). There is
315 always a zero-length sync region at the end of all the files.
313 always a zero-length sync region at the end of all the files.
316 """
314 """
317
315
318 ia = ib = 0
316 ia = ib = 0
319 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
317 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
320 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
318 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
321 len_a = len(amatches)
319 len_a = len(amatches)
322 len_b = len(bmatches)
320 len_b = len(bmatches)
323
321
324 sl = []
322 sl = []
325
323
326 while ia < len_a and ib < len_b:
324 while ia < len_a and ib < len_b:
327 abase, amatch, alen = amatches[ia]
325 abase, amatch, alen = amatches[ia]
328 bbase, bmatch, blen = bmatches[ib]
326 bbase, bmatch, blen = bmatches[ib]
329
327
330 # there is an unconflicted block at i; how long does it
328 # there is an unconflicted block at i; how long does it
331 # extend? until whichever one ends earlier.
329 # extend? until whichever one ends earlier.
332 i = intersect((abase, abase + alen), (bbase, bbase + blen))
330 i = intersect((abase, abase + alen), (bbase, bbase + blen))
333 if i:
331 if i:
334 intbase = i[0]
332 intbase = i[0]
335 intend = i[1]
333 intend = i[1]
336 intlen = intend - intbase
334 intlen = intend - intbase
337
335
338 # found a match of base[i[0], i[1]]; this may be less than
336 # found a match of base[i[0], i[1]]; this may be less than
339 # the region that matches in either one
337 # the region that matches in either one
340 assert intlen <= alen
338 assert intlen <= alen
341 assert intlen <= blen
339 assert intlen <= blen
342 assert abase <= intbase
340 assert abase <= intbase
343 assert bbase <= intbase
341 assert bbase <= intbase
344
342
345 asub = amatch + (intbase - abase)
343 asub = amatch + (intbase - abase)
346 bsub = bmatch + (intbase - bbase)
344 bsub = bmatch + (intbase - bbase)
347 aend = asub + intlen
345 aend = asub + intlen
348 bend = bsub + intlen
346 bend = bsub + intlen
349
347
350 assert self.base[intbase:intend] == self.a[asub:aend], (
348 assert self.base[intbase:intend] == self.a[asub:aend], (
351 self.base[intbase:intend],
349 self.base[intbase:intend],
352 self.a[asub:aend],
350 self.a[asub:aend],
353 )
351 )
354
352
355 assert self.base[intbase:intend] == self.b[bsub:bend]
353 assert self.base[intbase:intend] == self.b[bsub:bend]
356
354
357 sl.append((intbase, intend, asub, aend, bsub, bend))
355 sl.append((intbase, intend, asub, aend, bsub, bend))
358
356
359 # advance whichever one ends first in the base text
357 # advance whichever one ends first in the base text
360 if (abase + alen) < (bbase + blen):
358 if (abase + alen) < (bbase + blen):
361 ia += 1
359 ia += 1
362 else:
360 else:
363 ib += 1
361 ib += 1
364
362
365 intbase = len(self.base)
363 intbase = len(self.base)
366 abase = len(self.a)
364 abase = len(self.a)
367 bbase = len(self.b)
365 bbase = len(self.b)
368 sl.append((intbase, intbase, abase, abase, bbase, bbase))
366 sl.append((intbase, intbase, abase, abase, bbase, bbase))
369
367
370 return sl
368 return sl
371
369
372
370
373 def _verifytext(text, path, ui, opts):
371 def _verifytext(text, path, ui, opts):
374 """verifies that text is non-binary (unless opts[text] is passed,
372 """verifies that text is non-binary (unless opts[text] is passed,
375 then we just warn)"""
373 then we just warn)"""
376 if stringutil.binary(text):
374 if stringutil.binary(text):
377 msg = _(b"%s looks like a binary file.") % path
375 msg = _(b"%s looks like a binary file.") % path
378 if not opts.get('quiet'):
376 if not opts.get('quiet'):
379 ui.warn(_(b'warning: %s\n') % msg)
377 ui.warn(_(b'warning: %s\n') % msg)
380 if not opts.get('text'):
378 if not opts.get('text'):
381 raise error.Abort(msg)
379 raise error.Abort(msg)
382 return text
380 return text
383
381
384
382
385 def _picklabels(defaults, overrides):
383 def _picklabels(defaults, overrides):
386 if len(overrides) > 3:
384 if len(overrides) > 3:
387 raise error.Abort(_(b"can only specify three labels."))
385 raise error.Abort(_(b"can only specify three labels."))
388 result = defaults[:]
386 result = defaults[:]
389 for i, override in enumerate(overrides):
387 for i, override in enumerate(overrides):
390 result[i] = override
388 result[i] = override
391 return result
389 return result
392
390
393
391
394 def _mergediff(m3, name_a, name_b, name_base):
392 def _mergediff(m3, name_a, name_b, name_base):
395 lines = []
393 lines = []
396 conflicts = False
394 conflicts = False
397 for what, group_lines in m3.merge_groups():
395 for what, group_lines in m3.merge_groups():
398 if what == b'conflict':
396 if what == b'conflict':
399 base_lines, a_lines, b_lines = group_lines
397 base_lines, a_lines, b_lines = group_lines
400 base_text = b''.join(base_lines)
398 base_text = b''.join(base_lines)
401 b_blocks = list(
399 b_blocks = list(
402 mdiff.allblocks(
400 mdiff.allblocks(
403 base_text,
401 base_text,
404 b''.join(b_lines),
402 b''.join(b_lines),
405 lines1=base_lines,
403 lines1=base_lines,
406 lines2=b_lines,
404 lines2=b_lines,
407 )
405 )
408 )
406 )
409 a_blocks = list(
407 a_blocks = list(
410 mdiff.allblocks(
408 mdiff.allblocks(
411 base_text,
409 base_text,
412 b''.join(a_lines),
410 b''.join(a_lines),
413 lines1=base_lines,
411 lines1=base_lines,
414 lines2=b_lines,
412 lines2=b_lines,
415 )
413 )
416 )
414 )
417
415
418 def matching_lines(blocks):
416 def matching_lines(blocks):
419 return sum(
417 return sum(
420 block[1] - block[0]
418 block[1] - block[0]
421 for block, kind in blocks
419 for block, kind in blocks
422 if kind == b'='
420 if kind == b'='
423 )
421 )
424
422
425 def diff_lines(blocks, lines1, lines2):
423 def diff_lines(blocks, lines1, lines2):
426 for block, kind in blocks:
424 for block, kind in blocks:
427 if kind == b'=':
425 if kind == b'=':
428 for line in lines1[block[0] : block[1]]:
426 for line in lines1[block[0] : block[1]]:
429 yield b' ' + line
427 yield b' ' + line
430 else:
428 else:
431 for line in lines1[block[0] : block[1]]:
429 for line in lines1[block[0] : block[1]]:
432 yield b'-' + line
430 yield b'-' + line
433 for line in lines2[block[2] : block[3]]:
431 for line in lines2[block[2] : block[3]]:
434 yield b'+' + line
432 yield b'+' + line
435
433
436 lines.append(b"<<<<<<<\n")
434 lines.append(b"<<<<<<<\n")
437 if matching_lines(a_blocks) < matching_lines(b_blocks):
435 if matching_lines(a_blocks) < matching_lines(b_blocks):
438 lines.append(b"======= %s\n" % name_a)
436 lines.append(b"======= %s\n" % name_a)
439 lines.extend(a_lines)
437 lines.extend(a_lines)
440 lines.append(b"------- %s\n" % name_base)
438 lines.append(b"------- %s\n" % name_base)
441 lines.append(b"+++++++ %s\n" % name_b)
439 lines.append(b"+++++++ %s\n" % name_b)
442 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
440 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
443 else:
441 else:
444 lines.append(b"------- %s\n" % name_base)
442 lines.append(b"------- %s\n" % name_base)
445 lines.append(b"+++++++ %s\n" % name_a)
443 lines.append(b"+++++++ %s\n" % name_a)
446 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
444 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
447 lines.append(b"======= %s\n" % name_b)
445 lines.append(b"======= %s\n" % name_b)
448 lines.extend(b_lines)
446 lines.extend(b_lines)
449 lines.append(b">>>>>>>\n")
447 lines.append(b">>>>>>>\n")
450 conflicts = True
448 conflicts = True
451 else:
449 else:
452 lines.extend(group_lines)
450 lines.extend(group_lines)
453 return lines, conflicts
451 return lines, conflicts
454
452
455
453
456 def _resolve(m3, sides):
454 def _resolve(m3, sides):
457 lines = []
455 lines = []
458 for what, group_lines in m3.merge_groups():
456 for what, group_lines in m3.merge_groups():
459 if what == b'conflict':
457 if what == b'conflict':
460 for side in sides:
458 for side in sides:
461 lines.extend(group_lines[side])
459 lines.extend(group_lines[side])
462 else:
460 else:
463 lines.extend(group_lines)
461 lines.extend(group_lines)
464 return lines
462 return lines
465
463
466
464
467 def simplemerge(ui, localctx, basectx, otherctx, **opts):
465 def simplemerge(ui, localctx, basectx, otherctx, **opts):
468 """Performs the simplemerge algorithm.
466 """Performs the simplemerge algorithm.
469
467
470 The merged result is written into `localctx`.
468 The merged result is written into `localctx`.
471 """
469 """
472
470
473 def readctx(ctx):
471 def readctx(ctx):
474 # Merges were always run in the working copy before, which means
472 # Merges were always run in the working copy before, which means
475 # they used decoded data, if the user defined any repository
473 # they used decoded data, if the user defined any repository
476 # filters.
474 # filters.
477 #
475 #
478 # Maintain that behavior today for BC, though perhaps in the future
476 # Maintain that behavior today for BC, though perhaps in the future
479 # it'd be worth considering whether merging encoded data (what the
477 # it'd be worth considering whether merging encoded data (what the
480 # repository usually sees) might be more useful.
478 # repository usually sees) might be more useful.
481 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
479 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
482
480
483 try:
481 try:
484 localtext = readctx(localctx)
482 localtext = readctx(localctx)
485 basetext = readctx(basectx)
483 basetext = readctx(basectx)
486 othertext = readctx(otherctx)
484 othertext = readctx(otherctx)
487 except error.Abort:
485 except error.Abort:
488 return 1
486 return 1
489
487
490 m3 = Merge3Text(basetext, localtext, othertext)
488 m3 = Merge3Text(basetext, localtext, othertext)
491 conflicts = False
489 conflicts = False
492 mode = opts.get('mode', b'merge')
490 mode = opts.get('mode', b'merge')
493 if mode == b'union':
491 if mode == b'union':
494 lines = _resolve(m3, (1, 2))
492 lines = _resolve(m3, (1, 2))
495 elif mode == b'local':
493 elif mode == b'local':
496 lines = _resolve(m3, (1,))
494 lines = _resolve(m3, (1,))
497 elif mode == b'other':
495 elif mode == b'other':
498 lines = _resolve(m3, (2,))
496 lines = _resolve(m3, (2,))
499 else:
497 else:
500 name_a, name_b, name_base = _picklabels(
498 name_a, name_b, name_base = _picklabels(
501 [localctx.path(), otherctx.path(), None], opts.get('label', [])
499 [localctx.path(), otherctx.path(), None], opts.get('label', [])
502 )
500 )
503 if mode == b'mergediff':
501 if mode == b'mergediff':
504 lines, conflicts = _mergediff(m3, name_a, name_b, name_base)
502 lines, conflicts = _mergediff(m3, name_a, name_b, name_base)
505 else:
503 else:
506 extrakwargs = {
504 extrakwargs = {
507 'minimize': True,
505 'minimize': True,
508 }
506 }
509 if mode == b'merge3':
507 if mode == b'merge3':
510 extrakwargs['base_marker'] = b'|||||||'
508 extrakwargs['base_marker'] = b'|||||||'
511 extrakwargs['name_base'] = name_base
509 extrakwargs['name_base'] = name_base
512 extrakwargs['minimize'] = False
510 extrakwargs['minimize'] = False
513 lines = list(
511 lines, conflicts = m3.merge_lines(
514 m3.merge_lines(name_a=name_a, name_b=name_b, **extrakwargs)
512 name_a=name_a, name_b=name_b, **extrakwargs
515 )
513 )
516 conflicts = m3.conflicts
517
514
518 mergedtext = b''.join(lines)
515 mergedtext = b''.join(lines)
519 if opts.get('print'):
516 if opts.get('print'):
520 ui.fout.write(mergedtext)
517 ui.fout.write(mergedtext)
521 else:
518 else:
522 # localctx.flags() already has the merged flags (done in
519 # localctx.flags() already has the merged flags (done in
523 # mergestate.resolve())
520 # mergestate.resolve())
524 localctx.write(mergedtext, localctx.flags())
521 localctx.write(mergedtext, localctx.flags())
525
522
526 if conflicts:
523 if conflicts:
527 return 1
524 return 1
@@ -1,386 +1,386 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import unittest
18 import unittest
19 from mercurial import (
19 from mercurial import (
20 error,
20 error,
21 simplemerge,
21 simplemerge,
22 util,
22 util,
23 )
23 )
24
24
25 from mercurial.utils import stringutil
25 from mercurial.utils import stringutil
26
26
27 TestCase = unittest.TestCase
27 TestCase = unittest.TestCase
28 # bzr compatible interface, for the tests
28 # bzr compatible interface, for the tests
29 class Merge3(simplemerge.Merge3Text):
29 class Merge3(simplemerge.Merge3Text):
30 """3-way merge of texts.
30 """3-way merge of texts.
31
31
32 Given BASE, OTHER, THIS, tries to produce a combined text
32 Given BASE, OTHER, THIS, tries to produce a combined text
33 incorporating the changes from both BASE->OTHER and BASE->THIS.
33 incorporating the changes from both BASE->OTHER and BASE->THIS.
34 All three will typically be sequences of lines."""
34 All three will typically be sequences of lines."""
35
35
36 def __init__(self, base, a, b):
36 def __init__(self, base, a, b):
37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
40 if (
40 if (
41 stringutil.binary(basetext)
41 stringutil.binary(basetext)
42 or stringutil.binary(atext)
42 or stringutil.binary(atext)
43 or stringutil.binary(btext)
43 or stringutil.binary(btext)
44 ):
44 ):
45 raise error.Abort(b"don't know how to merge binary files")
45 raise error.Abort(b"don't know how to merge binary files")
46 simplemerge.Merge3Text.__init__(
46 simplemerge.Merge3Text.__init__(
47 self, basetext, atext, btext, base, a, b
47 self, basetext, atext, btext, base, a, b
48 )
48 )
49
49
50
50
51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
52
52
53
53
54 def split_lines(t):
54 def split_lines(t):
55 return util.stringio(t).readlines()
55 return util.stringio(t).readlines()
56
56
57
57
58 ############################################################
58 ############################################################
59 # test case data from the gnu diffutils manual
59 # test case data from the gnu diffutils manual
60 # common base
60 # common base
61 TZU = split_lines(
61 TZU = split_lines(
62 b""" The Nameless is the origin of Heaven and Earth;
62 b""" The Nameless is the origin of Heaven and Earth;
63 The named is the mother of all things.
63 The named is the mother of all things.
64
64
65 Therefore let there always be non-being,
65 Therefore let there always be non-being,
66 so we may see their subtlety,
66 so we may see their subtlety,
67 And let there always be being,
67 And let there always be being,
68 so we may see their outcome.
68 so we may see their outcome.
69 The two are the same,
69 The two are the same,
70 But after they are produced,
70 But after they are produced,
71 they have different names.
71 they have different names.
72 They both may be called deep and profound.
72 They both may be called deep and profound.
73 Deeper and more profound,
73 Deeper and more profound,
74 The door of all subtleties!
74 The door of all subtleties!
75 """
75 """
76 )
76 )
77
77
78 LAO = split_lines(
78 LAO = split_lines(
79 b""" The Way that can be told of is not the eternal Way;
79 b""" The Way that can be told of is not the eternal Way;
80 The name that can be named is not the eternal name.
80 The name that can be named is not the eternal name.
81 The Nameless is the origin of Heaven and Earth;
81 The Nameless is the origin of Heaven and Earth;
82 The Named is the mother of all things.
82 The Named is the mother of all things.
83 Therefore let there always be non-being,
83 Therefore let there always be non-being,
84 so we may see their subtlety,
84 so we may see their subtlety,
85 And let there always be being,
85 And let there always be being,
86 so we may see their outcome.
86 so we may see their outcome.
87 The two are the same,
87 The two are the same,
88 But after they are produced,
88 But after they are produced,
89 they have different names.
89 they have different names.
90 """
90 """
91 )
91 )
92
92
93
93
94 TAO = split_lines(
94 TAO = split_lines(
95 b""" The Way that can be told of is not the eternal Way;
95 b""" The Way that can be told of is not the eternal Way;
96 The name that can be named is not the eternal name.
96 The name that can be named is not the eternal name.
97 The Nameless is the origin of Heaven and Earth;
97 The Nameless is the origin of Heaven and Earth;
98 The named is the mother of all things.
98 The named is the mother of all things.
99
99
100 Therefore let there always be non-being,
100 Therefore let there always be non-being,
101 so we may see their subtlety,
101 so we may see their subtlety,
102 And let there always be being,
102 And let there always be being,
103 so we may see their result.
103 so we may see their result.
104 The two are the same,
104 The two are the same,
105 But after they are produced,
105 But after they are produced,
106 they have different names.
106 they have different names.
107
107
108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
109
109
110 """
110 """
111 )
111 )
112
112
113 MERGED_RESULT = split_lines(
113 MERGED_RESULT = split_lines(
114 b"""\
114 b"""\
115 The Way that can be told of is not the eternal Way;
115 The Way that can be told of is not the eternal Way;
116 The name that can be named is not the eternal name.
116 The name that can be named is not the eternal name.
117 The Nameless is the origin of Heaven and Earth;
117 The Nameless is the origin of Heaven and Earth;
118 The Named is the mother of all things.
118 The Named is the mother of all things.
119 Therefore let there always be non-being,
119 Therefore let there always be non-being,
120 so we may see their subtlety,
120 so we may see their subtlety,
121 And let there always be being,
121 And let there always be being,
122 so we may see their result.
122 so we may see their result.
123 The two are the same,
123 The two are the same,
124 But after they are produced,
124 But after they are produced,
125 they have different names.\
125 they have different names.\
126 \n<<<<<<< LAO\
126 \n<<<<<<< LAO\
127 \n=======
127 \n=======
128
128
129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
130 \
130 \
131 \n>>>>>>> TAO
131 \n>>>>>>> TAO
132 """
132 """
133 )
133 )
134
134
135
135
136 class TestMerge3(TestCase):
136 class TestMerge3(TestCase):
137 def log(self, msg):
137 def log(self, msg):
138 pass
138 pass
139
139
140 def test_no_changes(self):
140 def test_no_changes(self):
141 """No conflicts because nothing changed"""
141 """No conflicts because nothing changed"""
142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
143
143
144 self.assertEqual(
144 self.assertEqual(
145 list(m3.find_sync_regions()),
145 list(m3.find_sync_regions()),
146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
147 )
147 )
148
148
149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
150
150
151 self.assertEqual(
151 self.assertEqual(
152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
153 )
153 )
154
154
155 def test_front_insert(self):
155 def test_front_insert(self):
156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
157
157
158 # todo: should use a sentinel at end as from get_matching_blocks
158 # todo: should use a sentinel at end as from get_matching_blocks
159 # to match without zz
159 # to match without zz
160 self.assertEqual(
160 self.assertEqual(
161 list(m3.find_sync_regions()),
161 list(m3.find_sync_regions()),
162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
163 )
163 )
164
164
165 self.assertEqual(
165 self.assertEqual(
166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
167 )
167 )
168
168
169 self.assertEqual(
169 self.assertEqual(
170 list(m3.merge_groups()),
170 list(m3.merge_groups()),
171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
172 )
172 )
173
173
174 def test_null_insert(self):
174 def test_null_insert(self):
175 m3 = Merge3([], [b'aaa', b'bbb'], [])
175 m3 = Merge3([], [b'aaa', b'bbb'], [])
176 # todo: should use a sentinel at end as from get_matching_blocks
176 # todo: should use a sentinel at end as from get_matching_blocks
177 # to match without zz
177 # to match without zz
178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
179
179
180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
181
181
182 self.assertEqual(list(m3.merge_lines()), [b'aaa', b'bbb'])
182 self.assertEqual(m3.merge_lines(), ([b'aaa', b'bbb'], False))
183
183
184 def test_no_conflicts(self):
184 def test_no_conflicts(self):
185 """No conflicts because only one side changed"""
185 """No conflicts because only one side changed"""
186 m3 = Merge3(
186 m3 = Merge3(
187 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
187 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
188 )
188 )
189
189
190 self.assertEqual(
190 self.assertEqual(
191 list(m3.find_sync_regions()),
191 list(m3.find_sync_regions()),
192 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
192 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
193 )
193 )
194
194
195 self.assertEqual(
195 self.assertEqual(
196 list(m3.merge_regions()),
196 list(m3.merge_regions()),
197 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
197 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
198 )
198 )
199
199
200 def test_append_a(self):
200 def test_append_a(self):
201 m3 = Merge3(
201 m3 = Merge3(
202 [b'aaa\n', b'bbb\n'],
202 [b'aaa\n', b'bbb\n'],
203 [b'aaa\n', b'bbb\n', b'222\n'],
203 [b'aaa\n', b'bbb\n', b'222\n'],
204 [b'aaa\n', b'bbb\n'],
204 [b'aaa\n', b'bbb\n'],
205 )
205 )
206
206
207 self.assertEqual(b''.join(m3.merge_lines()), b'aaa\nbbb\n222\n')
207 self.assertEqual(b''.join(m3.merge_lines()[0]), b'aaa\nbbb\n222\n')
208
208
209 def test_append_b(self):
209 def test_append_b(self):
210 m3 = Merge3(
210 m3 = Merge3(
211 [b'aaa\n', b'bbb\n'],
211 [b'aaa\n', b'bbb\n'],
212 [b'aaa\n', b'bbb\n'],
212 [b'aaa\n', b'bbb\n'],
213 [b'aaa\n', b'bbb\n', b'222\n'],
213 [b'aaa\n', b'bbb\n', b'222\n'],
214 )
214 )
215
215
216 self.assertEqual(b''.join(m3.merge_lines()), b'aaa\nbbb\n222\n')
216 self.assertEqual(b''.join(m3.merge_lines()[0]), b'aaa\nbbb\n222\n')
217
217
218 def test_append_agreement(self):
218 def test_append_agreement(self):
219 m3 = Merge3(
219 m3 = Merge3(
220 [b'aaa\n', b'bbb\n'],
220 [b'aaa\n', b'bbb\n'],
221 [b'aaa\n', b'bbb\n', b'222\n'],
221 [b'aaa\n', b'bbb\n', b'222\n'],
222 [b'aaa\n', b'bbb\n', b'222\n'],
222 [b'aaa\n', b'bbb\n', b'222\n'],
223 )
223 )
224
224
225 self.assertEqual(b''.join(m3.merge_lines()), b'aaa\nbbb\n222\n')
225 self.assertEqual(b''.join(m3.merge_lines()[0]), b'aaa\nbbb\n222\n')
226
226
227 def test_append_clash(self):
227 def test_append_clash(self):
228 m3 = Merge3(
228 m3 = Merge3(
229 [b'aaa\n', b'bbb\n'],
229 [b'aaa\n', b'bbb\n'],
230 [b'aaa\n', b'bbb\n', b'222\n'],
230 [b'aaa\n', b'bbb\n', b'222\n'],
231 [b'aaa\n', b'bbb\n', b'333\n'],
231 [b'aaa\n', b'bbb\n', b'333\n'],
232 )
232 )
233
233
234 ml = m3.merge_lines(
234 ml, conflicts = m3.merge_lines(
235 name_a=b'a',
235 name_a=b'a',
236 name_b=b'b',
236 name_b=b'b',
237 start_marker=b'<<',
237 start_marker=b'<<',
238 mid_marker=b'--',
238 mid_marker=b'--',
239 end_marker=b'>>',
239 end_marker=b'>>',
240 )
240 )
241 self.assertEqual(
241 self.assertEqual(
242 b''.join(ml),
242 b''.join(ml),
243 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
243 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
244 )
244 )
245
245
246 def test_insert_agreement(self):
246 def test_insert_agreement(self):
247 m3 = Merge3(
247 m3 = Merge3(
248 [b'aaa\n', b'bbb\n'],
248 [b'aaa\n', b'bbb\n'],
249 [b'aaa\n', b'222\n', b'bbb\n'],
249 [b'aaa\n', b'222\n', b'bbb\n'],
250 [b'aaa\n', b'222\n', b'bbb\n'],
250 [b'aaa\n', b'222\n', b'bbb\n'],
251 )
251 )
252
252
253 ml = m3.merge_lines(
253 ml, conflicts = m3.merge_lines(
254 name_a=b'a',
254 name_a=b'a',
255 name_b=b'b',
255 name_b=b'b',
256 start_marker=b'<<',
256 start_marker=b'<<',
257 mid_marker=b'--',
257 mid_marker=b'--',
258 end_marker=b'>>',
258 end_marker=b'>>',
259 )
259 )
260 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
260 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
261
261
262 def test_insert_clash(self):
262 def test_insert_clash(self):
263 """Both try to insert lines in the same place."""
263 """Both try to insert lines in the same place."""
264 m3 = Merge3(
264 m3 = Merge3(
265 [b'aaa\n', b'bbb\n'],
265 [b'aaa\n', b'bbb\n'],
266 [b'aaa\n', b'111\n', b'bbb\n'],
266 [b'aaa\n', b'111\n', b'bbb\n'],
267 [b'aaa\n', b'222\n', b'bbb\n'],
267 [b'aaa\n', b'222\n', b'bbb\n'],
268 )
268 )
269
269
270 self.assertEqual(
270 self.assertEqual(
271 list(m3.find_sync_regions()),
271 list(m3.find_sync_regions()),
272 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
272 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
273 )
273 )
274
274
275 self.assertEqual(
275 self.assertEqual(
276 list(m3.merge_regions()),
276 list(m3.merge_regions()),
277 [
277 [
278 (b'unchanged', 0, 1),
278 (b'unchanged', 0, 1),
279 (b'conflict', 1, 1, 1, 2, 1, 2),
279 (b'conflict', 1, 1, 1, 2, 1, 2),
280 (b'unchanged', 1, 2),
280 (b'unchanged', 1, 2),
281 ],
281 ],
282 )
282 )
283
283
284 self.assertEqual(
284 self.assertEqual(
285 list(m3.merge_groups()),
285 list(m3.merge_groups()),
286 [
286 [
287 (b'unchanged', [b'aaa\n']),
287 (b'unchanged', [b'aaa\n']),
288 (b'conflict', ([], [b'111\n'], [b'222\n'])),
288 (b'conflict', ([], [b'111\n'], [b'222\n'])),
289 (b'unchanged', [b'bbb\n']),
289 (b'unchanged', [b'bbb\n']),
290 ],
290 ],
291 )
291 )
292
292
293 ml = m3.merge_lines(
293 ml, conflicts = m3.merge_lines(
294 name_a=b'a',
294 name_a=b'a',
295 name_b=b'b',
295 name_b=b'b',
296 start_marker=b'<<',
296 start_marker=b'<<',
297 mid_marker=b'--',
297 mid_marker=b'--',
298 end_marker=b'>>',
298 end_marker=b'>>',
299 )
299 )
300 self.assertEqual(
300 self.assertEqual(
301 b''.join(ml),
301 b''.join(ml),
302 b'''aaa
302 b'''aaa
303 << a
303 << a
304 111
304 111
305 --
305 --
306 222
306 222
307 >> b
307 >> b
308 bbb
308 bbb
309 ''',
309 ''',
310 )
310 )
311
311
312 def test_replace_clash(self):
312 def test_replace_clash(self):
313 """Both try to insert lines in the same place."""
313 """Both try to insert lines in the same place."""
314 m3 = Merge3(
314 m3 = Merge3(
315 [b'aaa', b'000', b'bbb'],
315 [b'aaa', b'000', b'bbb'],
316 [b'aaa', b'111', b'bbb'],
316 [b'aaa', b'111', b'bbb'],
317 [b'aaa', b'222', b'bbb'],
317 [b'aaa', b'222', b'bbb'],
318 )
318 )
319
319
320 self.assertEqual(
320 self.assertEqual(
321 list(m3.find_sync_regions()),
321 list(m3.find_sync_regions()),
322 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
322 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
323 )
323 )
324
324
325 def test_replace_multi(self):
325 def test_replace_multi(self):
326 """Replacement with regions of different size."""
326 """Replacement with regions of different size."""
327 m3 = Merge3(
327 m3 = Merge3(
328 [b'aaa', b'000', b'000', b'bbb'],
328 [b'aaa', b'000', b'000', b'bbb'],
329 [b'aaa', b'111', b'111', b'111', b'bbb'],
329 [b'aaa', b'111', b'111', b'111', b'bbb'],
330 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
330 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
331 )
331 )
332
332
333 self.assertEqual(
333 self.assertEqual(
334 list(m3.find_sync_regions()),
334 list(m3.find_sync_regions()),
335 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
335 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
336 )
336 )
337
337
338 def test_merge_poem(self):
338 def test_merge_poem(self):
339 """Test case from diff3 manual"""
339 """Test case from diff3 manual"""
340 m3 = Merge3(TZU, LAO, TAO)
340 m3 = Merge3(TZU, LAO, TAO)
341 ml = list(m3.merge_lines(b'LAO', b'TAO'))
341 ml, conflicts = m3.merge_lines(b'LAO', b'TAO')
342 self.log(b'merge result:')
342 self.log(b'merge result:')
343 self.log(b''.join(ml))
343 self.log(b''.join(ml))
344 self.assertEqual(ml, MERGED_RESULT)
344 self.assertEqual(ml, MERGED_RESULT)
345
345
346 def test_binary(self):
346 def test_binary(self):
347 with self.assertRaises(error.Abort):
347 with self.assertRaises(error.Abort):
348 Merge3([b'\x00'], [b'a'], [b'b'])
348 Merge3([b'\x00'], [b'a'], [b'b'])
349
349
350 def test_dos_text(self):
350 def test_dos_text(self):
351 base_text = b'a\r\n'
351 base_text = b'a\r\n'
352 this_text = b'b\r\n'
352 this_text = b'b\r\n'
353 other_text = b'c\r\n'
353 other_text = b'c\r\n'
354 m3 = Merge3(
354 m3 = Merge3(
355 base_text.splitlines(True),
355 base_text.splitlines(True),
356 other_text.splitlines(True),
356 other_text.splitlines(True),
357 this_text.splitlines(True),
357 this_text.splitlines(True),
358 )
358 )
359 m_lines = m3.merge_lines(b'OTHER', b'THIS')
359 m_lines, conflicts = m3.merge_lines(b'OTHER', b'THIS')
360 self.assertEqual(
360 self.assertEqual(
361 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
361 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
362 b'>>>>>>> THIS\r\n'.splitlines(True),
362 b'>>>>>>> THIS\r\n'.splitlines(True),
363 list(m_lines),
363 m_lines,
364 )
364 )
365
365
366 def test_mac_text(self):
366 def test_mac_text(self):
367 base_text = b'a\r'
367 base_text = b'a\r'
368 this_text = b'b\r'
368 this_text = b'b\r'
369 other_text = b'c\r'
369 other_text = b'c\r'
370 m3 = Merge3(
370 m3 = Merge3(
371 base_text.splitlines(True),
371 base_text.splitlines(True),
372 other_text.splitlines(True),
372 other_text.splitlines(True),
373 this_text.splitlines(True),
373 this_text.splitlines(True),
374 )
374 )
375 m_lines = m3.merge_lines(b'OTHER', b'THIS')
375 m_lines, conflicts = m3.merge_lines(b'OTHER', b'THIS')
376 self.assertEqual(
376 self.assertEqual(
377 b'<<<<<<< OTHER\rc\r=======\rb\r'
377 b'<<<<<<< OTHER\rc\r=======\rb\r'
378 b'>>>>>>> THIS\r'.splitlines(True),
378 b'>>>>>>> THIS\r'.splitlines(True),
379 list(m_lines),
379 m_lines,
380 )
380 )
381
381
382
382
383 if __name__ == '__main__':
383 if __name__ == '__main__':
384 import silenttestrunner
384 import silenttestrunner
385
385
386 silenttestrunner.main(__name__)
386 silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now