##// END OF EJS Templates
simplemerge: simplify and rename `render_markers()`...
Martin von Zweigbergk -
r49411:12ac4401 default
parent child Browse files
Show More
@@ -1,4876 +1,4877 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 deltas as deltautil,
106 deltas as deltautil,
107 nodemap,
107 nodemap,
108 rewrite,
108 rewrite,
109 sidedata,
109 sidedata,
110 )
110 )
111
111
112 release = lockmod.release
112 release = lockmod.release
113
113
114 table = {}
114 table = {}
115 table.update(strip.command._table)
115 table.update(strip.command._table)
116 command = registrar.command(table)
116 command = registrar.command(table)
117
117
118
118
119 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
119 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
120 def debugancestor(ui, repo, *args):
120 def debugancestor(ui, repo, *args):
121 """find the ancestor revision of two revisions in a given index"""
121 """find the ancestor revision of two revisions in a given index"""
122 if len(args) == 3:
122 if len(args) == 3:
123 index, rev1, rev2 = args
123 index, rev1, rev2 = args
124 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
124 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
125 lookup = r.lookup
125 lookup = r.lookup
126 elif len(args) == 2:
126 elif len(args) == 2:
127 if not repo:
127 if not repo:
128 raise error.Abort(
128 raise error.Abort(
129 _(b'there is no Mercurial repository here (.hg not found)')
129 _(b'there is no Mercurial repository here (.hg not found)')
130 )
130 )
131 rev1, rev2 = args
131 rev1, rev2 = args
132 r = repo.changelog
132 r = repo.changelog
133 lookup = repo.lookup
133 lookup = repo.lookup
134 else:
134 else:
135 raise error.Abort(_(b'either two or three arguments required'))
135 raise error.Abort(_(b'either two or three arguments required'))
136 a = r.ancestor(lookup(rev1), lookup(rev2))
136 a = r.ancestor(lookup(rev1), lookup(rev2))
137 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
137 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
138
138
139
139
140 @command(b'debugantivirusrunning', [])
140 @command(b'debugantivirusrunning', [])
141 def debugantivirusrunning(ui, repo):
141 def debugantivirusrunning(ui, repo):
142 """attempt to trigger an antivirus scanner to see if one is active"""
142 """attempt to trigger an antivirus scanner to see if one is active"""
143 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
143 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
144 f.write(
144 f.write(
145 util.b85decode(
145 util.b85decode(
146 # This is a base85-armored version of the EICAR test file. See
146 # This is a base85-armored version of the EICAR test file. See
147 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
147 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
148 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
148 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
149 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
149 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
150 )
150 )
151 )
151 )
152 # Give an AV engine time to scan the file.
152 # Give an AV engine time to scan the file.
153 time.sleep(2)
153 time.sleep(2)
154 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
154 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155
155
156
156
157 @command(b'debugapplystreamclonebundle', [], b'FILE')
157 @command(b'debugapplystreamclonebundle', [], b'FILE')
158 def debugapplystreamclonebundle(ui, repo, fname):
158 def debugapplystreamclonebundle(ui, repo, fname):
159 """apply a stream clone bundle file"""
159 """apply a stream clone bundle file"""
160 f = hg.openpath(ui, fname)
160 f = hg.openpath(ui, fname)
161 gen = exchange.readbundle(ui, f, fname)
161 gen = exchange.readbundle(ui, f, fname)
162 gen.apply(repo)
162 gen.apply(repo)
163
163
164
164
165 @command(
165 @command(
166 b'debugbuilddag',
166 b'debugbuilddag',
167 [
167 [
168 (
168 (
169 b'm',
169 b'm',
170 b'mergeable-file',
170 b'mergeable-file',
171 None,
171 None,
172 _(b'add single file mergeable changes'),
172 _(b'add single file mergeable changes'),
173 ),
173 ),
174 (
174 (
175 b'o',
175 b'o',
176 b'overwritten-file',
176 b'overwritten-file',
177 None,
177 None,
178 _(b'add single file all revs overwrite'),
178 _(b'add single file all revs overwrite'),
179 ),
179 ),
180 (b'n', b'new-file', None, _(b'add new file at each rev')),
180 (b'n', b'new-file', None, _(b'add new file at each rev')),
181 ],
181 ],
182 _(b'[OPTION]... [TEXT]'),
182 _(b'[OPTION]... [TEXT]'),
183 )
183 )
184 def debugbuilddag(
184 def debugbuilddag(
185 ui,
185 ui,
186 repo,
186 repo,
187 text=None,
187 text=None,
188 mergeable_file=False,
188 mergeable_file=False,
189 overwritten_file=False,
189 overwritten_file=False,
190 new_file=False,
190 new_file=False,
191 ):
191 ):
192 """builds a repo with a given DAG from scratch in the current empty repo
192 """builds a repo with a given DAG from scratch in the current empty repo
193
193
194 The description of the DAG is read from stdin if not given on the
194 The description of the DAG is read from stdin if not given on the
195 command line.
195 command line.
196
196
197 Elements:
197 Elements:
198
198
199 - "+n" is a linear run of n nodes based on the current default parent
199 - "+n" is a linear run of n nodes based on the current default parent
200 - "." is a single node based on the current default parent
200 - "." is a single node based on the current default parent
201 - "$" resets the default parent to null (implied at the start);
201 - "$" resets the default parent to null (implied at the start);
202 otherwise the default parent is always the last node created
202 otherwise the default parent is always the last node created
203 - "<p" sets the default parent to the backref p
203 - "<p" sets the default parent to the backref p
204 - "*p" is a fork at parent p, which is a backref
204 - "*p" is a fork at parent p, which is a backref
205 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
205 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
206 - "/p2" is a merge of the preceding node and p2
206 - "/p2" is a merge of the preceding node and p2
207 - ":tag" defines a local tag for the preceding node
207 - ":tag" defines a local tag for the preceding node
208 - "@branch" sets the named branch for subsequent nodes
208 - "@branch" sets the named branch for subsequent nodes
209 - "#...\\n" is a comment up to the end of the line
209 - "#...\\n" is a comment up to the end of the line
210
210
211 Whitespace between the above elements is ignored.
211 Whitespace between the above elements is ignored.
212
212
213 A backref is either
213 A backref is either
214
214
215 - a number n, which references the node curr-n, where curr is the current
215 - a number n, which references the node curr-n, where curr is the current
216 node, or
216 node, or
217 - the name of a local tag you placed earlier using ":tag", or
217 - the name of a local tag you placed earlier using ":tag", or
218 - empty to denote the default parent.
218 - empty to denote the default parent.
219
219
220 All string valued-elements are either strictly alphanumeric, or must
220 All string valued-elements are either strictly alphanumeric, or must
221 be enclosed in double quotes ("..."), with "\\" as escape character.
221 be enclosed in double quotes ("..."), with "\\" as escape character.
222 """
222 """
223
223
224 if text is None:
224 if text is None:
225 ui.status(_(b"reading DAG from stdin\n"))
225 ui.status(_(b"reading DAG from stdin\n"))
226 text = ui.fin.read()
226 text = ui.fin.read()
227
227
228 cl = repo.changelog
228 cl = repo.changelog
229 if len(cl) > 0:
229 if len(cl) > 0:
230 raise error.Abort(_(b'repository is not empty'))
230 raise error.Abort(_(b'repository is not empty'))
231
231
232 # determine number of revs in DAG
232 # determine number of revs in DAG
233 total = 0
233 total = 0
234 for type, data in dagparser.parsedag(text):
234 for type, data in dagparser.parsedag(text):
235 if type == b'n':
235 if type == b'n':
236 total += 1
236 total += 1
237
237
238 if mergeable_file:
238 if mergeable_file:
239 linesperrev = 2
239 linesperrev = 2
240 # make a file with k lines per rev
240 # make a file with k lines per rev
241 initialmergedlines = [
241 initialmergedlines = [
242 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
242 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
243 ]
243 ]
244 initialmergedlines.append(b"")
244 initialmergedlines.append(b"")
245
245
246 tags = []
246 tags = []
247 progress = ui.makeprogress(
247 progress = ui.makeprogress(
248 _(b'building'), unit=_(b'revisions'), total=total
248 _(b'building'), unit=_(b'revisions'), total=total
249 )
249 )
250 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
250 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
251 at = -1
251 at = -1
252 atbranch = b'default'
252 atbranch = b'default'
253 nodeids = []
253 nodeids = []
254 id = 0
254 id = 0
255 progress.update(id)
255 progress.update(id)
256 for type, data in dagparser.parsedag(text):
256 for type, data in dagparser.parsedag(text):
257 if type == b'n':
257 if type == b'n':
258 ui.note((b'node %s\n' % pycompat.bytestr(data)))
258 ui.note((b'node %s\n' % pycompat.bytestr(data)))
259 id, ps = data
259 id, ps = data
260
260
261 files = []
261 files = []
262 filecontent = {}
262 filecontent = {}
263
263
264 p2 = None
264 p2 = None
265 if mergeable_file:
265 if mergeable_file:
266 fn = b"mf"
266 fn = b"mf"
267 p1 = repo[ps[0]]
267 p1 = repo[ps[0]]
268 if len(ps) > 1:
268 if len(ps) > 1:
269 p2 = repo[ps[1]]
269 p2 = repo[ps[1]]
270 pa = p1.ancestor(p2)
270 pa = p1.ancestor(p2)
271 base, local, other = [
271 base, local, other = [
272 x[fn].data() for x in (pa, p1, p2)
272 x[fn].data() for x in (pa, p1, p2)
273 ]
273 ]
274 m3 = simplemerge.Merge3Text(base, local, other)
274 m3 = simplemerge.Merge3Text(base, local, other)
275 ml = [
275 ml = [
276 l.strip() for l in simplemerge.render_markers(m3)[0]
276 l.strip()
277 for l in simplemerge.render_minimized(m3)[0]
277 ]
278 ]
278 ml.append(b"")
279 ml.append(b"")
279 elif at > 0:
280 elif at > 0:
280 ml = p1[fn].data().split(b"\n")
281 ml = p1[fn].data().split(b"\n")
281 else:
282 else:
282 ml = initialmergedlines
283 ml = initialmergedlines
283 ml[id * linesperrev] += b" r%i" % id
284 ml[id * linesperrev] += b" r%i" % id
284 mergedtext = b"\n".join(ml)
285 mergedtext = b"\n".join(ml)
285 files.append(fn)
286 files.append(fn)
286 filecontent[fn] = mergedtext
287 filecontent[fn] = mergedtext
287
288
288 if overwritten_file:
289 if overwritten_file:
289 fn = b"of"
290 fn = b"of"
290 files.append(fn)
291 files.append(fn)
291 filecontent[fn] = b"r%i\n" % id
292 filecontent[fn] = b"r%i\n" % id
292
293
293 if new_file:
294 if new_file:
294 fn = b"nf%i" % id
295 fn = b"nf%i" % id
295 files.append(fn)
296 files.append(fn)
296 filecontent[fn] = b"r%i\n" % id
297 filecontent[fn] = b"r%i\n" % id
297 if len(ps) > 1:
298 if len(ps) > 1:
298 if not p2:
299 if not p2:
299 p2 = repo[ps[1]]
300 p2 = repo[ps[1]]
300 for fn in p2:
301 for fn in p2:
301 if fn.startswith(b"nf"):
302 if fn.startswith(b"nf"):
302 files.append(fn)
303 files.append(fn)
303 filecontent[fn] = p2[fn].data()
304 filecontent[fn] = p2[fn].data()
304
305
305 def fctxfn(repo, cx, path):
306 def fctxfn(repo, cx, path):
306 if path in filecontent:
307 if path in filecontent:
307 return context.memfilectx(
308 return context.memfilectx(
308 repo, cx, path, filecontent[path]
309 repo, cx, path, filecontent[path]
309 )
310 )
310 return None
311 return None
311
312
312 if len(ps) == 0 or ps[0] < 0:
313 if len(ps) == 0 or ps[0] < 0:
313 pars = [None, None]
314 pars = [None, None]
314 elif len(ps) == 1:
315 elif len(ps) == 1:
315 pars = [nodeids[ps[0]], None]
316 pars = [nodeids[ps[0]], None]
316 else:
317 else:
317 pars = [nodeids[p] for p in ps]
318 pars = [nodeids[p] for p in ps]
318 cx = context.memctx(
319 cx = context.memctx(
319 repo,
320 repo,
320 pars,
321 pars,
321 b"r%i" % id,
322 b"r%i" % id,
322 files,
323 files,
323 fctxfn,
324 fctxfn,
324 date=(id, 0),
325 date=(id, 0),
325 user=b"debugbuilddag",
326 user=b"debugbuilddag",
326 extra={b'branch': atbranch},
327 extra={b'branch': atbranch},
327 )
328 )
328 nodeid = repo.commitctx(cx)
329 nodeid = repo.commitctx(cx)
329 nodeids.append(nodeid)
330 nodeids.append(nodeid)
330 at = id
331 at = id
331 elif type == b'l':
332 elif type == b'l':
332 id, name = data
333 id, name = data
333 ui.note((b'tag %s\n' % name))
334 ui.note((b'tag %s\n' % name))
334 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
335 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
335 elif type == b'a':
336 elif type == b'a':
336 ui.note((b'branch %s\n' % data))
337 ui.note((b'branch %s\n' % data))
337 atbranch = data
338 atbranch = data
338 progress.update(id)
339 progress.update(id)
339
340
340 if tags:
341 if tags:
341 repo.vfs.write(b"localtags", b"".join(tags))
342 repo.vfs.write(b"localtags", b"".join(tags))
342
343
343
344
344 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
345 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
345 indent_string = b' ' * indent
346 indent_string = b' ' * indent
346 if all:
347 if all:
347 ui.writenoi18n(
348 ui.writenoi18n(
348 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
349 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
349 % indent_string
350 % indent_string
350 )
351 )
351
352
352 def showchunks(named):
353 def showchunks(named):
353 ui.write(b"\n%s%s\n" % (indent_string, named))
354 ui.write(b"\n%s%s\n" % (indent_string, named))
354 for deltadata in gen.deltaiter():
355 for deltadata in gen.deltaiter():
355 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
356 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
356 ui.write(
357 ui.write(
357 b"%s%s %s %s %s %s %d\n"
358 b"%s%s %s %s %s %s %d\n"
358 % (
359 % (
359 indent_string,
360 indent_string,
360 hex(node),
361 hex(node),
361 hex(p1),
362 hex(p1),
362 hex(p2),
363 hex(p2),
363 hex(cs),
364 hex(cs),
364 hex(deltabase),
365 hex(deltabase),
365 len(delta),
366 len(delta),
366 )
367 )
367 )
368 )
368
369
369 gen.changelogheader()
370 gen.changelogheader()
370 showchunks(b"changelog")
371 showchunks(b"changelog")
371 gen.manifestheader()
372 gen.manifestheader()
372 showchunks(b"manifest")
373 showchunks(b"manifest")
373 for chunkdata in iter(gen.filelogheader, {}):
374 for chunkdata in iter(gen.filelogheader, {}):
374 fname = chunkdata[b'filename']
375 fname = chunkdata[b'filename']
375 showchunks(fname)
376 showchunks(fname)
376 else:
377 else:
377 if isinstance(gen, bundle2.unbundle20):
378 if isinstance(gen, bundle2.unbundle20):
378 raise error.Abort(_(b'use debugbundle2 for this file'))
379 raise error.Abort(_(b'use debugbundle2 for this file'))
379 gen.changelogheader()
380 gen.changelogheader()
380 for deltadata in gen.deltaiter():
381 for deltadata in gen.deltaiter():
381 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
382 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
382 ui.write(b"%s%s\n" % (indent_string, hex(node)))
383 ui.write(b"%s%s\n" % (indent_string, hex(node)))
383
384
384
385
385 def _debugobsmarkers(ui, part, indent=0, **opts):
386 def _debugobsmarkers(ui, part, indent=0, **opts):
386 """display version and markers contained in 'data'"""
387 """display version and markers contained in 'data'"""
387 opts = pycompat.byteskwargs(opts)
388 opts = pycompat.byteskwargs(opts)
388 data = part.read()
389 data = part.read()
389 indent_string = b' ' * indent
390 indent_string = b' ' * indent
390 try:
391 try:
391 version, markers = obsolete._readmarkers(data)
392 version, markers = obsolete._readmarkers(data)
392 except error.UnknownVersion as exc:
393 except error.UnknownVersion as exc:
393 msg = b"%sunsupported version: %s (%d bytes)\n"
394 msg = b"%sunsupported version: %s (%d bytes)\n"
394 msg %= indent_string, exc.version, len(data)
395 msg %= indent_string, exc.version, len(data)
395 ui.write(msg)
396 ui.write(msg)
396 else:
397 else:
397 msg = b"%sversion: %d (%d bytes)\n"
398 msg = b"%sversion: %d (%d bytes)\n"
398 msg %= indent_string, version, len(data)
399 msg %= indent_string, version, len(data)
399 ui.write(msg)
400 ui.write(msg)
400 fm = ui.formatter(b'debugobsolete', opts)
401 fm = ui.formatter(b'debugobsolete', opts)
401 for rawmarker in sorted(markers):
402 for rawmarker in sorted(markers):
402 m = obsutil.marker(None, rawmarker)
403 m = obsutil.marker(None, rawmarker)
403 fm.startitem()
404 fm.startitem()
404 fm.plain(indent_string)
405 fm.plain(indent_string)
405 cmdutil.showmarker(fm, m)
406 cmdutil.showmarker(fm, m)
406 fm.end()
407 fm.end()
407
408
408
409
409 def _debugphaseheads(ui, data, indent=0):
410 def _debugphaseheads(ui, data, indent=0):
410 """display version and markers contained in 'data'"""
411 """display version and markers contained in 'data'"""
411 indent_string = b' ' * indent
412 indent_string = b' ' * indent
412 headsbyphase = phases.binarydecode(data)
413 headsbyphase = phases.binarydecode(data)
413 for phase in phases.allphases:
414 for phase in phases.allphases:
414 for head in headsbyphase[phase]:
415 for head in headsbyphase[phase]:
415 ui.write(indent_string)
416 ui.write(indent_string)
416 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
417 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
417
418
418
419
419 def _quasirepr(thing):
420 def _quasirepr(thing):
420 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
421 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
421 return b'{%s}' % (
422 return b'{%s}' % (
422 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
423 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
423 )
424 )
424 return pycompat.bytestr(repr(thing))
425 return pycompat.bytestr(repr(thing))
425
426
426
427
427 def _debugbundle2(ui, gen, all=None, **opts):
428 def _debugbundle2(ui, gen, all=None, **opts):
428 """lists the contents of a bundle2"""
429 """lists the contents of a bundle2"""
429 if not isinstance(gen, bundle2.unbundle20):
430 if not isinstance(gen, bundle2.unbundle20):
430 raise error.Abort(_(b'not a bundle2 file'))
431 raise error.Abort(_(b'not a bundle2 file'))
431 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
432 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
432 parttypes = opts.get('part_type', [])
433 parttypes = opts.get('part_type', [])
433 for part in gen.iterparts():
434 for part in gen.iterparts():
434 if parttypes and part.type not in parttypes:
435 if parttypes and part.type not in parttypes:
435 continue
436 continue
436 msg = b'%s -- %s (mandatory: %r)\n'
437 msg = b'%s -- %s (mandatory: %r)\n'
437 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
438 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
438 if part.type == b'changegroup':
439 if part.type == b'changegroup':
439 version = part.params.get(b'version', b'01')
440 version = part.params.get(b'version', b'01')
440 cg = changegroup.getunbundler(version, part, b'UN')
441 cg = changegroup.getunbundler(version, part, b'UN')
441 if not ui.quiet:
442 if not ui.quiet:
442 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
443 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
443 if part.type == b'obsmarkers':
444 if part.type == b'obsmarkers':
444 if not ui.quiet:
445 if not ui.quiet:
445 _debugobsmarkers(ui, part, indent=4, **opts)
446 _debugobsmarkers(ui, part, indent=4, **opts)
446 if part.type == b'phase-heads':
447 if part.type == b'phase-heads':
447 if not ui.quiet:
448 if not ui.quiet:
448 _debugphaseheads(ui, part, indent=4)
449 _debugphaseheads(ui, part, indent=4)
449
450
450
451
451 @command(
452 @command(
452 b'debugbundle',
453 b'debugbundle',
453 [
454 [
454 (b'a', b'all', None, _(b'show all details')),
455 (b'a', b'all', None, _(b'show all details')),
455 (b'', b'part-type', [], _(b'show only the named part type')),
456 (b'', b'part-type', [], _(b'show only the named part type')),
456 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
457 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
457 ],
458 ],
458 _(b'FILE'),
459 _(b'FILE'),
459 norepo=True,
460 norepo=True,
460 )
461 )
461 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
462 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
462 """lists the contents of a bundle"""
463 """lists the contents of a bundle"""
463 with hg.openpath(ui, bundlepath) as f:
464 with hg.openpath(ui, bundlepath) as f:
464 if spec:
465 if spec:
465 spec = exchange.getbundlespec(ui, f)
466 spec = exchange.getbundlespec(ui, f)
466 ui.write(b'%s\n' % spec)
467 ui.write(b'%s\n' % spec)
467 return
468 return
468
469
469 gen = exchange.readbundle(ui, f, bundlepath)
470 gen = exchange.readbundle(ui, f, bundlepath)
470 if isinstance(gen, bundle2.unbundle20):
471 if isinstance(gen, bundle2.unbundle20):
471 return _debugbundle2(ui, gen, all=all, **opts)
472 return _debugbundle2(ui, gen, all=all, **opts)
472 _debugchangegroup(ui, gen, all=all, **opts)
473 _debugchangegroup(ui, gen, all=all, **opts)
473
474
474
475
475 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
476 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
476 def debugcapabilities(ui, path, **opts):
477 def debugcapabilities(ui, path, **opts):
477 """lists the capabilities of a remote peer"""
478 """lists the capabilities of a remote peer"""
478 opts = pycompat.byteskwargs(opts)
479 opts = pycompat.byteskwargs(opts)
479 peer = hg.peer(ui, opts, path)
480 peer = hg.peer(ui, opts, path)
480 try:
481 try:
481 caps = peer.capabilities()
482 caps = peer.capabilities()
482 ui.writenoi18n(b'Main capabilities:\n')
483 ui.writenoi18n(b'Main capabilities:\n')
483 for c in sorted(caps):
484 for c in sorted(caps):
484 ui.write(b' %s\n' % c)
485 ui.write(b' %s\n' % c)
485 b2caps = bundle2.bundle2caps(peer)
486 b2caps = bundle2.bundle2caps(peer)
486 if b2caps:
487 if b2caps:
487 ui.writenoi18n(b'Bundle2 capabilities:\n')
488 ui.writenoi18n(b'Bundle2 capabilities:\n')
488 for key, values in sorted(pycompat.iteritems(b2caps)):
489 for key, values in sorted(pycompat.iteritems(b2caps)):
489 ui.write(b' %s\n' % key)
490 ui.write(b' %s\n' % key)
490 for v in values:
491 for v in values:
491 ui.write(b' %s\n' % v)
492 ui.write(b' %s\n' % v)
492 finally:
493 finally:
493 peer.close()
494 peer.close()
494
495
495
496
496 @command(
497 @command(
497 b'debugchangedfiles',
498 b'debugchangedfiles',
498 [
499 [
499 (
500 (
500 b'',
501 b'',
501 b'compute',
502 b'compute',
502 False,
503 False,
503 b"compute information instead of reading it from storage",
504 b"compute information instead of reading it from storage",
504 ),
505 ),
505 ],
506 ],
506 b'REV',
507 b'REV',
507 )
508 )
508 def debugchangedfiles(ui, repo, rev, **opts):
509 def debugchangedfiles(ui, repo, rev, **opts):
509 """list the stored files changes for a revision"""
510 """list the stored files changes for a revision"""
510 ctx = logcmdutil.revsingle(repo, rev, None)
511 ctx = logcmdutil.revsingle(repo, rev, None)
511 files = None
512 files = None
512
513
513 if opts['compute']:
514 if opts['compute']:
514 files = metadata.compute_all_files_changes(ctx)
515 files = metadata.compute_all_files_changes(ctx)
515 else:
516 else:
516 sd = repo.changelog.sidedata(ctx.rev())
517 sd = repo.changelog.sidedata(ctx.rev())
517 files_block = sd.get(sidedata.SD_FILES)
518 files_block = sd.get(sidedata.SD_FILES)
518 if files_block is not None:
519 if files_block is not None:
519 files = metadata.decode_files_sidedata(sd)
520 files = metadata.decode_files_sidedata(sd)
520 if files is not None:
521 if files is not None:
521 for f in sorted(files.touched):
522 for f in sorted(files.touched):
522 if f in files.added:
523 if f in files.added:
523 action = b"added"
524 action = b"added"
524 elif f in files.removed:
525 elif f in files.removed:
525 action = b"removed"
526 action = b"removed"
526 elif f in files.merged:
527 elif f in files.merged:
527 action = b"merged"
528 action = b"merged"
528 elif f in files.salvaged:
529 elif f in files.salvaged:
529 action = b"salvaged"
530 action = b"salvaged"
530 else:
531 else:
531 action = b"touched"
532 action = b"touched"
532
533
533 copy_parent = b""
534 copy_parent = b""
534 copy_source = b""
535 copy_source = b""
535 if f in files.copied_from_p1:
536 if f in files.copied_from_p1:
536 copy_parent = b"p1"
537 copy_parent = b"p1"
537 copy_source = files.copied_from_p1[f]
538 copy_source = files.copied_from_p1[f]
538 elif f in files.copied_from_p2:
539 elif f in files.copied_from_p2:
539 copy_parent = b"p2"
540 copy_parent = b"p2"
540 copy_source = files.copied_from_p2[f]
541 copy_source = files.copied_from_p2[f]
541
542
542 data = (action, copy_parent, f, copy_source)
543 data = (action, copy_parent, f, copy_source)
543 template = b"%-8s %2s: %s, %s;\n"
544 template = b"%-8s %2s: %s, %s;\n"
544 ui.write(template % data)
545 ui.write(template % data)
545
546
546
547
547 @command(b'debugcheckstate', [], b'')
548 @command(b'debugcheckstate', [], b'')
548 def debugcheckstate(ui, repo):
549 def debugcheckstate(ui, repo):
549 """validate the correctness of the current dirstate"""
550 """validate the correctness of the current dirstate"""
550 parent1, parent2 = repo.dirstate.parents()
551 parent1, parent2 = repo.dirstate.parents()
551 m1 = repo[parent1].manifest()
552 m1 = repo[parent1].manifest()
552 m2 = repo[parent2].manifest()
553 m2 = repo[parent2].manifest()
553 errors = 0
554 errors = 0
554 for err in repo.dirstate.verify(m1, m2):
555 for err in repo.dirstate.verify(m1, m2):
555 ui.warn(err[0] % err[1:])
556 ui.warn(err[0] % err[1:])
556 errors += 1
557 errors += 1
557 if errors:
558 if errors:
558 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
559 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
559 raise error.Abort(errstr)
560 raise error.Abort(errstr)
560
561
561
562
562 @command(
563 @command(
563 b'debugcolor',
564 b'debugcolor',
564 [(b'', b'style', None, _(b'show all configured styles'))],
565 [(b'', b'style', None, _(b'show all configured styles'))],
565 b'hg debugcolor',
566 b'hg debugcolor',
566 )
567 )
567 def debugcolor(ui, repo, **opts):
568 def debugcolor(ui, repo, **opts):
568 """show available color, effects or style"""
569 """show available color, effects or style"""
569 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
570 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
570 if opts.get('style'):
571 if opts.get('style'):
571 return _debugdisplaystyle(ui)
572 return _debugdisplaystyle(ui)
572 else:
573 else:
573 return _debugdisplaycolor(ui)
574 return _debugdisplaycolor(ui)
574
575
575
576
576 def _debugdisplaycolor(ui):
577 def _debugdisplaycolor(ui):
577 ui = ui.copy()
578 ui = ui.copy()
578 ui._styles.clear()
579 ui._styles.clear()
579 for effect in color._activeeffects(ui).keys():
580 for effect in color._activeeffects(ui).keys():
580 ui._styles[effect] = effect
581 ui._styles[effect] = effect
581 if ui._terminfoparams:
582 if ui._terminfoparams:
582 for k, v in ui.configitems(b'color'):
583 for k, v in ui.configitems(b'color'):
583 if k.startswith(b'color.'):
584 if k.startswith(b'color.'):
584 ui._styles[k] = k[6:]
585 ui._styles[k] = k[6:]
585 elif k.startswith(b'terminfo.'):
586 elif k.startswith(b'terminfo.'):
586 ui._styles[k] = k[9:]
587 ui._styles[k] = k[9:]
587 ui.write(_(b'available colors:\n'))
588 ui.write(_(b'available colors:\n'))
588 # sort label with a '_' after the other to group '_background' entry.
589 # sort label with a '_' after the other to group '_background' entry.
589 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
590 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
590 for colorname, label in items:
591 for colorname, label in items:
591 ui.write(b'%s\n' % colorname, label=label)
592 ui.write(b'%s\n' % colorname, label=label)
592
593
593
594
def _debugdisplaystyle(ui):
    """List the configured style labels together with their effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad every label to the widest one so the effect columns line up.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b': ')
            ui.write(padding)
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
607
608
608
609
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles archive store files as-is, so secret changesets
        # end up in the bundle; warn the user rather than silently
        # including them.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # generatebundlev1() yields the requirements a consumer must support
    # plus an iterator of raw chunks that writechunks() streams to `fname`.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
630
631
631
632
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: emit that revlog's DAG directly.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        # Revisions listed on the command line get an "rN" label in the output.
        revs = {int(r) for r in revs}

        def events():
            # Event stream consumed by dagparser.dagtextlines():
            #   ('n', (rev, [parents]))  -- one per node, null parents dropped
            #   ('l', (rev, label))      -- label a requested revision
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision number to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Same event protocol as above, plus ('a', branchname)
            # annotations whenever the branch changes (--branches) and
            # ('l', (rev, tag)) labels for tagged revisions (--tags).
            b = b"default"
            for r in cl:
                if branches:
                    # Extra-field 5 of the changelog entry carries the
                    # per-changeset metadata dict, including b'branch'.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
701
702
702
703
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument IS the revision and
    # no separate REV may be supplied; otherwise REV is mandatory.
    storeselected = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if storeselected:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # Emit the raw (possibly compressed-layer-decoded) revision data.
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
718
719
719
720
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also accepts the extra date formats.
    parsed = (
        dateutil.parsedate(date, dateutil.extendeddateformats)
        if opts["extended"]
        else dateutil.parsedate(date)
    )
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Report whether the parsed timestamp falls inside RANGE.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
738
739
739
740
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    # Bind the per-revision accessors once; they are called in the hot loop.
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather (compsize, uncompsize, deltatype, chain, chainsize) for one
        # revision from its raw index entry.
        e = index[rev]
        compsize = e[1]  # compressed length on disk
        uncompsize = e[2]  # uncompressed (raw) length
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base revision; e[5]/e[6] appear to be the
            # parent revisions here -- classify what the delta was built
            # against.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the previous
            # revision unless this revision is a full snapshot (its own base).
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Fixed-width header matching the %-format widths used below.
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered in order of first appearance of their base.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Distance in bytes from the chain base to the end of this revision,
        # and how much of that span belongs to other chains.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the whole chain and measure how many
            # disk blocks it touches and how dense the useful data is.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
920
921
921
922
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --no-dates and the deprecated --nodates both suppress mtimes; any
    # explicit --nodates value wins over --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort primarily by saved mtime, then by filename for stability.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # Literals are padded to the 20-character width produced by the
            # strftime format below so columns stay aligned.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: show 'lnk' instead of an octal mode.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
976
977
977
978
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records an ignore-patterns hash; with v1 this
    # command intentionally prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored as the trailing bytes of the docket's tree
        # metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
992
993
993
994
994 @command(
995 @command(
995 b'debugdiscovery',
996 b'debugdiscovery',
996 [
997 [
997 (b'', b'old', None, _(b'use old-style discovery')),
998 (b'', b'old', None, _(b'use old-style discovery')),
998 (
999 (
999 b'',
1000 b'',
1000 b'nonheads',
1001 b'nonheads',
1001 None,
1002 None,
1002 _(b'use old-style discovery with non-heads included'),
1003 _(b'use old-style discovery with non-heads included'),
1003 ),
1004 ),
1004 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1005 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1005 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1006 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1006 (
1007 (
1007 b'',
1008 b'',
1008 b'local-as-revs',
1009 b'local-as-revs',
1009 b"",
1010 b"",
1010 b'treat local has having these revisions only',
1011 b'treat local has having these revisions only',
1011 ),
1012 ),
1012 (
1013 (
1013 b'',
1014 b'',
1014 b'remote-as-revs',
1015 b'remote-as-revs',
1015 b"",
1016 b"",
1016 b'use local as remote, with only these these revisions',
1017 b'use local as remote, with only these these revisions',
1017 ),
1018 ),
1018 ]
1019 ]
1019 + cmdutil.remoteopts
1020 + cmdutil.remoteopts
1020 + cmdutil.formatteropts,
1021 + cmdutil.formatteropts,
1021 _(b'[--rev REV] [OTHER]'),
1022 _(b'[--rev REV] [OTHER]'),
1022 )
1023 )
1023 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1024 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1024 """runs the changeset discovery protocol in isolation
1025 """runs the changeset discovery protocol in isolation
1025
1026
1026 The local peer can be "replaced" by a subset of the local repository by
1027 The local peer can be "replaced" by a subset of the local repository by
1027 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1028 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1028 be "replaced" by a subset of the local repository using the
1029 be "replaced" by a subset of the local repository using the
1029 `--local-as-revs` flag. This is useful to efficiently debug pathological
1030 `--local-as-revs` flag. This is useful to efficiently debug pathological
1030 discovery situation.
1031 discovery situation.
1031
1032
1032 The following developer oriented config are relevant for people playing with this command:
1033 The following developer oriented config are relevant for people playing with this command:
1033
1034
1034 * devel.discovery.exchange-heads=True
1035 * devel.discovery.exchange-heads=True
1035
1036
1036 If False, the discovery will not start with
1037 If False, the discovery will not start with
1037 remote head fetching and local head querying.
1038 remote head fetching and local head querying.
1038
1039
1039 * devel.discovery.grow-sample=True
1040 * devel.discovery.grow-sample=True
1040
1041
1041 If False, the sample size used in set discovery will not be increased
1042 If False, the sample size used in set discovery will not be increased
1042 through the process
1043 through the process
1043
1044
1044 * devel.discovery.grow-sample.dynamic=True
1045 * devel.discovery.grow-sample.dynamic=True
1045
1046
1046 When discovery.grow-sample.dynamic is True, the default, the sample size is
1047 When discovery.grow-sample.dynamic is True, the default, the sample size is
1047 adapted to the shape of the undecided set (it is set to the max of:
1048 adapted to the shape of the undecided set (it is set to the max of:
1048 <target-size>, len(roots(undecided)), len(heads(undecided)
1049 <target-size>, len(roots(undecided)), len(heads(undecided)
1049
1050
1050 * devel.discovery.grow-sample.rate=1.05
1051 * devel.discovery.grow-sample.rate=1.05
1051
1052
1052 the rate at which the sample grow
1053 the rate at which the sample grow
1053
1054
1054 * devel.discovery.randomize=True
1055 * devel.discovery.randomize=True
1055
1056
1056 If andom sampling during discovery are deterministic. It is meant for
1057 If andom sampling during discovery are deterministic. It is meant for
1057 integration tests.
1058 integration tests.
1058
1059
1059 * devel.discovery.sample-size=200
1060 * devel.discovery.sample-size=200
1060
1061
1061 Control the initial size of the discovery sample
1062 Control the initial size of the discovery sample
1062
1063
1063 * devel.discovery.sample-size.initial=100
1064 * devel.discovery.sample-size.initial=100
1064
1065
1065 Control the initial size of the discovery for initial change
1066 Control the initial size of the discovery for initial change
1066 """
1067 """
1067 opts = pycompat.byteskwargs(opts)
1068 opts = pycompat.byteskwargs(opts)
1068 unfi = repo.unfiltered()
1069 unfi = repo.unfiltered()
1069
1070
1070 # setup potential extra filtering
1071 # setup potential extra filtering
1071 local_revs = opts[b"local_as_revs"]
1072 local_revs = opts[b"local_as_revs"]
1072 remote_revs = opts[b"remote_as_revs"]
1073 remote_revs = opts[b"remote_as_revs"]
1073
1074
1074 # make sure tests are repeatable
1075 # make sure tests are repeatable
1075 random.seed(int(opts[b'seed']))
1076 random.seed(int(opts[b'seed']))
1076
1077
1077 if not remote_revs:
1078 if not remote_revs:
1078
1079
1079 remoteurl, branches = urlutil.get_unique_pull_path(
1080 remoteurl, branches = urlutil.get_unique_pull_path(
1080 b'debugdiscovery', repo, ui, remoteurl
1081 b'debugdiscovery', repo, ui, remoteurl
1081 )
1082 )
1082 remote = hg.peer(repo, opts, remoteurl)
1083 remote = hg.peer(repo, opts, remoteurl)
1083 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1084 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1084 else:
1085 else:
1085 branches = (None, [])
1086 branches = (None, [])
1086 remote_filtered_revs = logcmdutil.revrange(
1087 remote_filtered_revs = logcmdutil.revrange(
1087 unfi, [b"not (::(%s))" % remote_revs]
1088 unfi, [b"not (::(%s))" % remote_revs]
1088 )
1089 )
1089 remote_filtered_revs = frozenset(remote_filtered_revs)
1090 remote_filtered_revs = frozenset(remote_filtered_revs)
1090
1091
1091 def remote_func(x):
1092 def remote_func(x):
1092 return remote_filtered_revs
1093 return remote_filtered_revs
1093
1094
1094 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1095 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1095
1096
1096 remote = repo.peer()
1097 remote = repo.peer()
1097 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1098 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1098
1099
1099 if local_revs:
1100 if local_revs:
1100 local_filtered_revs = logcmdutil.revrange(
1101 local_filtered_revs = logcmdutil.revrange(
1101 unfi, [b"not (::(%s))" % local_revs]
1102 unfi, [b"not (::(%s))" % local_revs]
1102 )
1103 )
1103 local_filtered_revs = frozenset(local_filtered_revs)
1104 local_filtered_revs = frozenset(local_filtered_revs)
1104
1105
1105 def local_func(x):
1106 def local_func(x):
1106 return local_filtered_revs
1107 return local_filtered_revs
1107
1108
1108 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1109 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1109 repo = repo.filtered(b'debug-discovery-local-filter')
1110 repo = repo.filtered(b'debug-discovery-local-filter')
1110
1111
1111 data = {}
1112 data = {}
1112 if opts.get(b'old'):
1113 if opts.get(b'old'):
1113
1114
1114 def doit(pushedrevs, remoteheads, remote=remote):
1115 def doit(pushedrevs, remoteheads, remote=remote):
1115 if not util.safehasattr(remote, b'branches'):
1116 if not util.safehasattr(remote, b'branches'):
1116 # enable in-client legacy support
1117 # enable in-client legacy support
1117 remote = localrepo.locallegacypeer(remote.local())
1118 remote = localrepo.locallegacypeer(remote.local())
1118 common, _in, hds = treediscovery.findcommonincoming(
1119 common, _in, hds = treediscovery.findcommonincoming(
1119 repo, remote, force=True, audit=data
1120 repo, remote, force=True, audit=data
1120 )
1121 )
1121 common = set(common)
1122 common = set(common)
1122 if not opts.get(b'nonheads'):
1123 if not opts.get(b'nonheads'):
1123 ui.writenoi18n(
1124 ui.writenoi18n(
1124 b"unpruned common: %s\n"
1125 b"unpruned common: %s\n"
1125 % b" ".join(sorted(short(n) for n in common))
1126 % b" ".join(sorted(short(n) for n in common))
1126 )
1127 )
1127
1128
1128 clnode = repo.changelog.node
1129 clnode = repo.changelog.node
1129 common = repo.revs(b'heads(::%ln)', common)
1130 common = repo.revs(b'heads(::%ln)', common)
1130 common = {clnode(r) for r in common}
1131 common = {clnode(r) for r in common}
1131 return common, hds
1132 return common, hds
1132
1133
1133 else:
1134 else:
1134
1135
1135 def doit(pushedrevs, remoteheads, remote=remote):
1136 def doit(pushedrevs, remoteheads, remote=remote):
1136 nodes = None
1137 nodes = None
1137 if pushedrevs:
1138 if pushedrevs:
1138 revs = logcmdutil.revrange(repo, pushedrevs)
1139 revs = logcmdutil.revrange(repo, pushedrevs)
1139 nodes = [repo[r].node() for r in revs]
1140 nodes = [repo[r].node() for r in revs]
1140 common, any, hds = setdiscovery.findcommonheads(
1141 common, any, hds = setdiscovery.findcommonheads(
1141 ui, repo, remote, ancestorsof=nodes, audit=data
1142 ui, repo, remote, ancestorsof=nodes, audit=data
1142 )
1143 )
1143 return common, hds
1144 return common, hds
1144
1145
1145 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1146 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1146 localrevs = opts[b'rev']
1147 localrevs = opts[b'rev']
1147
1148
1148 fm = ui.formatter(b'debugdiscovery', opts)
1149 fm = ui.formatter(b'debugdiscovery', opts)
1149 if fm.strict_format:
1150 if fm.strict_format:
1150
1151
1151 @contextlib.contextmanager
1152 @contextlib.contextmanager
1152 def may_capture_output():
1153 def may_capture_output():
1153 ui.pushbuffer()
1154 ui.pushbuffer()
1154 yield
1155 yield
1155 data[b'output'] = ui.popbuffer()
1156 data[b'output'] = ui.popbuffer()
1156
1157
1157 else:
1158 else:
1158 may_capture_output = util.nullcontextmanager
1159 may_capture_output = util.nullcontextmanager
1159 with may_capture_output():
1160 with may_capture_output():
1160 with util.timedcm('debug-discovery') as t:
1161 with util.timedcm('debug-discovery') as t:
1161 common, hds = doit(localrevs, remoterevs)
1162 common, hds = doit(localrevs, remoterevs)
1162
1163
1163 # compute all statistics
1164 # compute all statistics
1164 heads_common = set(common)
1165 heads_common = set(common)
1165 heads_remote = set(hds)
1166 heads_remote = set(hds)
1166 heads_local = set(repo.heads())
1167 heads_local = set(repo.heads())
1167 # note: they cannot be a local or remote head that is in common and not
1168 # note: they cannot be a local or remote head that is in common and not
1168 # itself a head of common.
1169 # itself a head of common.
1169 heads_common_local = heads_common & heads_local
1170 heads_common_local = heads_common & heads_local
1170 heads_common_remote = heads_common & heads_remote
1171 heads_common_remote = heads_common & heads_remote
1171 heads_common_both = heads_common & heads_remote & heads_local
1172 heads_common_both = heads_common & heads_remote & heads_local
1172
1173
1173 all = repo.revs(b'all()')
1174 all = repo.revs(b'all()')
1174 common = repo.revs(b'::%ln', common)
1175 common = repo.revs(b'::%ln', common)
1175 roots_common = repo.revs(b'roots(::%ld)', common)
1176 roots_common = repo.revs(b'roots(::%ld)', common)
1176 missing = repo.revs(b'not ::%ld', common)
1177 missing = repo.revs(b'not ::%ld', common)
1177 heads_missing = repo.revs(b'heads(%ld)', missing)
1178 heads_missing = repo.revs(b'heads(%ld)', missing)
1178 roots_missing = repo.revs(b'roots(%ld)', missing)
1179 roots_missing = repo.revs(b'roots(%ld)', missing)
1179 assert len(common) + len(missing) == len(all)
1180 assert len(common) + len(missing) == len(all)
1180
1181
1181 initial_undecided = repo.revs(
1182 initial_undecided = repo.revs(
1182 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1183 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1183 )
1184 )
1184 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1185 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1185 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1186 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1186 common_initial_undecided = initial_undecided & common
1187 common_initial_undecided = initial_undecided & common
1187 missing_initial_undecided = initial_undecided & missing
1188 missing_initial_undecided = initial_undecided & missing
1188
1189
1189 data[b'elapsed'] = t.elapsed
1190 data[b'elapsed'] = t.elapsed
1190 data[b'nb-common-heads'] = len(heads_common)
1191 data[b'nb-common-heads'] = len(heads_common)
1191 data[b'nb-common-heads-local'] = len(heads_common_local)
1192 data[b'nb-common-heads-local'] = len(heads_common_local)
1192 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1193 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1193 data[b'nb-common-heads-both'] = len(heads_common_both)
1194 data[b'nb-common-heads-both'] = len(heads_common_both)
1194 data[b'nb-common-roots'] = len(roots_common)
1195 data[b'nb-common-roots'] = len(roots_common)
1195 data[b'nb-head-local'] = len(heads_local)
1196 data[b'nb-head-local'] = len(heads_local)
1196 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1197 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1197 data[b'nb-head-remote'] = len(heads_remote)
1198 data[b'nb-head-remote'] = len(heads_remote)
1198 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1199 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1199 heads_common_remote
1200 heads_common_remote
1200 )
1201 )
1201 data[b'nb-revs'] = len(all)
1202 data[b'nb-revs'] = len(all)
1202 data[b'nb-revs-common'] = len(common)
1203 data[b'nb-revs-common'] = len(common)
1203 data[b'nb-revs-missing'] = len(missing)
1204 data[b'nb-revs-missing'] = len(missing)
1204 data[b'nb-missing-heads'] = len(heads_missing)
1205 data[b'nb-missing-heads'] = len(heads_missing)
1205 data[b'nb-missing-roots'] = len(roots_missing)
1206 data[b'nb-missing-roots'] = len(roots_missing)
1206 data[b'nb-ini_und'] = len(initial_undecided)
1207 data[b'nb-ini_und'] = len(initial_undecided)
1207 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1208 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1208 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1209 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1209 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1210 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1210 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1211 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1211
1212
1212 fm.startitem()
1213 fm.startitem()
1213 fm.data(**pycompat.strkwargs(data))
1214 fm.data(**pycompat.strkwargs(data))
1214 # display discovery summary
1215 # display discovery summary
1215 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1216 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1216 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1217 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1217 fm.plain(b"heads summary:\n")
1218 fm.plain(b"heads summary:\n")
1218 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1219 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1219 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1220 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1220 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1221 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1221 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1222 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1222 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1223 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1223 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1224 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1224 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1225 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1225 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1226 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1226 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1227 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1227 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1228 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1228 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1229 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1229 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1230 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1230 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1231 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1231 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1232 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1232 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1233 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1233 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1234 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1234 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1235 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1235 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1236 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1236 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1237 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1237 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1238 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1238 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1239 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1239 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1240 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1240
1241
1241 if ui.verbose:
1242 if ui.verbose:
1242 fm.plain(
1243 fm.plain(
1243 b"common heads: %s\n"
1244 b"common heads: %s\n"
1244 % b" ".join(sorted(short(n) for n in heads_common))
1245 % b" ".join(sorted(short(n) for n in heads_common))
1245 )
1246 )
1246 fm.end()
1247 fm.end()
1247
1248
1248
1249
# Buffer size (4 KiB) used when streaming data in `debugdownload`.
_chunksize = 4 << 10
1250
1251
1251
1252
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    Opens ``url`` through Mercurial's url layer (honoring proxy/auth
    configuration) and streams its content either to ``output`` (when
    given) or to the ui in fixed-size chunks.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # Stream in _chunksize pieces so arbitrarily large resources
            # never have to fit in memory at once.
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # The original code leaked the source handle; close it even when
        # writing to the destination fails.
        fh.close()
1274
1275
1275
1276
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # Iterate extensions sorted by name for stable output.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # In an oxidized (PyOxidizer) build modules have no __file__;
            # the executable itself is the closest notion of a source.
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # Quiet/verbose mode: name alone on its line; details (if any)
            # follow via condwrite below.
            fm.write(b'name', b'%s\n', extname)
        else:
            # Default mode: annotate the name with compatibility status.
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # Show the most recent version the extension claims to
                # have been tested with.
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        # Always record the flag for machine-readable formatters.
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1337
1338
1338
1339
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # Pipeline of transformation stages the parsed tree goes through;
    # each stage may be printed when requested via --show-stage.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # Validate each requested stage name before honoring it.
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # Omit the header when only the (deprecated) --verbose parsed
            # dump was requested, to keep that legacy output unchanged.
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file set the matcher will be applied to.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # Include working-directory files (unknown and ignored too).
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, so plain --verbose also triggers it.
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1434
1435
1435
1436
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Generating a report and consuming one (or dry-running) are mutually
    # exclusive modes of operation.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # All of the heavy lifting lives in the rewrite module; we only
    # forward the relevant command-line options.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1508
1509
1509
1510
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: widest variant name, but at least the header width.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad each variant name so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Plain output renders booleans as yes/no; byte strings
            # (detected via startswith) pass through unchanged.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # Structured formatters (json, etc.) get the raw values.
        formatvalue = pycompat.identity

    # Header row; config/default columns only appear with --verbose.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so color output can highlight mismatches between
        # the repo's on-disk format, the config, and Mercurial's default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1580
1581
1581
1582
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # Render a boolean probe result the way this command always has.
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Case sensitivity can only be probed by creating a real file in
    # `path`; fall back to "(unknown)" when that is not possible.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1604
1605
1605
1606
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # Build the keyword arguments for the peer's getbundle() call from the
    # hex node ids given on the command line.
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name to the on-disk bundle header.
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1652
1653
1653
1654
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            # b'.' (the repo root) can never itself be ignored
            if nf != b'.':
                if ignore(nf):
                    # the file matches an ignore rule directly
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether any parent directory is ignored
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report the exact rule (source file + line) that matched
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1702
1703
1703
1704
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in --debug mode, short (12-char) hashes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # derive the id column width from the first node; 12 is the fallback
    # for an empty store
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    # one output row per revision: rev, linkrev, node, and both parents
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1743
1744
1744
1745
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # one edge per parent link: parent rev -> child rev
    for rev in store:
        parents = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        # only merges have a meaningful second parent
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1763
1764
1764
1765
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index so it is fully loaded before asking for stats
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # only the native (C/Rust) index implementations expose stats()
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1774
1775
1775
1776
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of hard errors found; warnings do not increment this
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # locate the standard library; under an oxidized (PyOxidizer) build the
    # stdlib lives inside the executable itself
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b'  TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b'  SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # try to actually import the compiled extension modules selected by the
    # module policy, so a broken install is reported here
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused below as the "templates are healthy" flag
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; only check the binary
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing default 'vi' is only a warning; a missing explicit editor
    # is an error
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2074
2075
2075
2076
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # query all ids in one round trip, then render one digit per answer
    nodes = [bin(hexid) for hexid in ids]
    flags = peer.known(nodes)
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2089
2090
2090
2091
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # pure alias: forward everything to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2095
2096
2096
2097
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free mode: delete the lock file(s) outright and exit
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # set mode: acquire the requested lock(s) non-blockingly, hold them
    # until the user answers the prompt, then release in the finally block
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # report mode (no options): describe the current holder of each lock
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we briefly acquired the lock ourselves, so it was free
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # lock contents look like 'host:pid'; render a friendlier
                # description, omitting the host when it is this machine
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between our attempt and
                # the stat: treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # return value doubles as the count of held locks (0 == all free)
    return held
2208
2209
2209
2210
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog-based
        # manifest storage; other storage backends may not expose it.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing mutates on-disk state, so take the wlock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revisision in cache too
            return

    # Neither --clear nor --add: report the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2283
2284
2284
2285
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; users can override via -T.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits (local / other) involved in the merge.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields present depend on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Content merge record: positional fields as stored in the
                # merge state (key, paths, nodes, flags).
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict record.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras for files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2392
2393
2393
2394
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect candidates from every namespace except 'branches'; branch
    # names get special treatment below so that only open branches are
    # offered for completion.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    candidates.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )

    # With no arguments, complete against the empty prefix (i.e. list all).
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2416
2417
2417
2418
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # The options are mutually exclusive in practice: the first truthy one
    # wins, and with no option the command is a no-op.
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the index's native serializer when available (e.g. the
        # Rust index); otherwise fall back to the pure-Python one.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        # Silently writes nothing if no nodemap has been persisted yet.
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            # Returns check_data's status, used as the command exit code.
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # Fraction of the on-disk file occupied by superseded entries.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2479
2480
2480
2481
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into binary, rejecting anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: --delete removes markers by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: a precursor argument creates a new marker.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent information can only be recorded for changesets
                    # that actually exist in the (unfiltered) repository.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Mode 3: no precursor -> display markers (optionally filtered).
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so iterate all
            # markers to count correctly but only display the selected ones.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2630
2631
2631
2632
2632 @command(
2633 @command(
2633 b'debugp1copies',
2634 b'debugp1copies',
2634 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2635 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2635 _(b'[-r REV]'),
2636 _(b'[-r REV]'),
2636 )
2637 )
2637 def debugp1copies(ui, repo, **opts):
2638 def debugp1copies(ui, repo, **opts):
2638 """dump copy information compared to p1"""
2639 """dump copy information compared to p1"""
2639
2640
2640 opts = pycompat.byteskwargs(opts)
2641 opts = pycompat.byteskwargs(opts)
2641 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2642 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2642 for dst, src in ctx.p1copies().items():
2643 for dst, src in ctx.p1copies().items():
2643 ui.write(b'%s -> %s\n' % (src, dst))
2644 ui.write(b'%s -> %s\n' % (src, dst))
2644
2645
2645
2646
2646 @command(
2647 @command(
2647 b'debugp2copies',
2648 b'debugp2copies',
2648 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2649 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2649 _(b'[-r REV]'),
2650 _(b'[-r REV]'),
2650 )
2651 )
2651 def debugp1copies(ui, repo, **opts):
2652 def debugp1copies(ui, repo, **opts):
2652 """dump copy information compared to p2"""
2653 """dump copy information compared to p2"""
2653
2654
2654 opts = pycompat.byteskwargs(opts)
2655 opts = pycompat.byteskwargs(opts)
2655 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2656 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2656 for dst, src in ctx.p2copies().items():
2657 for dst, src in ctx.p2copies().items():
2657 ui.write(b'%s -> %s\n' % (src, dst))
2658 ui.write(b'%s -> %s\n' % (src, dst))
2658
2659
2659
2660
2660 @command(
2661 @command(
2661 b'debugpathcomplete',
2662 b'debugpathcomplete',
2662 [
2663 [
2663 (b'f', b'full', None, _(b'complete an entire path')),
2664 (b'f', b'full', None, _(b'complete an entire path')),
2664 (b'n', b'normal', None, _(b'show only normal files')),
2665 (b'n', b'normal', None, _(b'show only normal files')),
2665 (b'a', b'added', None, _(b'show only added files')),
2666 (b'a', b'added', None, _(b'show only added files')),
2666 (b'r', b'removed', None, _(b'show only removed files')),
2667 (b'r', b'removed', None, _(b'show only removed files')),
2667 ],
2668 ],
2668 _(b'FILESPEC...'),
2669 _(b'FILESPEC...'),
2669 )
2670 )
2670 def debugpathcomplete(ui, repo, *specs, **opts):
2671 def debugpathcomplete(ui, repo, *specs, **opts):
2671 """complete part or all of a tracked path
2672 """complete part or all of a tracked path
2672
2673
2673 This command supports shells that offer path name completion. It
2674 This command supports shells that offer path name completion. It
2674 currently completes only files already known to the dirstate.
2675 currently completes only files already known to the dirstate.
2675
2676
2676 Completion extends only to the next path segment unless
2677 Completion extends only to the next path segment unless
2677 --full is specified, in which case entire paths are used."""
2678 --full is specified, in which case entire paths are used."""
2678
2679
2679 def complete(path, acceptable):
2680 def complete(path, acceptable):
2680 dirstate = repo.dirstate
2681 dirstate = repo.dirstate
2681 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2682 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2682 rootdir = repo.root + pycompat.ossep
2683 rootdir = repo.root + pycompat.ossep
2683 if spec != repo.root and not spec.startswith(rootdir):
2684 if spec != repo.root and not spec.startswith(rootdir):
2684 return [], []
2685 return [], []
2685 if os.path.isdir(spec):
2686 if os.path.isdir(spec):
2686 spec += b'/'
2687 spec += b'/'
2687 spec = spec[len(rootdir) :]
2688 spec = spec[len(rootdir) :]
2688 fixpaths = pycompat.ossep != b'/'
2689 fixpaths = pycompat.ossep != b'/'
2689 if fixpaths:
2690 if fixpaths:
2690 spec = spec.replace(pycompat.ossep, b'/')
2691 spec = spec.replace(pycompat.ossep, b'/')
2691 speclen = len(spec)
2692 speclen = len(spec)
2692 fullpaths = opts['full']
2693 fullpaths = opts['full']
2693 files, dirs = set(), set()
2694 files, dirs = set(), set()
2694 adddir, addfile = dirs.add, files.add
2695 adddir, addfile = dirs.add, files.add
2695 for f, st in pycompat.iteritems(dirstate):
2696 for f, st in pycompat.iteritems(dirstate):
2696 if f.startswith(spec) and st.state in acceptable:
2697 if f.startswith(spec) and st.state in acceptable:
2697 if fixpaths:
2698 if fixpaths:
2698 f = f.replace(b'/', pycompat.ossep)
2699 f = f.replace(b'/', pycompat.ossep)
2699 if fullpaths:
2700 if fullpaths:
2700 addfile(f)
2701 addfile(f)
2701 continue
2702 continue
2702 s = f.find(pycompat.ossep, speclen)
2703 s = f.find(pycompat.ossep, speclen)
2703 if s >= 0:
2704 if s >= 0:
2704 adddir(f[:s])
2705 adddir(f[:s])
2705 else:
2706 else:
2706 addfile(f)
2707 addfile(f)
2707 return files, dirs
2708 return files, dirs
2708
2709
2709 acceptable = b''
2710 acceptable = b''
2710 if opts['normal']:
2711 if opts['normal']:
2711 acceptable += b'nm'
2712 acceptable += b'nm'
2712 if opts['added']:
2713 if opts['added']:
2713 acceptable += b'a'
2714 acceptable += b'a'
2714 if opts['removed']:
2715 if opts['removed']:
2715 acceptable += b'r'
2716 acceptable += b'r'
2716 cwd = repo.getcwd()
2717 cwd = repo.getcwd()
2717 if not specs:
2718 if not specs:
2718 specs = [b'.']
2719 specs = [b'.']
2719
2720
2720 files, dirs = set(), set()
2721 files, dirs = set(), set()
2721 for spec in specs:
2722 for spec in specs:
2722 f, d = complete(spec, acceptable or b'nmar')
2723 f, d = complete(spec, acceptable or b'nmar')
2723 files.update(f)
2724 files.update(f)
2724 dirs.update(d)
2725 dirs.update(d)
2725 files.update(dirs)
2726 files.update(dirs)
2726 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2727 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2727 ui.write(b'\n')
2728 ui.write(b'\n')
2728
2729
2729
2730
2730 @command(
2731 @command(
2731 b'debugpathcopies',
2732 b'debugpathcopies',
2732 cmdutil.walkopts,
2733 cmdutil.walkopts,
2733 b'hg debugpathcopies REV1 REV2 [FILE]',
2734 b'hg debugpathcopies REV1 REV2 [FILE]',
2734 inferrepo=True,
2735 inferrepo=True,
2735 )
2736 )
2736 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2737 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2737 """show copies between two revisions"""
2738 """show copies between two revisions"""
2738 ctx1 = scmutil.revsingle(repo, rev1)
2739 ctx1 = scmutil.revsingle(repo, rev1)
2739 ctx2 = scmutil.revsingle(repo, rev2)
2740 ctx2 = scmutil.revsingle(repo, rev2)
2740 m = scmutil.match(ctx1, pats, opts)
2741 m = scmutil.match(ctx1, pats, opts)
2741 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2742 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2742 ui.write(b'%s -> %s\n' % (src, dst))
2743 ui.write(b'%s -> %s\n' % (src, dst))
2743
2744
2744
2745
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    def _yesno(flag):
        return _(b'yes') if flag else _(b'no')

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % _yesno(is_local))
        ui.write(_(b'pushable: %s\n') % _yesno(pushable))
    finally:
        # Always release the peer connection, even if a query failed.
        peer.close()
2768
2769
2769
2770
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool wins over every other configuration source.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other tool-selection inputs (verbose only).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Unless --debug is set, suppress the noise _picktool emits
            # while probing the configuration.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2854
2855
2855
2856
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-arg form: conditionally update one key.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Command exit code: 0 on success (truthy result).
            return not result
        else:
            # Two-arg form: dump every key/value in the namespace.
            listing = target.listkeys(namespace)
            for k, v in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2891
2892
2892
2893
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors of two revisions and report their
    # relationship (=, >, <, or |).
    ctx_a = scmutil.revsingle(repo, a)
    ctx_b = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ctx_a)
    pb = pvec.ctxpvec(ctx_b)
    # NOTE(review): if none of these comparisons holds, `rel` stays
    # unbound and the write below raises — presumably pvec comparisons
    # are exhaustive; confirm against the pvec module.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2919
2920
2920
2921
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this to
        # the inconsistent files only (see command doc).
        changedfiles = None
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Tracked in the manifest but missing from the dirstate...
            manifest_only = in_manifest - in_dirstate
            # ...plus dirstate entries absent from the manifest that
            # are not pending adds.
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2968
2969
2969
2970
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize str option keys to bytes before reading them.
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2986
2987
2987
2988
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a falsy
        # value when the file was not copied/renamed.
        rename_info = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(path)
        if rename_info:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (rel, rename_info[0], hex(rename_info[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3007
3008
3008
3009
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3014
3015
3015
3016
3016 @command(
3017 @command(
3017 b'debugrevlog',
3018 b'debugrevlog',
3018 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3019 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3019 _(b'-c|-m|FILE'),
3020 _(b'-c|-m|FILE'),
3020 optionalrepo=True,
3021 optionalrepo=True,
3021 )
3022 )
3022 def debugrevlog(ui, repo, file_=None, **opts):
3023 def debugrevlog(ui, repo, file_=None, **opts):
3023 """show data and statistics about a revlog"""
3024 """show data and statistics about a revlog"""
3024 opts = pycompat.byteskwargs(opts)
3025 opts = pycompat.byteskwargs(opts)
3025 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3026 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3026
3027
3027 if opts.get(b"dump"):
3028 if opts.get(b"dump"):
3028 numrevs = len(r)
3029 numrevs = len(r)
3029 ui.write(
3030 ui.write(
3030 (
3031 (
3031 b"# rev p1rev p2rev start end deltastart base p1 p2"
3032 b"# rev p1rev p2rev start end deltastart base p1 p2"
3032 b" rawsize totalsize compression heads chainlen\n"
3033 b" rawsize totalsize compression heads chainlen\n"
3033 )
3034 )
3034 )
3035 )
3035 ts = 0
3036 ts = 0
3036 heads = set()
3037 heads = set()
3037
3038
3038 for rev in pycompat.xrange(numrevs):
3039 for rev in pycompat.xrange(numrevs):
3039 dbase = r.deltaparent(rev)
3040 dbase = r.deltaparent(rev)
3040 if dbase == -1:
3041 if dbase == -1:
3041 dbase = rev
3042 dbase = rev
3042 cbase = r.chainbase(rev)
3043 cbase = r.chainbase(rev)
3043 clen = r.chainlen(rev)
3044 clen = r.chainlen(rev)
3044 p1, p2 = r.parentrevs(rev)
3045 p1, p2 = r.parentrevs(rev)
3045 rs = r.rawsize(rev)
3046 rs = r.rawsize(rev)
3046 ts = ts + rs
3047 ts = ts + rs
3047 heads -= set(r.parentrevs(rev))
3048 heads -= set(r.parentrevs(rev))
3048 heads.add(rev)
3049 heads.add(rev)
3049 try:
3050 try:
3050 compression = ts / r.end(rev)
3051 compression = ts / r.end(rev)
3051 except ZeroDivisionError:
3052 except ZeroDivisionError:
3052 compression = 0
3053 compression = 0
3053 ui.write(
3054 ui.write(
3054 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3055 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3055 b"%11d %5d %8d\n"
3056 b"%11d %5d %8d\n"
3056 % (
3057 % (
3057 rev,
3058 rev,
3058 p1,
3059 p1,
3059 p2,
3060 p2,
3060 r.start(rev),
3061 r.start(rev),
3061 r.end(rev),
3062 r.end(rev),
3062 r.start(dbase),
3063 r.start(dbase),
3063 r.start(cbase),
3064 r.start(cbase),
3064 r.start(p1),
3065 r.start(p1),
3065 r.start(p2),
3066 r.start(p2),
3066 rs,
3067 rs,
3067 ts,
3068 ts,
3068 compression,
3069 compression,
3069 len(heads),
3070 len(heads),
3070 clen,
3071 clen,
3071 )
3072 )
3072 )
3073 )
3073 return 0
3074 return 0
3074
3075
3075 format = r._format_version
3076 format = r._format_version
3076 v = r._format_flags
3077 v = r._format_flags
3077 flags = []
3078 flags = []
3078 gdelta = False
3079 gdelta = False
3079 if v & revlog.FLAG_INLINE_DATA:
3080 if v & revlog.FLAG_INLINE_DATA:
3080 flags.append(b'inline')
3081 flags.append(b'inline')
3081 if v & revlog.FLAG_GENERALDELTA:
3082 if v & revlog.FLAG_GENERALDELTA:
3082 gdelta = True
3083 gdelta = True
3083 flags.append(b'generaldelta')
3084 flags.append(b'generaldelta')
3084 if not flags:
3085 if not flags:
3085 flags = [b'(none)']
3086 flags = [b'(none)']
3086
3087
3087 ### tracks merge vs single parent
3088 ### tracks merge vs single parent
3088 nummerges = 0
3089 nummerges = 0
3089
3090
3090 ### tracks ways the "delta" are build
3091 ### tracks ways the "delta" are build
3091 # nodelta
3092 # nodelta
3092 numempty = 0
3093 numempty = 0
3093 numemptytext = 0
3094 numemptytext = 0
3094 numemptydelta = 0
3095 numemptydelta = 0
3095 # full file content
3096 # full file content
3096 numfull = 0
3097 numfull = 0
3097 # intermediate snapshot against a prior snapshot
3098 # intermediate snapshot against a prior snapshot
3098 numsemi = 0
3099 numsemi = 0
3099 # snapshot count per depth
3100 # snapshot count per depth
3100 numsnapdepth = collections.defaultdict(lambda: 0)
3101 numsnapdepth = collections.defaultdict(lambda: 0)
3101 # delta against previous revision
3102 # delta against previous revision
3102 numprev = 0
3103 numprev = 0
3103 # delta against first or second parent (not prev)
3104 # delta against first or second parent (not prev)
3104 nump1 = 0
3105 nump1 = 0
3105 nump2 = 0
3106 nump2 = 0
3106 # delta against neither prev nor parents
3107 # delta against neither prev nor parents
3107 numother = 0
3108 numother = 0
3108 # delta against prev that are also first or second parent
3109 # delta against prev that are also first or second parent
3109 # (details of `numprev`)
3110 # (details of `numprev`)
3110 nump1prev = 0
3111 nump1prev = 0
3111 nump2prev = 0
3112 nump2prev = 0
3112
3113
3113 # data about delta chain of each revs
3114 # data about delta chain of each revs
3114 chainlengths = []
3115 chainlengths = []
3115 chainbases = []
3116 chainbases = []
3116 chainspans = []
3117 chainspans = []
3117
3118
3118 # data about each revision
3119 # data about each revision
3119 datasize = [None, 0, 0]
3120 datasize = [None, 0, 0]
3120 fullsize = [None, 0, 0]
3121 fullsize = [None, 0, 0]
3121 semisize = [None, 0, 0]
3122 semisize = [None, 0, 0]
3122 # snapshot count per depth
3123 # snapshot count per depth
3123 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3124 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3124 deltasize = [None, 0, 0]
3125 deltasize = [None, 0, 0]
3125 chunktypecounts = {}
3126 chunktypecounts = {}
3126 chunktypesizes = {}
3127 chunktypesizes = {}
3127
3128
3128 def addsize(size, l):
3129 def addsize(size, l):
3129 if l[0] is None or size < l[0]:
3130 if l[0] is None or size < l[0]:
3130 l[0] = size
3131 l[0] = size
3131 if size > l[1]:
3132 if size > l[1]:
3132 l[1] = size
3133 l[1] = size
3133 l[2] += size
3134 l[2] += size
3134
3135
3135 numrevs = len(r)
3136 numrevs = len(r)
3136 for rev in pycompat.xrange(numrevs):
3137 for rev in pycompat.xrange(numrevs):
3137 p1, p2 = r.parentrevs(rev)
3138 p1, p2 = r.parentrevs(rev)
3138 delta = r.deltaparent(rev)
3139 delta = r.deltaparent(rev)
3139 if format > 0:
3140 if format > 0:
3140 addsize(r.rawsize(rev), datasize)
3141 addsize(r.rawsize(rev), datasize)
3141 if p2 != nullrev:
3142 if p2 != nullrev:
3142 nummerges += 1
3143 nummerges += 1
3143 size = r.length(rev)
3144 size = r.length(rev)
3144 if delta == nullrev:
3145 if delta == nullrev:
3145 chainlengths.append(0)
3146 chainlengths.append(0)
3146 chainbases.append(r.start(rev))
3147 chainbases.append(r.start(rev))
3147 chainspans.append(size)
3148 chainspans.append(size)
3148 if size == 0:
3149 if size == 0:
3149 numempty += 1
3150 numempty += 1
3150 numemptytext += 1
3151 numemptytext += 1
3151 else:
3152 else:
3152 numfull += 1
3153 numfull += 1
3153 numsnapdepth[0] += 1
3154 numsnapdepth[0] += 1
3154 addsize(size, fullsize)
3155 addsize(size, fullsize)
3155 addsize(size, snapsizedepth[0])
3156 addsize(size, snapsizedepth[0])
3156 else:
3157 else:
3157 chainlengths.append(chainlengths[delta] + 1)
3158 chainlengths.append(chainlengths[delta] + 1)
3158 baseaddr = chainbases[delta]
3159 baseaddr = chainbases[delta]
3159 revaddr = r.start(rev)
3160 revaddr = r.start(rev)
3160 chainbases.append(baseaddr)
3161 chainbases.append(baseaddr)
3161 chainspans.append((revaddr - baseaddr) + size)
3162 chainspans.append((revaddr - baseaddr) + size)
3162 if size == 0:
3163 if size == 0:
3163 numempty += 1
3164 numempty += 1
3164 numemptydelta += 1
3165 numemptydelta += 1
3165 elif r.issnapshot(rev):
3166 elif r.issnapshot(rev):
3166 addsize(size, semisize)
3167 addsize(size, semisize)
3167 numsemi += 1
3168 numsemi += 1
3168 depth = r.snapshotdepth(rev)
3169 depth = r.snapshotdepth(rev)
3169 numsnapdepth[depth] += 1
3170 numsnapdepth[depth] += 1
3170 addsize(size, snapsizedepth[depth])
3171 addsize(size, snapsizedepth[depth])
3171 else:
3172 else:
3172 addsize(size, deltasize)
3173 addsize(size, deltasize)
3173 if delta == rev - 1:
3174 if delta == rev - 1:
3174 numprev += 1
3175 numprev += 1
3175 if delta == p1:
3176 if delta == p1:
3176 nump1prev += 1
3177 nump1prev += 1
3177 elif delta == p2:
3178 elif delta == p2:
3178 nump2prev += 1
3179 nump2prev += 1
3179 elif delta == p1:
3180 elif delta == p1:
3180 nump1 += 1
3181 nump1 += 1
3181 elif delta == p2:
3182 elif delta == p2:
3182 nump2 += 1
3183 nump2 += 1
3183 elif delta != nullrev:
3184 elif delta != nullrev:
3184 numother += 1
3185 numother += 1
3185
3186
3186 # Obtain data on the raw chunks in the revlog.
3187 # Obtain data on the raw chunks in the revlog.
3187 if util.safehasattr(r, b'_getsegmentforrevs'):
3188 if util.safehasattr(r, b'_getsegmentforrevs'):
3188 segment = r._getsegmentforrevs(rev, rev)[1]
3189 segment = r._getsegmentforrevs(rev, rev)[1]
3189 else:
3190 else:
3190 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3191 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3191 if segment:
3192 if segment:
3192 chunktype = bytes(segment[0:1])
3193 chunktype = bytes(segment[0:1])
3193 else:
3194 else:
3194 chunktype = b'empty'
3195 chunktype = b'empty'
3195
3196
3196 if chunktype not in chunktypecounts:
3197 if chunktype not in chunktypecounts:
3197 chunktypecounts[chunktype] = 0
3198 chunktypecounts[chunktype] = 0
3198 chunktypesizes[chunktype] = 0
3199 chunktypesizes[chunktype] = 0
3199
3200
3200 chunktypecounts[chunktype] += 1
3201 chunktypecounts[chunktype] += 1
3201 chunktypesizes[chunktype] += size
3202 chunktypesizes[chunktype] += size
3202
3203
3203 # Adjust size min value for empty cases
3204 # Adjust size min value for empty cases
3204 for size in (datasize, fullsize, semisize, deltasize):
3205 for size in (datasize, fullsize, semisize, deltasize):
3205 if size[0] is None:
3206 if size[0] is None:
3206 size[0] = 0
3207 size[0] = 0
3207
3208
3208 numdeltas = numrevs - numfull - numempty - numsemi
3209 numdeltas = numrevs - numfull - numempty - numsemi
3209 numoprev = numprev - nump1prev - nump2prev
3210 numoprev = numprev - nump1prev - nump2prev
3210 totalrawsize = datasize[2]
3211 totalrawsize = datasize[2]
3211 datasize[2] /= numrevs
3212 datasize[2] /= numrevs
3212 fulltotal = fullsize[2]
3213 fulltotal = fullsize[2]
3213 if numfull == 0:
3214 if numfull == 0:
3214 fullsize[2] = 0
3215 fullsize[2] = 0
3215 else:
3216 else:
3216 fullsize[2] /= numfull
3217 fullsize[2] /= numfull
3217 semitotal = semisize[2]
3218 semitotal = semisize[2]
3218 snaptotal = {}
3219 snaptotal = {}
3219 if numsemi > 0:
3220 if numsemi > 0:
3220 semisize[2] /= numsemi
3221 semisize[2] /= numsemi
3221 for depth in snapsizedepth:
3222 for depth in snapsizedepth:
3222 snaptotal[depth] = snapsizedepth[depth][2]
3223 snaptotal[depth] = snapsizedepth[depth][2]
3223 snapsizedepth[depth][2] /= numsnapdepth[depth]
3224 snapsizedepth[depth][2] /= numsnapdepth[depth]
3224
3225
3225 deltatotal = deltasize[2]
3226 deltatotal = deltasize[2]
3226 if numdeltas > 0:
3227 if numdeltas > 0:
3227 deltasize[2] /= numdeltas
3228 deltasize[2] /= numdeltas
3228 totalsize = fulltotal + semitotal + deltatotal
3229 totalsize = fulltotal + semitotal + deltatotal
3229 avgchainlen = sum(chainlengths) / numrevs
3230 avgchainlen = sum(chainlengths) / numrevs
3230 maxchainlen = max(chainlengths)
3231 maxchainlen = max(chainlengths)
3231 maxchainspan = max(chainspans)
3232 maxchainspan = max(chainspans)
3232 compratio = 1
3233 compratio = 1
3233 if totalsize:
3234 if totalsize:
3234 compratio = totalrawsize / totalsize
3235 compratio = totalrawsize / totalsize
3235
3236
3236 basedfmtstr = b'%%%dd\n'
3237 basedfmtstr = b'%%%dd\n'
3237 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3238 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3238
3239
3239 def dfmtstr(max):
3240 def dfmtstr(max):
3240 return basedfmtstr % len(str(max))
3241 return basedfmtstr % len(str(max))
3241
3242
3242 def pcfmtstr(max, padding=0):
3243 def pcfmtstr(max, padding=0):
3243 return basepcfmtstr % (len(str(max)), b' ' * padding)
3244 return basepcfmtstr % (len(str(max)), b' ' * padding)
3244
3245
3245 def pcfmt(value, total):
3246 def pcfmt(value, total):
3246 if total:
3247 if total:
3247 return (value, 100 * float(value) / total)
3248 return (value, 100 * float(value) / total)
3248 else:
3249 else:
3249 return value, 100.0
3250 return value, 100.0
3250
3251
3251 ui.writenoi18n(b'format : %d\n' % format)
3252 ui.writenoi18n(b'format : %d\n' % format)
3252 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3253 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3253
3254
3254 ui.write(b'\n')
3255 ui.write(b'\n')
3255 fmt = pcfmtstr(totalsize)
3256 fmt = pcfmtstr(totalsize)
3256 fmt2 = dfmtstr(totalsize)
3257 fmt2 = dfmtstr(totalsize)
3257 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3258 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3258 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3259 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3259 ui.writenoi18n(
3260 ui.writenoi18n(
3260 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3261 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3261 )
3262 )
3262 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3263 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3263 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3264 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3264 ui.writenoi18n(
3265 ui.writenoi18n(
3265 b' text : '
3266 b' text : '
3266 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3267 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3267 )
3268 )
3268 ui.writenoi18n(
3269 ui.writenoi18n(
3269 b' delta : '
3270 b' delta : '
3270 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3271 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3271 )
3272 )
3272 ui.writenoi18n(
3273 ui.writenoi18n(
3273 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3274 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3274 )
3275 )
3275 for depth in sorted(numsnapdepth):
3276 for depth in sorted(numsnapdepth):
3276 ui.write(
3277 ui.write(
3277 (b' lvl-%-3d : ' % depth)
3278 (b' lvl-%-3d : ' % depth)
3278 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3279 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3279 )
3280 )
3280 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3281 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3281 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3282 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3282 ui.writenoi18n(
3283 ui.writenoi18n(
3283 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3284 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3284 )
3285 )
3285 for depth in sorted(numsnapdepth):
3286 for depth in sorted(numsnapdepth):
3286 ui.write(
3287 ui.write(
3287 (b' lvl-%-3d : ' % depth)
3288 (b' lvl-%-3d : ' % depth)
3288 + fmt % pcfmt(snaptotal[depth], totalsize)
3289 + fmt % pcfmt(snaptotal[depth], totalsize)
3289 )
3290 )
3290 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3291 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3291
3292
3292 def fmtchunktype(chunktype):
3293 def fmtchunktype(chunktype):
3293 if chunktype == b'empty':
3294 if chunktype == b'empty':
3294 return b' %s : ' % chunktype
3295 return b' %s : ' % chunktype
3295 elif chunktype in pycompat.bytestr(string.ascii_letters):
3296 elif chunktype in pycompat.bytestr(string.ascii_letters):
3296 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3297 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3297 else:
3298 else:
3298 return b' 0x%s : ' % hex(chunktype)
3299 return b' 0x%s : ' % hex(chunktype)
3299
3300
3300 ui.write(b'\n')
3301 ui.write(b'\n')
3301 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3302 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3302 for chunktype in sorted(chunktypecounts):
3303 for chunktype in sorted(chunktypecounts):
3303 ui.write(fmtchunktype(chunktype))
3304 ui.write(fmtchunktype(chunktype))
3304 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3305 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3305 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3306 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3306 for chunktype in sorted(chunktypecounts):
3307 for chunktype in sorted(chunktypecounts):
3307 ui.write(fmtchunktype(chunktype))
3308 ui.write(fmtchunktype(chunktype))
3308 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3309 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3309
3310
3310 ui.write(b'\n')
3311 ui.write(b'\n')
3311 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3312 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3312 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3313 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3313 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3314 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3314 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3315 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3315 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3316 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3316
3317
3317 if format > 0:
3318 if format > 0:
3318 ui.write(b'\n')
3319 ui.write(b'\n')
3319 ui.writenoi18n(
3320 ui.writenoi18n(
3320 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3321 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3321 % tuple(datasize)
3322 % tuple(datasize)
3322 )
3323 )
3323 ui.writenoi18n(
3324 ui.writenoi18n(
3324 b'full revision size (min/max/avg) : %d / %d / %d\n'
3325 b'full revision size (min/max/avg) : %d / %d / %d\n'
3325 % tuple(fullsize)
3326 % tuple(fullsize)
3326 )
3327 )
3327 ui.writenoi18n(
3328 ui.writenoi18n(
3328 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3329 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3329 % tuple(semisize)
3330 % tuple(semisize)
3330 )
3331 )
3331 for depth in sorted(snapsizedepth):
3332 for depth in sorted(snapsizedepth):
3332 if depth == 0:
3333 if depth == 0:
3333 continue
3334 continue
3334 ui.writenoi18n(
3335 ui.writenoi18n(
3335 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3336 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3336 % ((depth,) + tuple(snapsizedepth[depth]))
3337 % ((depth,) + tuple(snapsizedepth[depth]))
3337 )
3338 )
3338 ui.writenoi18n(
3339 ui.writenoi18n(
3339 b'delta size (min/max/avg) : %d / %d / %d\n'
3340 b'delta size (min/max/avg) : %d / %d / %d\n'
3340 % tuple(deltasize)
3341 % tuple(deltasize)
3341 )
3342 )
3342
3343
3343 if numdeltas > 0:
3344 if numdeltas > 0:
3344 ui.write(b'\n')
3345 ui.write(b'\n')
3345 fmt = pcfmtstr(numdeltas)
3346 fmt = pcfmtstr(numdeltas)
3346 fmt2 = pcfmtstr(numdeltas, 4)
3347 fmt2 = pcfmtstr(numdeltas, 4)
3347 ui.writenoi18n(
3348 ui.writenoi18n(
3348 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3349 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3349 )
3350 )
3350 if numprev > 0:
3351 if numprev > 0:
3351 ui.writenoi18n(
3352 ui.writenoi18n(
3352 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3353 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3353 )
3354 )
3354 ui.writenoi18n(
3355 ui.writenoi18n(
3355 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3356 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3356 )
3357 )
3357 ui.writenoi18n(
3358 ui.writenoi18n(
3358 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3359 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3359 )
3360 )
3360 if gdelta:
3361 if gdelta:
3361 ui.writenoi18n(
3362 ui.writenoi18n(
3362 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3363 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3363 )
3364 )
3364 ui.writenoi18n(
3365 ui.writenoi18n(
3365 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3366 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3366 )
3367 )
3367 ui.writenoi18n(
3368 ui.writenoi18n(
3368 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3369 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3369 )
3370 )
3370
3371
3371
3372
3372 @command(
3373 @command(
3373 b'debugrevlogindex',
3374 b'debugrevlogindex',
3374 cmdutil.debugrevlogopts
3375 cmdutil.debugrevlogopts
3375 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3376 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3376 _(b'[-f FORMAT] -c|-m|FILE'),
3377 _(b'[-f FORMAT] -c|-m|FILE'),
3377 optionalrepo=True,
3378 optionalrepo=True,
3378 )
3379 )
3379 def debugrevlogindex(ui, repo, file_=None, **opts):
3380 def debugrevlogindex(ui, repo, file_=None, **opts):
3380 """dump the contents of a revlog index"""
3381 """dump the contents of a revlog index"""
3381 opts = pycompat.byteskwargs(opts)
3382 opts = pycompat.byteskwargs(opts)
3382 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3383 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3383 format = opts.get(b'format', 0)
3384 format = opts.get(b'format', 0)
3384 if format not in (0, 1):
3385 if format not in (0, 1):
3385 raise error.Abort(_(b"unknown format %d") % format)
3386 raise error.Abort(_(b"unknown format %d") % format)
3386
3387
3387 if ui.debugflag:
3388 if ui.debugflag:
3388 shortfn = hex
3389 shortfn = hex
3389 else:
3390 else:
3390 shortfn = short
3391 shortfn = short
3391
3392
3392 # There might not be anything in r, so have a sane default
3393 # There might not be anything in r, so have a sane default
3393 idlen = 12
3394 idlen = 12
3394 for i in r:
3395 for i in r:
3395 idlen = len(shortfn(r.node(i)))
3396 idlen = len(shortfn(r.node(i)))
3396 break
3397 break
3397
3398
3398 if format == 0:
3399 if format == 0:
3399 if ui.verbose:
3400 if ui.verbose:
3400 ui.writenoi18n(
3401 ui.writenoi18n(
3401 b" rev offset length linkrev %s %s p2\n"
3402 b" rev offset length linkrev %s %s p2\n"
3402 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3403 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3403 )
3404 )
3404 else:
3405 else:
3405 ui.writenoi18n(
3406 ui.writenoi18n(
3406 b" rev linkrev %s %s p2\n"
3407 b" rev linkrev %s %s p2\n"
3407 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3408 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3408 )
3409 )
3409 elif format == 1:
3410 elif format == 1:
3410 if ui.verbose:
3411 if ui.verbose:
3411 ui.writenoi18n(
3412 ui.writenoi18n(
3412 (
3413 (
3413 b" rev flag offset length size link p1"
3414 b" rev flag offset length size link p1"
3414 b" p2 %s\n"
3415 b" p2 %s\n"
3415 )
3416 )
3416 % b"nodeid".rjust(idlen)
3417 % b"nodeid".rjust(idlen)
3417 )
3418 )
3418 else:
3419 else:
3419 ui.writenoi18n(
3420 ui.writenoi18n(
3420 b" rev flag size link p1 p2 %s\n"
3421 b" rev flag size link p1 p2 %s\n"
3421 % b"nodeid".rjust(idlen)
3422 % b"nodeid".rjust(idlen)
3422 )
3423 )
3423
3424
3424 for i in r:
3425 for i in r:
3425 node = r.node(i)
3426 node = r.node(i)
3426 if format == 0:
3427 if format == 0:
3427 try:
3428 try:
3428 pp = r.parents(node)
3429 pp = r.parents(node)
3429 except Exception:
3430 except Exception:
3430 pp = [repo.nullid, repo.nullid]
3431 pp = [repo.nullid, repo.nullid]
3431 if ui.verbose:
3432 if ui.verbose:
3432 ui.write(
3433 ui.write(
3433 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3434 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3434 % (
3435 % (
3435 i,
3436 i,
3436 r.start(i),
3437 r.start(i),
3437 r.length(i),
3438 r.length(i),
3438 r.linkrev(i),
3439 r.linkrev(i),
3439 shortfn(node),
3440 shortfn(node),
3440 shortfn(pp[0]),
3441 shortfn(pp[0]),
3441 shortfn(pp[1]),
3442 shortfn(pp[1]),
3442 )
3443 )
3443 )
3444 )
3444 else:
3445 else:
3445 ui.write(
3446 ui.write(
3446 b"% 6d % 7d %s %s %s\n"
3447 b"% 6d % 7d %s %s %s\n"
3447 % (
3448 % (
3448 i,
3449 i,
3449 r.linkrev(i),
3450 r.linkrev(i),
3450 shortfn(node),
3451 shortfn(node),
3451 shortfn(pp[0]),
3452 shortfn(pp[0]),
3452 shortfn(pp[1]),
3453 shortfn(pp[1]),
3453 )
3454 )
3454 )
3455 )
3455 elif format == 1:
3456 elif format == 1:
3456 pr = r.parentrevs(i)
3457 pr = r.parentrevs(i)
3457 if ui.verbose:
3458 if ui.verbose:
3458 ui.write(
3459 ui.write(
3459 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3460 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3460 % (
3461 % (
3461 i,
3462 i,
3462 r.flags(i),
3463 r.flags(i),
3463 r.start(i),
3464 r.start(i),
3464 r.length(i),
3465 r.length(i),
3465 r.rawsize(i),
3466 r.rawsize(i),
3466 r.linkrev(i),
3467 r.linkrev(i),
3467 pr[0],
3468 pr[0],
3468 pr[1],
3469 pr[1],
3469 shortfn(node),
3470 shortfn(node),
3470 )
3471 )
3471 )
3472 )
3472 else:
3473 else:
3473 ui.write(
3474 ui.write(
3474 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3475 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3475 % (
3476 % (
3476 i,
3477 i,
3477 r.flags(i),
3478 r.flags(i),
3478 r.rawsize(i),
3479 r.rawsize(i),
3479 r.linkrev(i),
3480 r.linkrev(i),
3480 pr[0],
3481 pr[0],
3481 pr[1],
3482 pr[1],
3482 shortfn(node),
3483 shortfn(node),
3483 )
3484 )
3484 )
3485 )
3485
3486
3486
3487
3487 @command(
3488 @command(
3488 b'debugrevspec',
3489 b'debugrevspec',
3489 [
3490 [
3490 (
3491 (
3491 b'',
3492 b'',
3492 b'optimize',
3493 b'optimize',
3493 None,
3494 None,
3494 _(b'print parsed tree after optimizing (DEPRECATED)'),
3495 _(b'print parsed tree after optimizing (DEPRECATED)'),
3495 ),
3496 ),
3496 (
3497 (
3497 b'',
3498 b'',
3498 b'show-revs',
3499 b'show-revs',
3499 True,
3500 True,
3500 _(b'print list of result revisions (default)'),
3501 _(b'print list of result revisions (default)'),
3501 ),
3502 ),
3502 (
3503 (
3503 b's',
3504 b's',
3504 b'show-set',
3505 b'show-set',
3505 None,
3506 None,
3506 _(b'print internal representation of result set'),
3507 _(b'print internal representation of result set'),
3507 ),
3508 ),
3508 (
3509 (
3509 b'p',
3510 b'p',
3510 b'show-stage',
3511 b'show-stage',
3511 [],
3512 [],
3512 _(b'print parsed tree at the given stage'),
3513 _(b'print parsed tree at the given stage'),
3513 _(b'NAME'),
3514 _(b'NAME'),
3514 ),
3515 ),
3515 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3516 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3516 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3517 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3517 ],
3518 ],
3518 b'REVSPEC',
3519 b'REVSPEC',
3519 )
3520 )
3520 def debugrevspec(ui, repo, expr, **opts):
3521 def debugrevspec(ui, repo, expr, **opts):
3521 """parse and apply a revision specification
3522 """parse and apply a revision specification
3522
3523
3523 Use -p/--show-stage option to print the parsed tree at the given stages.
3524 Use -p/--show-stage option to print the parsed tree at the given stages.
3524 Use -p all to print tree at every stage.
3525 Use -p all to print tree at every stage.
3525
3526
3526 Use --no-show-revs option with -s or -p to print only the set
3527 Use --no-show-revs option with -s or -p to print only the set
3527 representation or the parsed tree respectively.
3528 representation or the parsed tree respectively.
3528
3529
3529 Use --verify-optimized to compare the optimized result with the unoptimized
3530 Use --verify-optimized to compare the optimized result with the unoptimized
3530 one. Returns 1 if the optimized result differs.
3531 one. Returns 1 if the optimized result differs.
3531 """
3532 """
3532 opts = pycompat.byteskwargs(opts)
3533 opts = pycompat.byteskwargs(opts)
3533 aliases = ui.configitems(b'revsetalias')
3534 aliases = ui.configitems(b'revsetalias')
3534 stages = [
3535 stages = [
3535 (b'parsed', lambda tree: tree),
3536 (b'parsed', lambda tree: tree),
3536 (
3537 (
3537 b'expanded',
3538 b'expanded',
3538 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3539 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3539 ),
3540 ),
3540 (b'concatenated', revsetlang.foldconcat),
3541 (b'concatenated', revsetlang.foldconcat),
3541 (b'analyzed', revsetlang.analyze),
3542 (b'analyzed', revsetlang.analyze),
3542 (b'optimized', revsetlang.optimize),
3543 (b'optimized', revsetlang.optimize),
3543 ]
3544 ]
3544 if opts[b'no_optimized']:
3545 if opts[b'no_optimized']:
3545 stages = stages[:-1]
3546 stages = stages[:-1]
3546 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3547 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3547 raise error.Abort(
3548 raise error.Abort(
3548 _(b'cannot use --verify-optimized with --no-optimized')
3549 _(b'cannot use --verify-optimized with --no-optimized')
3549 )
3550 )
3550 stagenames = {n for n, f in stages}
3551 stagenames = {n for n, f in stages}
3551
3552
3552 showalways = set()
3553 showalways = set()
3553 showchanged = set()
3554 showchanged = set()
3554 if ui.verbose and not opts[b'show_stage']:
3555 if ui.verbose and not opts[b'show_stage']:
3555 # show parsed tree by --verbose (deprecated)
3556 # show parsed tree by --verbose (deprecated)
3556 showalways.add(b'parsed')
3557 showalways.add(b'parsed')
3557 showchanged.update([b'expanded', b'concatenated'])
3558 showchanged.update([b'expanded', b'concatenated'])
3558 if opts[b'optimize']:
3559 if opts[b'optimize']:
3559 showalways.add(b'optimized')
3560 showalways.add(b'optimized')
3560 if opts[b'show_stage'] and opts[b'optimize']:
3561 if opts[b'show_stage'] and opts[b'optimize']:
3561 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3562 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3562 if opts[b'show_stage'] == [b'all']:
3563 if opts[b'show_stage'] == [b'all']:
3563 showalways.update(stagenames)
3564 showalways.update(stagenames)
3564 else:
3565 else:
3565 for n in opts[b'show_stage']:
3566 for n in opts[b'show_stage']:
3566 if n not in stagenames:
3567 if n not in stagenames:
3567 raise error.Abort(_(b'invalid stage name: %s') % n)
3568 raise error.Abort(_(b'invalid stage name: %s') % n)
3568 showalways.update(opts[b'show_stage'])
3569 showalways.update(opts[b'show_stage'])
3569
3570
3570 treebystage = {}
3571 treebystage = {}
3571 printedtree = None
3572 printedtree = None
3572 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3573 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3573 for n, f in stages:
3574 for n, f in stages:
3574 treebystage[n] = tree = f(tree)
3575 treebystage[n] = tree = f(tree)
3575 if n in showalways or (n in showchanged and tree != printedtree):
3576 if n in showalways or (n in showchanged and tree != printedtree):
3576 if opts[b'show_stage'] or n != b'parsed':
3577 if opts[b'show_stage'] or n != b'parsed':
3577 ui.write(b"* %s:\n" % n)
3578 ui.write(b"* %s:\n" % n)
3578 ui.write(revsetlang.prettyformat(tree), b"\n")
3579 ui.write(revsetlang.prettyformat(tree), b"\n")
3579 printedtree = tree
3580 printedtree = tree
3580
3581
3581 if opts[b'verify_optimized']:
3582 if opts[b'verify_optimized']:
3582 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3583 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3583 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3584 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3584 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3585 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3585 ui.writenoi18n(
3586 ui.writenoi18n(
3586 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3587 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3587 )
3588 )
3588 ui.writenoi18n(
3589 ui.writenoi18n(
3589 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3590 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3590 )
3591 )
3591 arevs = list(arevs)
3592 arevs = list(arevs)
3592 brevs = list(brevs)
3593 brevs = list(brevs)
3593 if arevs == brevs:
3594 if arevs == brevs:
3594 return 0
3595 return 0
3595 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3596 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3596 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3597 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3597 sm = difflib.SequenceMatcher(None, arevs, brevs)
3598 sm = difflib.SequenceMatcher(None, arevs, brevs)
3598 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3599 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3599 if tag in ('delete', 'replace'):
3600 if tag in ('delete', 'replace'):
3600 for c in arevs[alo:ahi]:
3601 for c in arevs[alo:ahi]:
3601 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3602 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3602 if tag in ('insert', 'replace'):
3603 if tag in ('insert', 'replace'):
3603 for c in brevs[blo:bhi]:
3604 for c in brevs[blo:bhi]:
3604 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3605 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3605 if tag == 'equal':
3606 if tag == 'equal':
3606 for c in arevs[alo:ahi]:
3607 for c in arevs[alo:ahi]:
3607 ui.write(b' %d\n' % c)
3608 ui.write(b' %d\n' % c)
3608 return 1
3609 return 1
3609
3610
3610 func = revset.makematcher(tree)
3611 func = revset.makematcher(tree)
3611 revs = func(repo)
3612 revs = func(repo)
3612 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3613 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3613 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3614 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3614 if not opts[b'show_revs']:
3615 if not opts[b'show_revs']:
3615 return
3616 return
3616 for c in revs:
3617 for c in revs:
3617 ui.write(b"%d\n" % c)
3618 ui.write(b"%d\n" % c)
3618
3619
3619
3620
3620 @command(
3621 @command(
3621 b'debugserve',
3622 b'debugserve',
3622 [
3623 [
3623 (
3624 (
3624 b'',
3625 b'',
3625 b'sshstdio',
3626 b'sshstdio',
3626 False,
3627 False,
3627 _(b'run an SSH server bound to process handles'),
3628 _(b'run an SSH server bound to process handles'),
3628 ),
3629 ),
3629 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3630 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3630 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3631 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3631 ],
3632 ],
3632 b'',
3633 b'',
3633 )
3634 )
3634 def debugserve(ui, repo, **opts):
3635 def debugserve(ui, repo, **opts):
3635 """run a server with advanced settings
3636 """run a server with advanced settings
3636
3637
3637 This command is similar to :hg:`serve`. It exists partially as a
3638 This command is similar to :hg:`serve`. It exists partially as a
3638 workaround to the fact that ``hg serve --stdio`` must have specific
3639 workaround to the fact that ``hg serve --stdio`` must have specific
3639 arguments for security reasons.
3640 arguments for security reasons.
3640 """
3641 """
3641 opts = pycompat.byteskwargs(opts)
3642 opts = pycompat.byteskwargs(opts)
3642
3643
3643 if not opts[b'sshstdio']:
3644 if not opts[b'sshstdio']:
3644 raise error.Abort(_(b'only --sshstdio is currently supported'))
3645 raise error.Abort(_(b'only --sshstdio is currently supported'))
3645
3646
3646 logfh = None
3647 logfh = None
3647
3648
3648 if opts[b'logiofd'] and opts[b'logiofile']:
3649 if opts[b'logiofd'] and opts[b'logiofile']:
3649 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3650 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3650
3651
3651 if opts[b'logiofd']:
3652 if opts[b'logiofd']:
3652 # Ideally we would be line buffered. But line buffering in binary
3653 # Ideally we would be line buffered. But line buffering in binary
3653 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3654 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3654 # buffering could have performance impacts. But since this isn't
3655 # buffering could have performance impacts. But since this isn't
3655 # performance critical code, it should be fine.
3656 # performance critical code, it should be fine.
3656 try:
3657 try:
3657 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3658 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3658 except OSError as e:
3659 except OSError as e:
3659 if e.errno != errno.ESPIPE:
3660 if e.errno != errno.ESPIPE:
3660 raise
3661 raise
3661 # can't seek a pipe, so `ab` mode fails on py3
3662 # can't seek a pipe, so `ab` mode fails on py3
3662 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3663 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3663 elif opts[b'logiofile']:
3664 elif opts[b'logiofile']:
3664 logfh = open(opts[b'logiofile'], b'ab', 0)
3665 logfh = open(opts[b'logiofile'], b'ab', 0)
3665
3666
3666 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3667 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3667 s.serve_forever()
3668 s.serve_forever()
3668
3669
3669
3670
3670 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3671 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3671 def debugsetparents(ui, repo, rev1, rev2=None):
3672 def debugsetparents(ui, repo, rev1, rev2=None):
3672 """manually set the parents of the current working directory (DANGEROUS)
3673 """manually set the parents of the current working directory (DANGEROUS)
3673
3674
3674 This command is not what you are looking for and should not be used. Using
3675 This command is not what you are looking for and should not be used. Using
3675 this command will most certainly results in slight corruption of the file
3676 this command will most certainly results in slight corruption of the file
3676 level histories withing your repository. DO NOT USE THIS COMMAND.
3677 level histories withing your repository. DO NOT USE THIS COMMAND.
3677
3678
3678 The command update the p1 and p2 field in the dirstate, and not touching
3679 The command update the p1 and p2 field in the dirstate, and not touching
3679 anything else. This useful for writing repository conversion tools, but
3680 anything else. This useful for writing repository conversion tools, but
3680 should be used with extreme care. For example, neither the working
3681 should be used with extreme care. For example, neither the working
3681 directory nor the dirstate is updated, so file status may be incorrect
3682 directory nor the dirstate is updated, so file status may be incorrect
3682 after running this command. Only used if you are one of the few people that
3683 after running this command. Only used if you are one of the few people that
3683 deeply unstand both conversion tools and file level histories. If you are
3684 deeply unstand both conversion tools and file level histories. If you are
3684 reading this help, you are not one of this people (most of them sailed west
3685 reading this help, you are not one of this people (most of them sailed west
3685 from Mithlond anyway.
3686 from Mithlond anyway.
3686
3687
3687 So one last time DO NOT USE THIS COMMAND.
3688 So one last time DO NOT USE THIS COMMAND.
3688
3689
3689 Returns 0 on success.
3690 Returns 0 on success.
3690 """
3691 """
3691
3692
3692 node1 = scmutil.revsingle(repo, rev1).node()
3693 node1 = scmutil.revsingle(repo, rev1).node()
3693 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3694 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3694
3695
3695 with repo.wlock():
3696 with repo.wlock():
3696 repo.setparents(node1, node2)
3697 repo.setparents(node1, node2)
3697
3698
3698
3699
3699 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3700 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3700 def debugsidedata(ui, repo, file_, rev=None, **opts):
3701 def debugsidedata(ui, repo, file_, rev=None, **opts):
3701 """dump the side data for a cl/manifest/file revision
3702 """dump the side data for a cl/manifest/file revision
3702
3703
3703 Use --verbose to dump the sidedata content."""
3704 Use --verbose to dump the sidedata content."""
3704 opts = pycompat.byteskwargs(opts)
3705 opts = pycompat.byteskwargs(opts)
3705 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3706 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3706 if rev is not None:
3707 if rev is not None:
3707 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3708 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3708 file_, rev = None, file_
3709 file_, rev = None, file_
3709 elif rev is None:
3710 elif rev is None:
3710 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3711 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3711 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3712 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3712 r = getattr(r, '_revlog', r)
3713 r = getattr(r, '_revlog', r)
3713 try:
3714 try:
3714 sidedata = r.sidedata(r.lookup(rev))
3715 sidedata = r.sidedata(r.lookup(rev))
3715 except KeyError:
3716 except KeyError:
3716 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3717 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3717 if sidedata:
3718 if sidedata:
3718 sidedata = list(sidedata.items())
3719 sidedata = list(sidedata.items())
3719 sidedata.sort()
3720 sidedata.sort()
3720 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3721 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3721 for key, value in sidedata:
3722 for key, value in sidedata:
3722 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3723 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3723 if ui.verbose:
3724 if ui.verbose:
3724 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3725 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3725
3726
3726
3727
3727 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3728 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3728 def debugssl(ui, repo, source=None, **opts):
3729 def debugssl(ui, repo, source=None, **opts):
3729 """test a secure connection to a server
3730 """test a secure connection to a server
3730
3731
3731 This builds the certificate chain for the server on Windows, installing the
3732 This builds the certificate chain for the server on Windows, installing the
3732 missing intermediates and trusted root via Windows Update if necessary. It
3733 missing intermediates and trusted root via Windows Update if necessary. It
3733 does nothing on other platforms.
3734 does nothing on other platforms.
3734
3735
3735 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3736 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3736 that server is used. See :hg:`help urls` for more information.
3737 that server is used. See :hg:`help urls` for more information.
3737
3738
3738 If the update succeeds, retry the original operation. Otherwise, the cause
3739 If the update succeeds, retry the original operation. Otherwise, the cause
3739 of the SSL error is likely another issue.
3740 of the SSL error is likely another issue.
3740 """
3741 """
3741 if not pycompat.iswindows:
3742 if not pycompat.iswindows:
3742 raise error.Abort(
3743 raise error.Abort(
3743 _(b'certificate chain building is only possible on Windows')
3744 _(b'certificate chain building is only possible on Windows')
3744 )
3745 )
3745
3746
3746 if not source:
3747 if not source:
3747 if not repo:
3748 if not repo:
3748 raise error.Abort(
3749 raise error.Abort(
3749 _(
3750 _(
3750 b"there is no Mercurial repository here, and no "
3751 b"there is no Mercurial repository here, and no "
3751 b"server specified"
3752 b"server specified"
3752 )
3753 )
3753 )
3754 )
3754 source = b"default"
3755 source = b"default"
3755
3756
3756 source, branches = urlutil.get_unique_pull_path(
3757 source, branches = urlutil.get_unique_pull_path(
3757 b'debugssl', repo, ui, source
3758 b'debugssl', repo, ui, source
3758 )
3759 )
3759 url = urlutil.url(source)
3760 url = urlutil.url(source)
3760
3761
3761 defaultport = {b'https': 443, b'ssh': 22}
3762 defaultport = {b'https': 443, b'ssh': 22}
3762 if url.scheme in defaultport:
3763 if url.scheme in defaultport:
3763 try:
3764 try:
3764 addr = (url.host, int(url.port or defaultport[url.scheme]))
3765 addr = (url.host, int(url.port or defaultport[url.scheme]))
3765 except ValueError:
3766 except ValueError:
3766 raise error.Abort(_(b"malformed port number in URL"))
3767 raise error.Abort(_(b"malformed port number in URL"))
3767 else:
3768 else:
3768 raise error.Abort(_(b"only https and ssh connections are supported"))
3769 raise error.Abort(_(b"only https and ssh connections are supported"))
3769
3770
3770 from . import win32
3771 from . import win32
3771
3772
3772 s = ssl.wrap_socket(
3773 s = ssl.wrap_socket(
3773 socket.socket(),
3774 socket.socket(),
3774 ssl_version=ssl.PROTOCOL_TLS,
3775 ssl_version=ssl.PROTOCOL_TLS,
3775 cert_reqs=ssl.CERT_NONE,
3776 cert_reqs=ssl.CERT_NONE,
3776 ca_certs=None,
3777 ca_certs=None,
3777 )
3778 )
3778
3779
3779 try:
3780 try:
3780 s.connect(addr)
3781 s.connect(addr)
3781 cert = s.getpeercert(True)
3782 cert = s.getpeercert(True)
3782
3783
3783 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3784 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3784
3785
3785 complete = win32.checkcertificatechain(cert, build=False)
3786 complete = win32.checkcertificatechain(cert, build=False)
3786
3787
3787 if not complete:
3788 if not complete:
3788 ui.status(_(b'certificate chain is incomplete, updating... '))
3789 ui.status(_(b'certificate chain is incomplete, updating... '))
3789
3790
3790 if not win32.checkcertificatechain(cert):
3791 if not win32.checkcertificatechain(cert):
3791 ui.status(_(b'failed.\n'))
3792 ui.status(_(b'failed.\n'))
3792 else:
3793 else:
3793 ui.status(_(b'done.\n'))
3794 ui.status(_(b'done.\n'))
3794 else:
3795 else:
3795 ui.status(_(b'full certificate chain is available\n'))
3796 ui.status(_(b'full certificate chain is available\n'))
3796 finally:
3797 finally:
3797 s.close()
3798 s.close()
3798
3799
3799
3800
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip-backup bundles, newest first (sorted by mtime).
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Reuse the log options machinery; force bundle/force keys to exist for
    # bundlerepo.getremotechanges() below.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring --newest-first
        # and --no-merges (a node with two non-null parents is a merge).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we don't have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style noise while resolving remote changes.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle containing the requested node inside
                # a lock + transaction, then stop scanning further backups.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime header, then either
                # the path (--verbose) or a one-line-per-changeset summary.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always release the temporary bundle repo resources.
            cleanupfn()
3940
3941
3941
3942
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (path, source, revision) recorded in the
    # requested changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3953
3954
3954
3955
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when the command actually runs.
    import code

    local_ns = {
        'ui': ui,
        'repo': repo,
    }
    code.interact(local=local_ns)
3970
3971
3971
3972
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: full hash for the changeset header, short hash
    # for each successor node.
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # An empty successors set means the changeset was pruned; it
            # still produces a (blank) output line below.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4026
4027
4027
4028
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        # Three cases: a cached filenode (possibly pointing at an unknown
        # .hgtags revision), no cache entry at all, or a corrupt entry.
        if fnode:
            shown = hex(fnode)
            if not flog.hasnode(fnode):
                shown += b' (unknown node)'
        elif fnode is None:
            shown = b'missing'
        else:
            shown = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), shown))
4046
4047
4047
4048
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # optionalrepo=True means we may have been invoked outside a repo,
        # but -r requires one to resolve revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE pairs as template properties; 'ui' is reserved.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree, and the alias-expanded tree when the
        # expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once, with -D props as the context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4112
4113
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (no response); substitute a marker so the
    # echoed line is always well-formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4127
4128
4128
4129
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo the user's answer back so tests can observe what was read.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4141
4142
4142
4143
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while rebuilding
    # every cache category.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4148
4149
4149
4150
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # -o may be repeated, so `optimize` arrives as a list; deduplicate it
    # before delegating all the real work to the upgrade module.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4199
4200
4200
4201
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Optionally normalize OS path separators to '/' for display.
    # (Was a lambda-assignment wrapping util.normpath; the direct reference
    # and a def are equivalent and idiomatic.)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:

        def displaypath(fn):
            return fn

    # Column widths sized to the longest repo-relative and cwd-relative
    # paths.  Generator expressions avoid building throwaway lists, and the
    # loop variable no longer shadows the builtin `abs`.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            displaypath(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4227
4228
4228
4229
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # For content-divergence, list the other divergent changesets
        # (hex hash plus phase) before the reason; otherwise leave blank.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4246
4247
4247
4248
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the wire-protocol argument passing against a peer, dropping
    # the generic remote options and any unset flags first.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4278
4279
4279
4280
4280 def _parsewirelangblocks(fh):
4281 def _parsewirelangblocks(fh):
4281 activeaction = None
4282 activeaction = None
4282 blocklines = []
4283 blocklines = []
4283 lastindent = 0
4284 lastindent = 0
4284
4285
4285 for line in fh:
4286 for line in fh:
4286 line = line.rstrip()
4287 line = line.rstrip()
4287 if not line:
4288 if not line:
4288 continue
4289 continue
4289
4290
4290 if line.startswith(b'#'):
4291 if line.startswith(b'#'):
4291 continue
4292 continue
4292
4293
4293 if not line.startswith(b' '):
4294 if not line.startswith(b' '):
4294 # New block. Flush previous one.
4295 # New block. Flush previous one.
4295 if activeaction:
4296 if activeaction:
4296 yield activeaction, blocklines
4297 yield activeaction, blocklines
4297
4298
4298 activeaction = line
4299 activeaction = line
4299 blocklines = []
4300 blocklines = []
4300 lastindent = 0
4301 lastindent = 0
4301 continue
4302 continue
4302
4303
4303 # Else we start with an indent.
4304 # Else we start with an indent.
4304
4305
4305 if not activeaction:
4306 if not activeaction:
4306 raise error.Abort(_(b'indented line outside of block'))
4307 raise error.Abort(_(b'indented line outside of block'))
4307
4308
4308 indent = len(line) - len(line.lstrip())
4309 indent = len(line) - len(line.lstrip())
4309
4310
4310 # If this line is indented more than the last line, concatenate it.
4311 # If this line is indented more than the last line, concatenate it.
4311 if indent > lastindent and blocklines:
4312 if indent > lastindent and blocklines:
4312 blocklines[-1] += line.lstrip()
4313 blocklines[-1] += line.lstrip()
4313 else:
4314 else:
4314 blocklines.append(line)
4315 blocklines.append(line)
4315 lastindent = indent
4316 lastindent = indent
4316
4317
4317 # Flush last block.
4318 # Flush last block.
4318 if activeaction:
4319 if activeaction:
4319 yield activeaction, blocklines
4320 yield activeaction, blocklines
4320
4321
4321
4322
4322 @command(
4323 @command(
4323 b'debugwireproto',
4324 b'debugwireproto',
4324 [
4325 [
4325 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4326 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4326 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4327 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4327 (
4328 (
4328 b'',
4329 b'',
4329 b'noreadstderr',
4330 b'noreadstderr',
4330 False,
4331 False,
4331 _(b'do not read from stderr of the remote'),
4332 _(b'do not read from stderr of the remote'),
4332 ),
4333 ),
4333 (
4334 (
4334 b'',
4335 b'',
4335 b'nologhandshake',
4336 b'nologhandshake',
4336 False,
4337 False,
4337 _(b'do not log I/O related to the peer handshake'),
4338 _(b'do not log I/O related to the peer handshake'),
4338 ),
4339 ),
4339 ]
4340 ]
4340 + cmdutil.remoteopts,
4341 + cmdutil.remoteopts,
4341 _(b'[PATH]'),
4342 _(b'[PATH]'),
4342 optionalrepo=True,
4343 optionalrepo=True,
4343 )
4344 )
4344 def debugwireproto(ui, repo, path=None, **opts):
4345 def debugwireproto(ui, repo, path=None, **opts):
4345 """send wire protocol commands to a server
4346 """send wire protocol commands to a server
4346
4347
4347 This command can be used to issue wire protocol commands to remote
4348 This command can be used to issue wire protocol commands to remote
4348 peers and to debug the raw data being exchanged.
4349 peers and to debug the raw data being exchanged.
4349
4350
4350 ``--localssh`` will start an SSH server against the current repository
4351 ``--localssh`` will start an SSH server against the current repository
4351 and connect to that. By default, the connection will perform a handshake
4352 and connect to that. By default, the connection will perform a handshake
4352 and establish an appropriate peer instance.
4353 and establish an appropriate peer instance.
4353
4354
4354 ``--peer`` can be used to bypass the handshake protocol and construct a
4355 ``--peer`` can be used to bypass the handshake protocol and construct a
4355 peer instance using the specified class type. Valid values are ``raw``,
4356 peer instance using the specified class type. Valid values are ``raw``,
4356 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4357 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4357 don't support higher-level command actions.
4358 don't support higher-level command actions.
4358
4359
4359 ``--noreadstderr`` can be used to disable automatic reading from stderr
4360 ``--noreadstderr`` can be used to disable automatic reading from stderr
4360 of the peer (for SSH connections only). Disabling automatic reading of
4361 of the peer (for SSH connections only). Disabling automatic reading of
4361 stderr is useful for making output more deterministic.
4362 stderr is useful for making output more deterministic.
4362
4363
4363 Commands are issued via a mini language which is specified via stdin.
4364 Commands are issued via a mini language which is specified via stdin.
4364 The language consists of individual actions to perform. An action is
4365 The language consists of individual actions to perform. An action is
4365 defined by a block. A block is defined as a line with no leading
4366 defined by a block. A block is defined as a line with no leading
4366 space followed by 0 or more lines with leading space. Blocks are
4367 space followed by 0 or more lines with leading space. Blocks are
4367 effectively a high-level command with additional metadata.
4368 effectively a high-level command with additional metadata.
4368
4369
4369 Lines beginning with ``#`` are ignored.
4370 Lines beginning with ``#`` are ignored.
4370
4371
4371 The following sections denote available actions.
4372 The following sections denote available actions.
4372
4373
4373 raw
4374 raw
4374 ---
4375 ---
4375
4376
4376 Send raw data to the server.
4377 Send raw data to the server.
4377
4378
4378 The block payload contains the raw data to send as one atomic send
4379 The block payload contains the raw data to send as one atomic send
4379 operation. The data may not actually be delivered in a single system
4380 operation. The data may not actually be delivered in a single system
4380 call: it depends on the abilities of the transport being used.
4381 call: it depends on the abilities of the transport being used.
4381
4382
4382 Each line in the block is de-indented and concatenated. Then, that
4383 Each line in the block is de-indented and concatenated. Then, that
4383 value is evaluated as a Python b'' literal. This allows the use of
4384 value is evaluated as a Python b'' literal. This allows the use of
4384 backslash escaping, etc.
4385 backslash escaping, etc.
4385
4386
4386 raw+
4387 raw+
4387 ----
4388 ----
4388
4389
4389 Behaves like ``raw`` except flushes output afterwards.
4390 Behaves like ``raw`` except flushes output afterwards.
4390
4391
4391 command <X>
4392 command <X>
4392 -----------
4393 -----------
4393
4394
4394 Send a request to run a named command, whose name follows the ``command``
4395 Send a request to run a named command, whose name follows the ``command``
4395 string.
4396 string.
4396
4397
4397 Arguments to the command are defined as lines in this block. The format of
4398 Arguments to the command are defined as lines in this block. The format of
4398 each line is ``<key> <value>``. e.g.::
4399 each line is ``<key> <value>``. e.g.::
4399
4400
4400 command listkeys
4401 command listkeys
4401 namespace bookmarks
4402 namespace bookmarks
4402
4403
4403 If the value begins with ``eval:``, it will be interpreted as a Python
4404 If the value begins with ``eval:``, it will be interpreted as a Python
4404 literal expression. Otherwise values are interpreted as Python b'' literals.
4405 literal expression. Otherwise values are interpreted as Python b'' literals.
4405 This allows sending complex types and encoding special byte sequences via
4406 This allows sending complex types and encoding special byte sequences via
4406 backslash escaping.
4407 backslash escaping.
4407
4408
4408 The following arguments have special meaning:
4409 The following arguments have special meaning:
4409
4410
4410 ``PUSHFILE``
4411 ``PUSHFILE``
4411 When defined, the *push* mechanism of the peer will be used instead
4412 When defined, the *push* mechanism of the peer will be used instead
4412 of the static request-response mechanism and the content of the
4413 of the static request-response mechanism and the content of the
4413 file specified in the value of this argument will be sent as the
4414 file specified in the value of this argument will be sent as the
4414 command payload.
4415 command payload.
4415
4416
4416 This can be used to submit a local bundle file to the remote.
4417 This can be used to submit a local bundle file to the remote.
4417
4418
4418 batchbegin
4419 batchbegin
4419 ----------
4420 ----------
4420
4421
4421 Instruct the peer to begin a batched send.
4422 Instruct the peer to begin a batched send.
4422
4423
4423 All ``command`` blocks are queued for execution until the next
4424 All ``command`` blocks are queued for execution until the next
4424 ``batchsubmit`` block.
4425 ``batchsubmit`` block.
4425
4426
4426 batchsubmit
4427 batchsubmit
4427 -----------
4428 -----------
4428
4429
4429 Submit previously queued ``command`` blocks as a batch request.
4430 Submit previously queued ``command`` blocks as a batch request.
4430
4431
4431 This action MUST be paired with a ``batchbegin`` action.
4432 This action MUST be paired with a ``batchbegin`` action.
4432
4433
4433 httprequest <method> <path>
4434 httprequest <method> <path>
4434 ---------------------------
4435 ---------------------------
4435
4436
4436 (HTTP peer only)
4437 (HTTP peer only)
4437
4438
4438 Send an HTTP request to the peer.
4439 Send an HTTP request to the peer.
4439
4440
4440 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4441 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4441
4442
4442 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4443 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4443 headers to add to the request. e.g. ``Accept: foo``.
4444 headers to add to the request. e.g. ``Accept: foo``.
4444
4445
4445 The following arguments are special:
4446 The following arguments are special:
4446
4447
4447 ``BODYFILE``
4448 ``BODYFILE``
4448 The content of the file defined as the value to this argument will be
4449 The content of the file defined as the value to this argument will be
4449 transferred verbatim as the HTTP request body.
4450 transferred verbatim as the HTTP request body.
4450
4451
4451 ``frame <type> <flags> <payload>``
4452 ``frame <type> <flags> <payload>``
4452 Send a unified protocol frame as part of the request body.
4453 Send a unified protocol frame as part of the request body.
4453
4454
4454 All frames will be collected and sent as the body to the HTTP
4455 All frames will be collected and sent as the body to the HTTP
4455 request.
4456 request.
4456
4457
4457 close
4458 close
4458 -----
4459 -----
4459
4460
4460 Close the connection to the server.
4461 Close the connection to the server.
4461
4462
4462 flush
4463 flush
4463 -----
4464 -----
4464
4465
4465 Flush data written to the server.
4466 Flush data written to the server.
4466
4467
4467 readavailable
4468 readavailable
4468 -------------
4469 -------------
4469
4470
4470 Close the write end of the connection and read all available data from
4471 Close the write end of the connection and read all available data from
4471 the server.
4472 the server.
4472
4473
4473 If the connection to the server encompasses multiple pipes, we poll both
4474 If the connection to the server encompasses multiple pipes, we poll both
4474 pipes and read available data.
4475 pipes and read available data.
4475
4476
4476 readline
4477 readline
4477 --------
4478 --------
4478
4479
4479 Read a line of output from the server. If there are multiple output
4480 Read a line of output from the server. If there are multiple output
4480 pipes, reads only the main pipe.
4481 pipes, reads only the main pipe.
4481
4482
4482 ereadline
4483 ereadline
4483 ---------
4484 ---------
4484
4485
4485 Like ``readline``, but read from the stderr pipe, if available.
4486 Like ``readline``, but read from the stderr pipe, if available.
4486
4487
4487 read <X>
4488 read <X>
4488 --------
4489 --------
4489
4490
4490 ``read()`` N bytes from the server's main output pipe.
4491 ``read()`` N bytes from the server's main output pipe.
4491
4492
4492 eread <X>
4493 eread <X>
4493 ---------
4494 ---------
4494
4495
4495 ``read()`` N bytes from the server's stderr pipe, if available.
4496 ``read()`` N bytes from the server's stderr pipe, if available.
4496
4497
4497 Specifying Unified Frame-Based Protocol Frames
4498 Specifying Unified Frame-Based Protocol Frames
4498 ----------------------------------------------
4499 ----------------------------------------------
4499
4500
4500 It is possible to emit a *Unified Frame-Based Protocol* by using special
4501 It is possible to emit a *Unified Frame-Based Protocol* by using special
4501 syntax.
4502 syntax.
4502
4503
4503 A frame is composed as a type, flags, and payload. These can be parsed
4504 A frame is composed as a type, flags, and payload. These can be parsed
4504 from a string of the form:
4505 from a string of the form:
4505
4506
4506 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4507 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4507
4508
4508 ``request-id`` and ``stream-id`` are integers defining the request and
4509 ``request-id`` and ``stream-id`` are integers defining the request and
4509 stream identifiers.
4510 stream identifiers.
4510
4511
4511 ``type`` can be an integer value for the frame type or the string name
4512 ``type`` can be an integer value for the frame type or the string name
4512 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4513 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4513 ``command-name``.
4514 ``command-name``.
4514
4515
4515 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4516 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4516 components. Each component (and there can be just one) can be an integer
4517 components. Each component (and there can be just one) can be an integer
4517 or a flag name for stream flags or frame flags, respectively. Values are
4518 or a flag name for stream flags or frame flags, respectively. Values are
4518 resolved to integers and then bitwise OR'd together.
4519 resolved to integers and then bitwise OR'd together.
4519
4520
4520 ``payload`` represents the raw frame payload. If it begins with
4521 ``payload`` represents the raw frame payload. If it begins with
4521 ``cbor:``, the following string is evaluated as Python code and the
4522 ``cbor:``, the following string is evaluated as Python code and the
4522 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4523 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4523 as a Python byte string literal.
4524 as a Python byte string literal.
4524 """
4525 """
4525 opts = pycompat.byteskwargs(opts)
4526 opts = pycompat.byteskwargs(opts)
4526
4527
4527 if opts[b'localssh'] and not repo:
4528 if opts[b'localssh'] and not repo:
4528 raise error.Abort(_(b'--localssh requires a repository'))
4529 raise error.Abort(_(b'--localssh requires a repository'))
4529
4530
4530 if opts[b'peer'] and opts[b'peer'] not in (
4531 if opts[b'peer'] and opts[b'peer'] not in (
4531 b'raw',
4532 b'raw',
4532 b'ssh1',
4533 b'ssh1',
4533 ):
4534 ):
4534 raise error.Abort(
4535 raise error.Abort(
4535 _(b'invalid value for --peer'),
4536 _(b'invalid value for --peer'),
4536 hint=_(b'valid values are "raw" and "ssh1"'),
4537 hint=_(b'valid values are "raw" and "ssh1"'),
4537 )
4538 )
4538
4539
4539 if path and opts[b'localssh']:
4540 if path and opts[b'localssh']:
4540 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4541 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4541
4542
4542 if ui.interactive():
4543 if ui.interactive():
4543 ui.write(_(b'(waiting for commands on stdin)\n'))
4544 ui.write(_(b'(waiting for commands on stdin)\n'))
4544
4545
4545 blocks = list(_parsewirelangblocks(ui.fin))
4546 blocks = list(_parsewirelangblocks(ui.fin))
4546
4547
4547 proc = None
4548 proc = None
4548 stdin = None
4549 stdin = None
4549 stdout = None
4550 stdout = None
4550 stderr = None
4551 stderr = None
4551 opener = None
4552 opener = None
4552
4553
4553 if opts[b'localssh']:
4554 if opts[b'localssh']:
4554 # We start the SSH server in its own process so there is process
4555 # We start the SSH server in its own process so there is process
4555 # separation. This prevents a whole class of potential bugs around
4556 # separation. This prevents a whole class of potential bugs around
4556 # shared state from interfering with server operation.
4557 # shared state from interfering with server operation.
4557 args = procutil.hgcmd() + [
4558 args = procutil.hgcmd() + [
4558 b'-R',
4559 b'-R',
4559 repo.root,
4560 repo.root,
4560 b'debugserve',
4561 b'debugserve',
4561 b'--sshstdio',
4562 b'--sshstdio',
4562 ]
4563 ]
4563 proc = subprocess.Popen(
4564 proc = subprocess.Popen(
4564 pycompat.rapply(procutil.tonativestr, args),
4565 pycompat.rapply(procutil.tonativestr, args),
4565 stdin=subprocess.PIPE,
4566 stdin=subprocess.PIPE,
4566 stdout=subprocess.PIPE,
4567 stdout=subprocess.PIPE,
4567 stderr=subprocess.PIPE,
4568 stderr=subprocess.PIPE,
4568 bufsize=0,
4569 bufsize=0,
4569 )
4570 )
4570
4571
4571 stdin = proc.stdin
4572 stdin = proc.stdin
4572 stdout = proc.stdout
4573 stdout = proc.stdout
4573 stderr = proc.stderr
4574 stderr = proc.stderr
4574
4575
4575 # We turn the pipes into observers so we can log I/O.
4576 # We turn the pipes into observers so we can log I/O.
4576 if ui.verbose or opts[b'peer'] == b'raw':
4577 if ui.verbose or opts[b'peer'] == b'raw':
4577 stdin = util.makeloggingfileobject(
4578 stdin = util.makeloggingfileobject(
4578 ui, proc.stdin, b'i', logdata=True
4579 ui, proc.stdin, b'i', logdata=True
4579 )
4580 )
4580 stdout = util.makeloggingfileobject(
4581 stdout = util.makeloggingfileobject(
4581 ui, proc.stdout, b'o', logdata=True
4582 ui, proc.stdout, b'o', logdata=True
4582 )
4583 )
4583 stderr = util.makeloggingfileobject(
4584 stderr = util.makeloggingfileobject(
4584 ui, proc.stderr, b'e', logdata=True
4585 ui, proc.stderr, b'e', logdata=True
4585 )
4586 )
4586
4587
4587 # --localssh also implies the peer connection settings.
4588 # --localssh also implies the peer connection settings.
4588
4589
4589 url = b'ssh://localserver'
4590 url = b'ssh://localserver'
4590 autoreadstderr = not opts[b'noreadstderr']
4591 autoreadstderr = not opts[b'noreadstderr']
4591
4592
4592 if opts[b'peer'] == b'ssh1':
4593 if opts[b'peer'] == b'ssh1':
4593 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4594 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4594 peer = sshpeer.sshv1peer(
4595 peer = sshpeer.sshv1peer(
4595 ui,
4596 ui,
4596 url,
4597 url,
4597 proc,
4598 proc,
4598 stdin,
4599 stdin,
4599 stdout,
4600 stdout,
4600 stderr,
4601 stderr,
4601 None,
4602 None,
4602 autoreadstderr=autoreadstderr,
4603 autoreadstderr=autoreadstderr,
4603 )
4604 )
4604 elif opts[b'peer'] == b'raw':
4605 elif opts[b'peer'] == b'raw':
4605 ui.write(_(b'using raw connection to peer\n'))
4606 ui.write(_(b'using raw connection to peer\n'))
4606 peer = None
4607 peer = None
4607 else:
4608 else:
4608 ui.write(_(b'creating ssh peer from handshake results\n'))
4609 ui.write(_(b'creating ssh peer from handshake results\n'))
4609 peer = sshpeer.makepeer(
4610 peer = sshpeer.makepeer(
4610 ui,
4611 ui,
4611 url,
4612 url,
4612 proc,
4613 proc,
4613 stdin,
4614 stdin,
4614 stdout,
4615 stdout,
4615 stderr,
4616 stderr,
4616 autoreadstderr=autoreadstderr,
4617 autoreadstderr=autoreadstderr,
4617 )
4618 )
4618
4619
4619 elif path:
4620 elif path:
4620 # We bypass hg.peer() so we can proxy the sockets.
4621 # We bypass hg.peer() so we can proxy the sockets.
4621 # TODO consider not doing this because we skip
4622 # TODO consider not doing this because we skip
4622 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4623 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4623 u = urlutil.url(path)
4624 u = urlutil.url(path)
4624 if u.scheme != b'http':
4625 if u.scheme != b'http':
4625 raise error.Abort(_(b'only http:// paths are currently supported'))
4626 raise error.Abort(_(b'only http:// paths are currently supported'))
4626
4627
4627 url, authinfo = u.authinfo()
4628 url, authinfo = u.authinfo()
4628 openerargs = {
4629 openerargs = {
4629 'useragent': b'Mercurial debugwireproto',
4630 'useragent': b'Mercurial debugwireproto',
4630 }
4631 }
4631
4632
4632 # Turn pipes/sockets into observers so we can log I/O.
4633 # Turn pipes/sockets into observers so we can log I/O.
4633 if ui.verbose:
4634 if ui.verbose:
4634 openerargs.update(
4635 openerargs.update(
4635 {
4636 {
4636 'loggingfh': ui,
4637 'loggingfh': ui,
4637 'loggingname': b's',
4638 'loggingname': b's',
4638 'loggingopts': {
4639 'loggingopts': {
4639 'logdata': True,
4640 'logdata': True,
4640 'logdataapis': False,
4641 'logdataapis': False,
4641 },
4642 },
4642 }
4643 }
4643 )
4644 )
4644
4645
4645 if ui.debugflag:
4646 if ui.debugflag:
4646 openerargs['loggingopts']['logdataapis'] = True
4647 openerargs['loggingopts']['logdataapis'] = True
4647
4648
4648 # Don't send default headers when in raw mode. This allows us to
4649 # Don't send default headers when in raw mode. This allows us to
4649 # bypass most of the behavior of our URL handling code so we can
4650 # bypass most of the behavior of our URL handling code so we can
4650 # have near complete control over what's sent on the wire.
4651 # have near complete control over what's sent on the wire.
4651 if opts[b'peer'] == b'raw':
4652 if opts[b'peer'] == b'raw':
4652 openerargs['sendaccept'] = False
4653 openerargs['sendaccept'] = False
4653
4654
4654 opener = urlmod.opener(ui, authinfo, **openerargs)
4655 opener = urlmod.opener(ui, authinfo, **openerargs)
4655
4656
4656 if opts[b'peer'] == b'raw':
4657 if opts[b'peer'] == b'raw':
4657 ui.write(_(b'using raw connection to peer\n'))
4658 ui.write(_(b'using raw connection to peer\n'))
4658 peer = None
4659 peer = None
4659 elif opts[b'peer']:
4660 elif opts[b'peer']:
4660 raise error.Abort(
4661 raise error.Abort(
4661 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4662 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4662 )
4663 )
4663 else:
4664 else:
4664 peer = httppeer.makepeer(ui, path, opener=opener)
4665 peer = httppeer.makepeer(ui, path, opener=opener)
4665
4666
4666 # We /could/ populate stdin/stdout with sock.makefile()...
4667 # We /could/ populate stdin/stdout with sock.makefile()...
4667 else:
4668 else:
4668 raise error.Abort(_(b'unsupported connection configuration'))
4669 raise error.Abort(_(b'unsupported connection configuration'))
4669
4670
4670 batchedcommands = None
4671 batchedcommands = None
4671
4672
4672 # Now perform actions based on the parsed wire language instructions.
4673 # Now perform actions based on the parsed wire language instructions.
4673 for action, lines in blocks:
4674 for action, lines in blocks:
4674 if action in (b'raw', b'raw+'):
4675 if action in (b'raw', b'raw+'):
4675 if not stdin:
4676 if not stdin:
4676 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4677 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4677
4678
4678 # Concatenate the data together.
4679 # Concatenate the data together.
4679 data = b''.join(l.lstrip() for l in lines)
4680 data = b''.join(l.lstrip() for l in lines)
4680 data = stringutil.unescapestr(data)
4681 data = stringutil.unescapestr(data)
4681 stdin.write(data)
4682 stdin.write(data)
4682
4683
4683 if action == b'raw+':
4684 if action == b'raw+':
4684 stdin.flush()
4685 stdin.flush()
4685 elif action == b'flush':
4686 elif action == b'flush':
4686 if not stdin:
4687 if not stdin:
4687 raise error.Abort(_(b'cannot call flush on this peer'))
4688 raise error.Abort(_(b'cannot call flush on this peer'))
4688 stdin.flush()
4689 stdin.flush()
4689 elif action.startswith(b'command'):
4690 elif action.startswith(b'command'):
4690 if not peer:
4691 if not peer:
4691 raise error.Abort(
4692 raise error.Abort(
4692 _(
4693 _(
4693 b'cannot send commands unless peer instance '
4694 b'cannot send commands unless peer instance '
4694 b'is available'
4695 b'is available'
4695 )
4696 )
4696 )
4697 )
4697
4698
4698 command = action.split(b' ', 1)[1]
4699 command = action.split(b' ', 1)[1]
4699
4700
4700 args = {}
4701 args = {}
4701 for line in lines:
4702 for line in lines:
4702 # We need to allow empty values.
4703 # We need to allow empty values.
4703 fields = line.lstrip().split(b' ', 1)
4704 fields = line.lstrip().split(b' ', 1)
4704 if len(fields) == 1:
4705 if len(fields) == 1:
4705 key = fields[0]
4706 key = fields[0]
4706 value = b''
4707 value = b''
4707 else:
4708 else:
4708 key, value = fields
4709 key, value = fields
4709
4710
4710 if value.startswith(b'eval:'):
4711 if value.startswith(b'eval:'):
4711 value = stringutil.evalpythonliteral(value[5:])
4712 value = stringutil.evalpythonliteral(value[5:])
4712 else:
4713 else:
4713 value = stringutil.unescapestr(value)
4714 value = stringutil.unescapestr(value)
4714
4715
4715 args[key] = value
4716 args[key] = value
4716
4717
4717 if batchedcommands is not None:
4718 if batchedcommands is not None:
4718 batchedcommands.append((command, args))
4719 batchedcommands.append((command, args))
4719 continue
4720 continue
4720
4721
4721 ui.status(_(b'sending %s command\n') % command)
4722 ui.status(_(b'sending %s command\n') % command)
4722
4723
4723 if b'PUSHFILE' in args:
4724 if b'PUSHFILE' in args:
4724 with open(args[b'PUSHFILE'], 'rb') as fh:
4725 with open(args[b'PUSHFILE'], 'rb') as fh:
4725 del args[b'PUSHFILE']
4726 del args[b'PUSHFILE']
4726 res, output = peer._callpush(
4727 res, output = peer._callpush(
4727 command, fh, **pycompat.strkwargs(args)
4728 command, fh, **pycompat.strkwargs(args)
4728 )
4729 )
4729 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4730 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4730 ui.status(
4731 ui.status(
4731 _(b'remote output: %s\n') % stringutil.escapestr(output)
4732 _(b'remote output: %s\n') % stringutil.escapestr(output)
4732 )
4733 )
4733 else:
4734 else:
4734 with peer.commandexecutor() as e:
4735 with peer.commandexecutor() as e:
4735 res = e.callcommand(command, args).result()
4736 res = e.callcommand(command, args).result()
4736
4737
4737 ui.status(
4738 ui.status(
4738 _(b'response: %s\n')
4739 _(b'response: %s\n')
4739 % stringutil.pprint(res, bprefix=True, indent=2)
4740 % stringutil.pprint(res, bprefix=True, indent=2)
4740 )
4741 )
4741
4742
4742 elif action == b'batchbegin':
4743 elif action == b'batchbegin':
4743 if batchedcommands is not None:
4744 if batchedcommands is not None:
4744 raise error.Abort(_(b'nested batchbegin not allowed'))
4745 raise error.Abort(_(b'nested batchbegin not allowed'))
4745
4746
4746 batchedcommands = []
4747 batchedcommands = []
4747 elif action == b'batchsubmit':
4748 elif action == b'batchsubmit':
4748 # There is a batching API we could go through. But it would be
4749 # There is a batching API we could go through. But it would be
4749 # difficult to normalize requests into function calls. It is easier
4750 # difficult to normalize requests into function calls. It is easier
4750 # to bypass this layer and normalize to commands + args.
4751 # to bypass this layer and normalize to commands + args.
4751 ui.status(
4752 ui.status(
4752 _(b'sending batch with %d sub-commands\n')
4753 _(b'sending batch with %d sub-commands\n')
4753 % len(batchedcommands)
4754 % len(batchedcommands)
4754 )
4755 )
4755 assert peer is not None
4756 assert peer is not None
4756 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4757 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4757 ui.status(
4758 ui.status(
4758 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4759 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4759 )
4760 )
4760
4761
4761 batchedcommands = None
4762 batchedcommands = None
4762
4763
4763 elif action.startswith(b'httprequest '):
4764 elif action.startswith(b'httprequest '):
4764 if not opener:
4765 if not opener:
4765 raise error.Abort(
4766 raise error.Abort(
4766 _(b'cannot use httprequest without an HTTP peer')
4767 _(b'cannot use httprequest without an HTTP peer')
4767 )
4768 )
4768
4769
4769 request = action.split(b' ', 2)
4770 request = action.split(b' ', 2)
4770 if len(request) != 3:
4771 if len(request) != 3:
4771 raise error.Abort(
4772 raise error.Abort(
4772 _(
4773 _(
4773 b'invalid httprequest: expected format is '
4774 b'invalid httprequest: expected format is '
4774 b'"httprequest <method> <path>'
4775 b'"httprequest <method> <path>'
4775 )
4776 )
4776 )
4777 )
4777
4778
4778 method, httppath = request[1:]
4779 method, httppath = request[1:]
4779 headers = {}
4780 headers = {}
4780 body = None
4781 body = None
4781 frames = []
4782 frames = []
4782 for line in lines:
4783 for line in lines:
4783 line = line.lstrip()
4784 line = line.lstrip()
4784 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4785 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4785 if m:
4786 if m:
4786 # Headers need to use native strings.
4787 # Headers need to use native strings.
4787 key = pycompat.strurl(m.group(1))
4788 key = pycompat.strurl(m.group(1))
4788 value = pycompat.strurl(m.group(2))
4789 value = pycompat.strurl(m.group(2))
4789 headers[key] = value
4790 headers[key] = value
4790 continue
4791 continue
4791
4792
4792 if line.startswith(b'BODYFILE '):
4793 if line.startswith(b'BODYFILE '):
4793 with open(line.split(b' ', 1), b'rb') as fh:
4794 with open(line.split(b' ', 1), b'rb') as fh:
4794 body = fh.read()
4795 body = fh.read()
4795 elif line.startswith(b'frame '):
4796 elif line.startswith(b'frame '):
4796 frame = wireprotoframing.makeframefromhumanstring(
4797 frame = wireprotoframing.makeframefromhumanstring(
4797 line[len(b'frame ') :]
4798 line[len(b'frame ') :]
4798 )
4799 )
4799
4800
4800 frames.append(frame)
4801 frames.append(frame)
4801 else:
4802 else:
4802 raise error.Abort(
4803 raise error.Abort(
4803 _(b'unknown argument to httprequest: %s') % line
4804 _(b'unknown argument to httprequest: %s') % line
4804 )
4805 )
4805
4806
4806 url = path + httppath
4807 url = path + httppath
4807
4808
4808 if frames:
4809 if frames:
4809 body = b''.join(bytes(f) for f in frames)
4810 body = b''.join(bytes(f) for f in frames)
4810
4811
4811 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4812 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4812
4813
4813 # urllib.Request insists on using has_data() as a proxy for
4814 # urllib.Request insists on using has_data() as a proxy for
4814 # determining the request method. Override that to use our
4815 # determining the request method. Override that to use our
4815 # explicitly requested method.
4816 # explicitly requested method.
4816 req.get_method = lambda: pycompat.sysstr(method)
4817 req.get_method = lambda: pycompat.sysstr(method)
4817
4818
4818 try:
4819 try:
4819 res = opener.open(req)
4820 res = opener.open(req)
4820 body = res.read()
4821 body = res.read()
4821 except util.urlerr.urlerror as e:
4822 except util.urlerr.urlerror as e:
4822 # read() method must be called, but only exists in Python 2
4823 # read() method must be called, but only exists in Python 2
4823 getattr(e, 'read', lambda: None)()
4824 getattr(e, 'read', lambda: None)()
4824 continue
4825 continue
4825
4826
4826 ct = res.headers.get('Content-Type')
4827 ct = res.headers.get('Content-Type')
4827 if ct == 'application/mercurial-cbor':
4828 if ct == 'application/mercurial-cbor':
4828 ui.write(
4829 ui.write(
4829 _(b'cbor> %s\n')
4830 _(b'cbor> %s\n')
4830 % stringutil.pprint(
4831 % stringutil.pprint(
4831 cborutil.decodeall(body), bprefix=True, indent=2
4832 cborutil.decodeall(body), bprefix=True, indent=2
4832 )
4833 )
4833 )
4834 )
4834
4835
4835 elif action == b'close':
4836 elif action == b'close':
4836 assert peer is not None
4837 assert peer is not None
4837 peer.close()
4838 peer.close()
4838 elif action == b'readavailable':
4839 elif action == b'readavailable':
4839 if not stdout or not stderr:
4840 if not stdout or not stderr:
4840 raise error.Abort(
4841 raise error.Abort(
4841 _(b'readavailable not available on this peer')
4842 _(b'readavailable not available on this peer')
4842 )
4843 )
4843
4844
4844 stdin.close()
4845 stdin.close()
4845 stdout.read()
4846 stdout.read()
4846 stderr.read()
4847 stderr.read()
4847
4848
4848 elif action == b'readline':
4849 elif action == b'readline':
4849 if not stdout:
4850 if not stdout:
4850 raise error.Abort(_(b'readline not available on this peer'))
4851 raise error.Abort(_(b'readline not available on this peer'))
4851 stdout.readline()
4852 stdout.readline()
4852 elif action == b'ereadline':
4853 elif action == b'ereadline':
4853 if not stderr:
4854 if not stderr:
4854 raise error.Abort(_(b'ereadline not available on this peer'))
4855 raise error.Abort(_(b'ereadline not available on this peer'))
4855 stderr.readline()
4856 stderr.readline()
4856 elif action.startswith(b'read '):
4857 elif action.startswith(b'read '):
4857 count = int(action.split(b' ', 1)[1])
4858 count = int(action.split(b' ', 1)[1])
4858 if not stdout:
4859 if not stdout:
4859 raise error.Abort(_(b'read not available on this peer'))
4860 raise error.Abort(_(b'read not available on this peer'))
4860 stdout.read(count)
4861 stdout.read(count)
4861 elif action.startswith(b'eread '):
4862 elif action.startswith(b'eread '):
4862 count = int(action.split(b' ', 1)[1])
4863 count = int(action.split(b' ', 1)[1])
4863 if not stderr:
4864 if not stderr:
4864 raise error.Abort(_(b'eread not available on this peer'))
4865 raise error.Abort(_(b'eread not available on this peer'))
4865 stderr.read(count)
4866 stderr.read(count)
4866 else:
4867 else:
4867 raise error.Abort(_(b'unknown action: %s') % action)
4868 raise error.Abort(_(b'unknown action: %s') % action)
4868
4869
4869 if batchedcommands is not None:
4870 if batchedcommands is not None:
4870 raise error.Abort(_(b'unclosed "batchbegin" request'))
4871 raise error.Abort(_(b'unclosed "batchbegin" request'))
4871
4872
4872 if peer:
4873 if peer:
4873 peer.close()
4874 peer.close()
4874
4875
4875 if proc:
4876 if proc:
4876 proc.kill()
4877 proc.kill()
@@ -1,541 +1,531 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15
15
16 # mbp: "you know that thing where cvs gives you conflict markers?"
16 # mbp: "you know that thing where cvs gives you conflict markers?"
17 # s: "i hate that."
17 # s: "i hate that."
18
18
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 from .i18n import _
21 from .i18n import _
22 from . import (
22 from . import (
23 error,
23 error,
24 mdiff,
24 mdiff,
25 pycompat,
25 pycompat,
26 )
26 )
27 from .utils import stringutil
27 from .utils import stringutil
28
28
29
29
30 class CantReprocessAndShowBase(Exception):
30 class CantReprocessAndShowBase(Exception):
31 pass
31 pass
32
32
33
33
34 def intersect(ra, rb):
34 def intersect(ra, rb):
35 """Given two ranges return the range where they intersect or None.
35 """Given two ranges return the range where they intersect or None.
36
36
37 >>> intersect((0, 10), (0, 6))
37 >>> intersect((0, 10), (0, 6))
38 (0, 6)
38 (0, 6)
39 >>> intersect((0, 10), (5, 15))
39 >>> intersect((0, 10), (5, 15))
40 (5, 10)
40 (5, 10)
41 >>> intersect((0, 10), (10, 15))
41 >>> intersect((0, 10), (10, 15))
42 >>> intersect((0, 9), (10, 15))
42 >>> intersect((0, 9), (10, 15))
43 >>> intersect((0, 9), (7, 15))
43 >>> intersect((0, 9), (7, 15))
44 (7, 9)
44 (7, 9)
45 """
45 """
46 assert ra[0] <= ra[1]
46 assert ra[0] <= ra[1]
47 assert rb[0] <= rb[1]
47 assert rb[0] <= rb[1]
48
48
49 sa = max(ra[0], rb[0])
49 sa = max(ra[0], rb[0])
50 sb = min(ra[1], rb[1])
50 sb = min(ra[1], rb[1])
51 if sa < sb:
51 if sa < sb:
52 return sa, sb
52 return sa, sb
53 else:
53 else:
54 return None
54 return None
55
55
56
56
57 def compare_range(a, astart, aend, b, bstart, bend):
57 def compare_range(a, astart, aend, b, bstart, bend):
58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
59 if (aend - astart) != (bend - bstart):
59 if (aend - astart) != (bend - bstart):
60 return False
60 return False
61 for ia, ib in zip(
61 for ia, ib in zip(
62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
63 ):
63 ):
64 if a[ia] != b[ib]:
64 if a[ia] != b[ib]:
65 return False
65 return False
66 else:
66 else:
67 return True
67 return True
68
68
69
69
70 class Merge3Text(object):
70 class Merge3Text(object):
71 """3-way merge of texts.
71 """3-way merge of texts.
72
72
73 Given strings BASE, OTHER, THIS, tries to produce a combined text
73 Given strings BASE, OTHER, THIS, tries to produce a combined text
74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
75
75
76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
77 self.basetext = basetext
77 self.basetext = basetext
78 self.atext = atext
78 self.atext = atext
79 self.btext = btext
79 self.btext = btext
80 if base is None:
80 if base is None:
81 base = mdiff.splitnewlines(basetext)
81 base = mdiff.splitnewlines(basetext)
82 if a is None:
82 if a is None:
83 a = mdiff.splitnewlines(atext)
83 a = mdiff.splitnewlines(atext)
84 if b is None:
84 if b is None:
85 b = mdiff.splitnewlines(btext)
85 b = mdiff.splitnewlines(btext)
86 self.base = base
86 self.base = base
87 self.a = a
87 self.a = a
88 self.b = b
88 self.b = b
89
89
90 def merge_groups(self):
90 def merge_groups(self):
91 """Yield sequence of line groups. Each one is a tuple:
91 """Yield sequence of line groups. Each one is a tuple:
92
92
93 'unchanged', lines
93 'unchanged', lines
94 Lines unchanged from base
94 Lines unchanged from base
95
95
96 'a', lines
96 'a', lines
97 Lines taken from a
97 Lines taken from a
98
98
99 'same', lines
99 'same', lines
100 Lines taken from a (and equal to b)
100 Lines taken from a (and equal to b)
101
101
102 'b', lines
102 'b', lines
103 Lines taken from b
103 Lines taken from b
104
104
105 'conflict', (base_lines, a_lines, b_lines)
105 'conflict', (base_lines, a_lines, b_lines)
106 Lines from base were changed to either a or b and conflict.
106 Lines from base were changed to either a or b and conflict.
107 """
107 """
108 for t in self.merge_regions():
108 for t in self.merge_regions():
109 what = t[0]
109 what = t[0]
110 if what == b'unchanged':
110 if what == b'unchanged':
111 yield what, self.base[t[1] : t[2]]
111 yield what, self.base[t[1] : t[2]]
112 elif what == b'a' or what == b'same':
112 elif what == b'a' or what == b'same':
113 yield what, self.a[t[1] : t[2]]
113 yield what, self.a[t[1] : t[2]]
114 elif what == b'b':
114 elif what == b'b':
115 yield what, self.b[t[1] : t[2]]
115 yield what, self.b[t[1] : t[2]]
116 elif what == b'conflict':
116 elif what == b'conflict':
117 yield (
117 yield (
118 what,
118 what,
119 (
119 (
120 self.base[t[1] : t[2]],
120 self.base[t[1] : t[2]],
121 self.a[t[3] : t[4]],
121 self.a[t[3] : t[4]],
122 self.b[t[5] : t[6]],
122 self.b[t[5] : t[6]],
123 ),
123 ),
124 )
124 )
125 else:
125 else:
126 raise ValueError(what)
126 raise ValueError(what)
127
127
128 def merge_regions(self):
128 def merge_regions(self):
129 """Return sequences of matching and conflicting regions.
129 """Return sequences of matching and conflicting regions.
130
130
131 This returns tuples, where the first value says what kind we
131 This returns tuples, where the first value says what kind we
132 have:
132 have:
133
133
134 'unchanged', start, end
134 'unchanged', start, end
135 Take a region of base[start:end]
135 Take a region of base[start:end]
136
136
137 'same', astart, aend
137 'same', astart, aend
138 b and a are different from base but give the same result
138 b and a are different from base but give the same result
139
139
140 'a', start, end
140 'a', start, end
141 Non-clashing insertion from a[start:end]
141 Non-clashing insertion from a[start:end]
142
142
143 'conflict', zstart, zend, astart, aend, bstart, bend
143 'conflict', zstart, zend, astart, aend, bstart, bend
144 Conflict between a and b, with z as common ancestor
144 Conflict between a and b, with z as common ancestor
145
145
146 Method is as follows:
146 Method is as follows:
147
147
148 The two sequences align only on regions which match the base
148 The two sequences align only on regions which match the base
149 and both descendants. These are found by doing a two-way diff
149 and both descendants. These are found by doing a two-way diff
150 of each one against the base, and then finding the
150 of each one against the base, and then finding the
151 intersections between those regions. These "sync regions"
151 intersections between those regions. These "sync regions"
152 are by definition unchanged in both and easily dealt with.
152 are by definition unchanged in both and easily dealt with.
153
153
154 The regions in between can be in any of three cases:
154 The regions in between can be in any of three cases:
155 conflicted, or changed on only one side.
155 conflicted, or changed on only one side.
156 """
156 """
157
157
158 # section a[0:ia] has been disposed of, etc
158 # section a[0:ia] has been disposed of, etc
159 iz = ia = ib = 0
159 iz = ia = ib = 0
160
160
161 for region in self.find_sync_regions():
161 for region in self.find_sync_regions():
162 zmatch, zend, amatch, aend, bmatch, bend = region
162 zmatch, zend, amatch, aend, bmatch, bend = region
163 # print 'match base [%d:%d]' % (zmatch, zend)
163 # print 'match base [%d:%d]' % (zmatch, zend)
164
164
165 matchlen = zend - zmatch
165 matchlen = zend - zmatch
166 assert matchlen >= 0
166 assert matchlen >= 0
167 assert matchlen == (aend - amatch)
167 assert matchlen == (aend - amatch)
168 assert matchlen == (bend - bmatch)
168 assert matchlen == (bend - bmatch)
169
169
170 len_a = amatch - ia
170 len_a = amatch - ia
171 len_b = bmatch - ib
171 len_b = bmatch - ib
172 len_base = zmatch - iz
172 len_base = zmatch - iz
173 assert len_a >= 0
173 assert len_a >= 0
174 assert len_b >= 0
174 assert len_b >= 0
175 assert len_base >= 0
175 assert len_base >= 0
176
176
177 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
177 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
178
178
179 if len_a or len_b:
179 if len_a or len_b:
180 # try to avoid actually slicing the lists
180 # try to avoid actually slicing the lists
181 equal_a = compare_range(
181 equal_a = compare_range(
182 self.a, ia, amatch, self.base, iz, zmatch
182 self.a, ia, amatch, self.base, iz, zmatch
183 )
183 )
184 equal_b = compare_range(
184 equal_b = compare_range(
185 self.b, ib, bmatch, self.base, iz, zmatch
185 self.b, ib, bmatch, self.base, iz, zmatch
186 )
186 )
187 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
187 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
188
188
189 if same:
189 if same:
190 yield b'same', ia, amatch
190 yield b'same', ia, amatch
191 elif equal_a and not equal_b:
191 elif equal_a and not equal_b:
192 yield b'b', ib, bmatch
192 yield b'b', ib, bmatch
193 elif equal_b and not equal_a:
193 elif equal_b and not equal_a:
194 yield b'a', ia, amatch
194 yield b'a', ia, amatch
195 elif not equal_a and not equal_b:
195 elif not equal_a and not equal_b:
196 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
196 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
197 else:
197 else:
198 raise AssertionError(b"can't handle a=b=base but unmatched")
198 raise AssertionError(b"can't handle a=b=base but unmatched")
199
199
200 ia = amatch
200 ia = amatch
201 ib = bmatch
201 ib = bmatch
202 iz = zmatch
202 iz = zmatch
203
203
204 # if the same part of the base was deleted on both sides
204 # if the same part of the base was deleted on both sides
205 # that's OK, we can just skip it.
205 # that's OK, we can just skip it.
206
206
207 if matchlen > 0:
207 if matchlen > 0:
208 assert ia == amatch
208 assert ia == amatch
209 assert ib == bmatch
209 assert ib == bmatch
210 assert iz == zmatch
210 assert iz == zmatch
211
211
212 yield b'unchanged', zmatch, zend
212 yield b'unchanged', zmatch, zend
213 iz = zend
213 iz = zend
214 ia = aend
214 ia = aend
215 ib = bend
215 ib = bend
216
216
217 def minimize(self, merge_groups):
217 def minimize(self, merge_groups):
218 """Trim conflict regions of lines where A and B sides match.
218 """Trim conflict regions of lines where A and B sides match.
219
219
220 Lines where both A and B have made the same changes at the beginning
220 Lines where both A and B have made the same changes at the beginning
221 or the end of each merge region are eliminated from the conflict
221 or the end of each merge region are eliminated from the conflict
222 region and are instead considered the same.
222 region and are instead considered the same.
223 """
223 """
224 for what, lines in merge_groups:
224 for what, lines in merge_groups:
225 if what != b"conflict":
225 if what != b"conflict":
226 yield what, lines
226 yield what, lines
227 continue
227 continue
228 base_lines, a_lines, b_lines = lines
228 base_lines, a_lines, b_lines = lines
229 alen = len(a_lines)
229 alen = len(a_lines)
230 blen = len(b_lines)
230 blen = len(b_lines)
231
231
232 # find matches at the front
232 # find matches at the front
233 ii = 0
233 ii = 0
234 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
234 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
235 ii += 1
235 ii += 1
236 startmatches = ii
236 startmatches = ii
237
237
238 # find matches at the end
238 # find matches at the end
239 ii = 0
239 ii = 0
240 while (
240 while (
241 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
241 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
242 ):
242 ):
243 ii += 1
243 ii += 1
244 endmatches = ii
244 endmatches = ii
245
245
246 if startmatches > 0:
246 if startmatches > 0:
247 yield b'same', a_lines[:startmatches]
247 yield b'same', a_lines[:startmatches]
248
248
249 yield (
249 yield (
250 b'conflict',
250 b'conflict',
251 (
251 (
252 base_lines,
252 base_lines,
253 a_lines[startmatches : alen - endmatches],
253 a_lines[startmatches : alen - endmatches],
254 b_lines[startmatches : blen - endmatches],
254 b_lines[startmatches : blen - endmatches],
255 ),
255 ),
256 )
256 )
257
257
258 if endmatches > 0:
258 if endmatches > 0:
259 yield b'same', a_lines[alen - endmatches :]
259 yield b'same', a_lines[alen - endmatches :]
260
260
261 def find_sync_regions(self):
261 def find_sync_regions(self):
262 """Return a list of sync regions, where both descendants match the base.
262 """Return a list of sync regions, where both descendants match the base.
263
263
264 Generates a list of (base1, base2, a1, a2, b1, b2). There is
264 Generates a list of (base1, base2, a1, a2, b1, b2). There is
265 always a zero-length sync region at the end of all the files.
265 always a zero-length sync region at the end of all the files.
266 """
266 """
267
267
268 ia = ib = 0
268 ia = ib = 0
269 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
269 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
270 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
270 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
271 len_a = len(amatches)
271 len_a = len(amatches)
272 len_b = len(bmatches)
272 len_b = len(bmatches)
273
273
274 sl = []
274 sl = []
275
275
276 while ia < len_a and ib < len_b:
276 while ia < len_a and ib < len_b:
277 abase, amatch, alen = amatches[ia]
277 abase, amatch, alen = amatches[ia]
278 bbase, bmatch, blen = bmatches[ib]
278 bbase, bmatch, blen = bmatches[ib]
279
279
280 # there is an unconflicted block at i; how long does it
280 # there is an unconflicted block at i; how long does it
281 # extend? until whichever one ends earlier.
281 # extend? until whichever one ends earlier.
282 i = intersect((abase, abase + alen), (bbase, bbase + blen))
282 i = intersect((abase, abase + alen), (bbase, bbase + blen))
283 if i:
283 if i:
284 intbase = i[0]
284 intbase = i[0]
285 intend = i[1]
285 intend = i[1]
286 intlen = intend - intbase
286 intlen = intend - intbase
287
287
288 # found a match of base[i[0], i[1]]; this may be less than
288 # found a match of base[i[0], i[1]]; this may be less than
289 # the region that matches in either one
289 # the region that matches in either one
290 assert intlen <= alen
290 assert intlen <= alen
291 assert intlen <= blen
291 assert intlen <= blen
292 assert abase <= intbase
292 assert abase <= intbase
293 assert bbase <= intbase
293 assert bbase <= intbase
294
294
295 asub = amatch + (intbase - abase)
295 asub = amatch + (intbase - abase)
296 bsub = bmatch + (intbase - bbase)
296 bsub = bmatch + (intbase - bbase)
297 aend = asub + intlen
297 aend = asub + intlen
298 bend = bsub + intlen
298 bend = bsub + intlen
299
299
300 assert self.base[intbase:intend] == self.a[asub:aend], (
300 assert self.base[intbase:intend] == self.a[asub:aend], (
301 self.base[intbase:intend],
301 self.base[intbase:intend],
302 self.a[asub:aend],
302 self.a[asub:aend],
303 )
303 )
304
304
305 assert self.base[intbase:intend] == self.b[bsub:bend]
305 assert self.base[intbase:intend] == self.b[bsub:bend]
306
306
307 sl.append((intbase, intend, asub, aend, bsub, bend))
307 sl.append((intbase, intend, asub, aend, bsub, bend))
308
308
309 # advance whichever one ends first in the base text
309 # advance whichever one ends first in the base text
310 if (abase + alen) < (bbase + blen):
310 if (abase + alen) < (bbase + blen):
311 ia += 1
311 ia += 1
312 else:
312 else:
313 ib += 1
313 ib += 1
314
314
315 intbase = len(self.base)
315 intbase = len(self.base)
316 abase = len(self.a)
316 abase = len(self.a)
317 bbase = len(self.b)
317 bbase = len(self.b)
318 sl.append((intbase, intbase, abase, abase, bbase, bbase))
318 sl.append((intbase, intbase, abase, abase, bbase, bbase))
319
319
320 return sl
320 return sl
321
321
322
322
323 def _verifytext(text, path, ui, opts):
323 def _verifytext(text, path, ui, opts):
324 """verifies that text is non-binary (unless opts[text] is passed,
324 """verifies that text is non-binary (unless opts[text] is passed,
325 then we just warn)"""
325 then we just warn)"""
326 if stringutil.binary(text):
326 if stringutil.binary(text):
327 msg = _(b"%s looks like a binary file.") % path
327 msg = _(b"%s looks like a binary file.") % path
328 if not opts.get('quiet'):
328 if not opts.get('quiet'):
329 ui.warn(_(b'warning: %s\n') % msg)
329 ui.warn(_(b'warning: %s\n') % msg)
330 if not opts.get('text'):
330 if not opts.get('text'):
331 raise error.Abort(msg)
331 raise error.Abort(msg)
332 return text
332 return text
333
333
334
334
335 def _picklabels(overrides):
335 def _picklabels(overrides):
336 if len(overrides) > 3:
336 if len(overrides) > 3:
337 raise error.Abort(_(b"can only specify three labels."))
337 raise error.Abort(_(b"can only specify three labels."))
338 result = [None, None, None]
338 result = [None, None, None]
339 for i, override in enumerate(overrides):
339 for i, override in enumerate(overrides):
340 result[i] = override
340 result[i] = override
341 return result
341 return result
342
342
343
343
344 def _detect_newline(m3):
344 def _detect_newline(m3):
345 if len(m3.a) > 0:
345 if len(m3.a) > 0:
346 if m3.a[0].endswith(b'\r\n'):
346 if m3.a[0].endswith(b'\r\n'):
347 return b'\r\n'
347 return b'\r\n'
348 elif m3.a[0].endswith(b'\r'):
348 elif m3.a[0].endswith(b'\r'):
349 return b'\r'
349 return b'\r'
350 return b'\n'
350 return b'\n'
351
351
352
352
353 def render_markers(
353 def render_minimized(
354 m3,
354 m3,
355 name_a=None,
355 name_a=None,
356 name_b=None,
356 name_b=None,
357 start_marker=b'<<<<<<<',
357 start_marker=b'<<<<<<<',
358 mid_marker=b'=======',
358 mid_marker=b'=======',
359 end_marker=b'>>>>>>>',
359 end_marker=b'>>>>>>>',
360 minimize=False,
361 ):
360 ):
362 """Return merge in cvs-like form."""
361 """Return merge in cvs-like form."""
363 newline = _detect_newline(m3)
362 newline = _detect_newline(m3)
364 conflicts = False
363 conflicts = False
365 if name_a and start_marker:
364 if name_a:
366 start_marker = start_marker + b' ' + name_a
365 start_marker = start_marker + b' ' + name_a
367 if name_b and end_marker:
366 if name_b:
368 end_marker = end_marker + b' ' + name_b
367 end_marker = end_marker + b' ' + name_b
369 merge_groups = m3.merge_groups()
368 merge_groups = m3.merge_groups()
370 if minimize:
371 merge_groups = m3.minimize(merge_groups)
369 merge_groups = m3.minimize(merge_groups)
372 lines = []
370 lines = []
373 for what, group_lines in merge_groups:
371 for what, group_lines in merge_groups:
374 if what == b'conflict':
372 if what == b'conflict':
375 base_lines, a_lines, b_lines = group_lines
373 base_lines, a_lines, b_lines = group_lines
376 conflicts = True
374 conflicts = True
377 if start_marker is not None:
378 lines.append(start_marker + newline)
375 lines.append(start_marker + newline)
379 lines.extend(a_lines)
376 lines.extend(a_lines)
380 if mid_marker is not None:
381 lines.append(mid_marker + newline)
377 lines.append(mid_marker + newline)
382 lines.extend(b_lines)
378 lines.extend(b_lines)
383 if end_marker is not None:
384 lines.append(end_marker + newline)
379 lines.append(end_marker + newline)
385 else:
380 else:
386 lines.extend(group_lines)
381 lines.extend(group_lines)
387 return lines, conflicts
382 return lines, conflicts
388
383
389
384
390 def render_merge3(m3, name_a, name_b, name_base):
385 def render_merge3(m3, name_a, name_b, name_base):
391 """Render conflicts as 3-way conflict markers."""
386 """Render conflicts as 3-way conflict markers."""
392 newline = _detect_newline(m3)
387 newline = _detect_newline(m3)
393 conflicts = False
388 conflicts = False
394 lines = []
389 lines = []
395 for what, group_lines in m3.merge_groups():
390 for what, group_lines in m3.merge_groups():
396 if what == b'conflict':
391 if what == b'conflict':
397 base_lines, a_lines, b_lines = group_lines
392 base_lines, a_lines, b_lines = group_lines
398 conflicts = True
393 conflicts = True
399 lines.append(b'<<<<<<< ' + name_a + newline)
394 lines.append(b'<<<<<<< ' + name_a + newline)
400 lines.extend(a_lines)
395 lines.extend(a_lines)
401 lines.append(b'||||||| ' + name_base + newline)
396 lines.append(b'||||||| ' + name_base + newline)
402 lines.extend(base_lines)
397 lines.extend(base_lines)
403 lines.append(b'=======' + newline)
398 lines.append(b'=======' + newline)
404 lines.extend(b_lines)
399 lines.extend(b_lines)
405 lines.append(b'>>>>>>> ' + name_b + newline)
400 lines.append(b'>>>>>>> ' + name_b + newline)
406 else:
401 else:
407 lines.extend(group_lines)
402 lines.extend(group_lines)
408 return lines, conflicts
403 return lines, conflicts
409
404
410
405
411 def render_mergediff(m3, name_a, name_b, name_base):
406 def render_mergediff(m3, name_a, name_b, name_base):
412 """Render conflicts as conflict markers with one snapshot and one diff."""
407 """Render conflicts as conflict markers with one snapshot and one diff."""
413 newline = _detect_newline(m3)
408 newline = _detect_newline(m3)
414 lines = []
409 lines = []
415 conflicts = False
410 conflicts = False
416 for what, group_lines in m3.merge_groups():
411 for what, group_lines in m3.merge_groups():
417 if what == b'conflict':
412 if what == b'conflict':
418 base_lines, a_lines, b_lines = group_lines
413 base_lines, a_lines, b_lines = group_lines
419 base_text = b''.join(base_lines)
414 base_text = b''.join(base_lines)
420 b_blocks = list(
415 b_blocks = list(
421 mdiff.allblocks(
416 mdiff.allblocks(
422 base_text,
417 base_text,
423 b''.join(b_lines),
418 b''.join(b_lines),
424 lines1=base_lines,
419 lines1=base_lines,
425 lines2=b_lines,
420 lines2=b_lines,
426 )
421 )
427 )
422 )
428 a_blocks = list(
423 a_blocks = list(
429 mdiff.allblocks(
424 mdiff.allblocks(
430 base_text,
425 base_text,
431 b''.join(a_lines),
426 b''.join(a_lines),
432 lines1=base_lines,
427 lines1=base_lines,
433 lines2=b_lines,
428 lines2=b_lines,
434 )
429 )
435 )
430 )
436
431
437 def matching_lines(blocks):
432 def matching_lines(blocks):
438 return sum(
433 return sum(
439 block[1] - block[0]
434 block[1] - block[0]
440 for block, kind in blocks
435 for block, kind in blocks
441 if kind == b'='
436 if kind == b'='
442 )
437 )
443
438
444 def diff_lines(blocks, lines1, lines2):
439 def diff_lines(blocks, lines1, lines2):
445 for block, kind in blocks:
440 for block, kind in blocks:
446 if kind == b'=':
441 if kind == b'=':
447 for line in lines1[block[0] : block[1]]:
442 for line in lines1[block[0] : block[1]]:
448 yield b' ' + line
443 yield b' ' + line
449 else:
444 else:
450 for line in lines1[block[0] : block[1]]:
445 for line in lines1[block[0] : block[1]]:
451 yield b'-' + line
446 yield b'-' + line
452 for line in lines2[block[2] : block[3]]:
447 for line in lines2[block[2] : block[3]]:
453 yield b'+' + line
448 yield b'+' + line
454
449
455 lines.append(b"<<<<<<<" + newline)
450 lines.append(b"<<<<<<<" + newline)
456 if matching_lines(a_blocks) < matching_lines(b_blocks):
451 if matching_lines(a_blocks) < matching_lines(b_blocks):
457 lines.append(b"======= " + name_a + newline)
452 lines.append(b"======= " + name_a + newline)
458 lines.extend(a_lines)
453 lines.extend(a_lines)
459 lines.append(b"------- " + name_base + newline)
454 lines.append(b"------- " + name_base + newline)
460 lines.append(b"+++++++ " + name_b + newline)
455 lines.append(b"+++++++ " + name_b + newline)
461 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
456 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
462 else:
457 else:
463 lines.append(b"------- " + name_base + newline)
458 lines.append(b"------- " + name_base + newline)
464 lines.append(b"+++++++ " + name_a + newline)
459 lines.append(b"+++++++ " + name_a + newline)
465 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
460 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
466 lines.append(b"======= " + name_b + newline)
461 lines.append(b"======= " + name_b + newline)
467 lines.extend(b_lines)
462 lines.extend(b_lines)
468 lines.append(b">>>>>>>" + newline)
463 lines.append(b">>>>>>>" + newline)
469 conflicts = True
464 conflicts = True
470 else:
465 else:
471 lines.extend(group_lines)
466 lines.extend(group_lines)
472 return lines, conflicts
467 return lines, conflicts
473
468
474
469
475 def _resolve(m3, sides):
470 def _resolve(m3, sides):
476 lines = []
471 lines = []
477 for what, group_lines in m3.merge_groups():
472 for what, group_lines in m3.merge_groups():
478 if what == b'conflict':
473 if what == b'conflict':
479 for side in sides:
474 for side in sides:
480 lines.extend(group_lines[side])
475 lines.extend(group_lines[side])
481 else:
476 else:
482 lines.extend(group_lines)
477 lines.extend(group_lines)
483 return lines
478 return lines
484
479
485
480
486 def simplemerge(ui, localctx, basectx, otherctx, **opts):
481 def simplemerge(ui, localctx, basectx, otherctx, **opts):
487 """Performs the simplemerge algorithm.
482 """Performs the simplemerge algorithm.
488
483
489 The merged result is written into `localctx`.
484 The merged result is written into `localctx`.
490 """
485 """
491
486
492 def readctx(ctx):
487 def readctx(ctx):
493 # Merges were always run in the working copy before, which means
488 # Merges were always run in the working copy before, which means
494 # they used decoded data, if the user defined any repository
489 # they used decoded data, if the user defined any repository
495 # filters.
490 # filters.
496 #
491 #
497 # Maintain that behavior today for BC, though perhaps in the future
492 # Maintain that behavior today for BC, though perhaps in the future
498 # it'd be worth considering whether merging encoded data (what the
493 # it'd be worth considering whether merging encoded data (what the
499 # repository usually sees) might be more useful.
494 # repository usually sees) might be more useful.
500 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
495 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
501
496
502 try:
497 try:
503 localtext = readctx(localctx)
498 localtext = readctx(localctx)
504 basetext = readctx(basectx)
499 basetext = readctx(basectx)
505 othertext = readctx(otherctx)
500 othertext = readctx(otherctx)
506 except error.Abort:
501 except error.Abort:
507 return 1
502 return 1
508
503
509 m3 = Merge3Text(basetext, localtext, othertext)
504 m3 = Merge3Text(basetext, localtext, othertext)
510 conflicts = False
505 conflicts = False
511 mode = opts.get('mode', b'merge')
506 mode = opts.get('mode', b'merge')
512 if mode == b'union':
507 if mode == b'union':
513 lines = _resolve(m3, (1, 2))
508 lines = _resolve(m3, (1, 2))
514 elif mode == b'local':
509 elif mode == b'local':
515 lines = _resolve(m3, (1,))
510 lines = _resolve(m3, (1,))
516 elif mode == b'other':
511 elif mode == b'other':
517 lines = _resolve(m3, (2,))
512 lines = _resolve(m3, (2,))
518 else:
513 else:
519 name_a, name_b, name_base = _picklabels(opts.get('label', []))
514 name_a, name_b, name_base = _picklabels(opts.get('label', []))
520 if mode == b'mergediff':
515 if mode == b'mergediff':
521 lines, conflicts = render_mergediff(m3, name_a, name_b, name_base)
516 lines, conflicts = render_mergediff(m3, name_a, name_b, name_base)
522 elif mode == b'merge3':
517 elif mode == b'merge3':
523 lines, conflicts = render_merge3(m3, name_a, name_b, name_base)
518 lines, conflicts = render_merge3(m3, name_a, name_b, name_base)
524 else:
519 else:
525 extrakwargs = {
520 lines, conflicts = render_minimized(m3, name_a, name_b)
526 'minimize': True,
527 }
528 lines, conflicts = render_markers(
529 m3, name_a=name_a, name_b=name_b, **extrakwargs
530 )
531
521
532 mergedtext = b''.join(lines)
522 mergedtext = b''.join(lines)
533 if opts.get('print'):
523 if opts.get('print'):
534 ui.fout.write(mergedtext)
524 ui.fout.write(mergedtext)
535 else:
525 else:
536 # localctx.flags() already has the merged flags (done in
526 # localctx.flags() already has the merged flags (done in
537 # mergestate.resolve())
527 # mergestate.resolve())
538 localctx.write(mergedtext, localctx.flags())
528 localctx.write(mergedtext, localctx.flags())
539
529
540 if conflicts:
530 if conflicts:
541 return 1
531 return 1
@@ -1,397 +1,397 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import unittest
18 import unittest
19 from mercurial import (
19 from mercurial import (
20 error,
20 error,
21 simplemerge,
21 simplemerge,
22 util,
22 util,
23 )
23 )
24
24
25 from mercurial.utils import stringutil
25 from mercurial.utils import stringutil
26
26
27 TestCase = unittest.TestCase
27 TestCase = unittest.TestCase
28 # bzr compatible interface, for the tests
28 # bzr compatible interface, for the tests
29 class Merge3(simplemerge.Merge3Text):
29 class Merge3(simplemerge.Merge3Text):
30 """3-way merge of texts.
30 """3-way merge of texts.
31
31
32 Given BASE, OTHER, THIS, tries to produce a combined text
32 Given BASE, OTHER, THIS, tries to produce a combined text
33 incorporating the changes from both BASE->OTHER and BASE->THIS.
33 incorporating the changes from both BASE->OTHER and BASE->THIS.
34 All three will typically be sequences of lines."""
34 All three will typically be sequences of lines."""
35
35
36 def __init__(self, base, a, b):
36 def __init__(self, base, a, b):
37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
40 if (
40 if (
41 stringutil.binary(basetext)
41 stringutil.binary(basetext)
42 or stringutil.binary(atext)
42 or stringutil.binary(atext)
43 or stringutil.binary(btext)
43 or stringutil.binary(btext)
44 ):
44 ):
45 raise error.Abort(b"don't know how to merge binary files")
45 raise error.Abort(b"don't know how to merge binary files")
46 simplemerge.Merge3Text.__init__(
46 simplemerge.Merge3Text.__init__(
47 self, basetext, atext, btext, base, a, b
47 self, basetext, atext, btext, base, a, b
48 )
48 )
49
49
50
50
51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
52
52
53
53
54 def split_lines(t):
54 def split_lines(t):
55 return util.stringio(t).readlines()
55 return util.stringio(t).readlines()
56
56
57
57
58 ############################################################
58 ############################################################
59 # test case data from the gnu diffutils manual
59 # test case data from the gnu diffutils manual
60 # common base
60 # common base
61 TZU = split_lines(
61 TZU = split_lines(
62 b""" The Nameless is the origin of Heaven and Earth;
62 b""" The Nameless is the origin of Heaven and Earth;
63 The named is the mother of all things.
63 The named is the mother of all things.
64
64
65 Therefore let there always be non-being,
65 Therefore let there always be non-being,
66 so we may see their subtlety,
66 so we may see their subtlety,
67 And let there always be being,
67 And let there always be being,
68 so we may see their outcome.
68 so we may see their outcome.
69 The two are the same,
69 The two are the same,
70 But after they are produced,
70 But after they are produced,
71 they have different names.
71 they have different names.
72 They both may be called deep and profound.
72 They both may be called deep and profound.
73 Deeper and more profound,
73 Deeper and more profound,
74 The door of all subtleties!
74 The door of all subtleties!
75 """
75 """
76 )
76 )
77
77
78 LAO = split_lines(
78 LAO = split_lines(
79 b""" The Way that can be told of is not the eternal Way;
79 b""" The Way that can be told of is not the eternal Way;
80 The name that can be named is not the eternal name.
80 The name that can be named is not the eternal name.
81 The Nameless is the origin of Heaven and Earth;
81 The Nameless is the origin of Heaven and Earth;
82 The Named is the mother of all things.
82 The Named is the mother of all things.
83 Therefore let there always be non-being,
83 Therefore let there always be non-being,
84 so we may see their subtlety,
84 so we may see their subtlety,
85 And let there always be being,
85 And let there always be being,
86 so we may see their outcome.
86 so we may see their outcome.
87 The two are the same,
87 The two are the same,
88 But after they are produced,
88 But after they are produced,
89 they have different names.
89 they have different names.
90 """
90 """
91 )
91 )
92
92
93
93
94 TAO = split_lines(
94 TAO = split_lines(
95 b""" The Way that can be told of is not the eternal Way;
95 b""" The Way that can be told of is not the eternal Way;
96 The name that can be named is not the eternal name.
96 The name that can be named is not the eternal name.
97 The Nameless is the origin of Heaven and Earth;
97 The Nameless is the origin of Heaven and Earth;
98 The named is the mother of all things.
98 The named is the mother of all things.
99
99
100 Therefore let there always be non-being,
100 Therefore let there always be non-being,
101 so we may see their subtlety,
101 so we may see their subtlety,
102 And let there always be being,
102 And let there always be being,
103 so we may see their result.
103 so we may see their result.
104 The two are the same,
104 The two are the same,
105 But after they are produced,
105 But after they are produced,
106 they have different names.
106 they have different names.
107
107
108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
109
109
110 """
110 """
111 )
111 )
112
112
113 MERGED_RESULT = split_lines(
113 MERGED_RESULT = split_lines(
114 b"""\
114 b"""\
115 The Way that can be told of is not the eternal Way;
115 The Way that can be told of is not the eternal Way;
116 The name that can be named is not the eternal name.
116 The name that can be named is not the eternal name.
117 The Nameless is the origin of Heaven and Earth;
117 The Nameless is the origin of Heaven and Earth;
118 The Named is the mother of all things.
118 The Named is the mother of all things.
119 Therefore let there always be non-being,
119 Therefore let there always be non-being,
120 so we may see their subtlety,
120 so we may see their subtlety,
121 And let there always be being,
121 And let there always be being,
122 so we may see their result.
122 so we may see their result.
123 The two are the same,
123 The two are the same,
124 But after they are produced,
124 But after they are produced,
125 they have different names.\
125 they have different names.\
126 \n<<<<<<< LAO\
126 \n<<<<<<< LAO\
127 \n=======
127 \n=======
128
128
129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
130 \
130 \
131 \n>>>>>>> TAO
131 \n>>>>>>> TAO
132 """
132 """
133 )
133 )
134
134
135
135
136 class TestMerge3(TestCase):
136 class TestMerge3(TestCase):
137 def log(self, msg):
137 def log(self, msg):
138 pass
138 pass
139
139
140 def test_no_changes(self):
140 def test_no_changes(self):
141 """No conflicts because nothing changed"""
141 """No conflicts because nothing changed"""
142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
143
143
144 self.assertEqual(
144 self.assertEqual(
145 list(m3.find_sync_regions()),
145 list(m3.find_sync_regions()),
146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
147 )
147 )
148
148
149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
150
150
151 self.assertEqual(
151 self.assertEqual(
152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
153 )
153 )
154
154
155 def test_front_insert(self):
155 def test_front_insert(self):
156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
157
157
158 # todo: should use a sentinel at end as from get_matching_blocks
158 # todo: should use a sentinel at end as from get_matching_blocks
159 # to match without zz
159 # to match without zz
160 self.assertEqual(
160 self.assertEqual(
161 list(m3.find_sync_regions()),
161 list(m3.find_sync_regions()),
162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
163 )
163 )
164
164
165 self.assertEqual(
165 self.assertEqual(
166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
167 )
167 )
168
168
169 self.assertEqual(
169 self.assertEqual(
170 list(m3.merge_groups()),
170 list(m3.merge_groups()),
171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
172 )
172 )
173
173
174 def test_null_insert(self):
174 def test_null_insert(self):
175 m3 = Merge3([], [b'aaa', b'bbb'], [])
175 m3 = Merge3([], [b'aaa', b'bbb'], [])
176 # todo: should use a sentinel at end as from get_matching_blocks
176 # todo: should use a sentinel at end as from get_matching_blocks
177 # to match without zz
177 # to match without zz
178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
179
179
180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
181
181
182 self.assertEqual(
182 self.assertEqual(
183 simplemerge.render_markers(m3), ([b'aaa', b'bbb'], False)
183 simplemerge.render_minimized(m3), ([b'aaa', b'bbb'], False)
184 )
184 )
185
185
186 def test_no_conflicts(self):
186 def test_no_conflicts(self):
187 """No conflicts because only one side changed"""
187 """No conflicts because only one side changed"""
188 m3 = Merge3(
188 m3 = Merge3(
189 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
189 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
190 )
190 )
191
191
192 self.assertEqual(
192 self.assertEqual(
193 list(m3.find_sync_regions()),
193 list(m3.find_sync_regions()),
194 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
194 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
195 )
195 )
196
196
197 self.assertEqual(
197 self.assertEqual(
198 list(m3.merge_regions()),
198 list(m3.merge_regions()),
199 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
199 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
200 )
200 )
201
201
202 def test_append_a(self):
202 def test_append_a(self):
203 m3 = Merge3(
203 m3 = Merge3(
204 [b'aaa\n', b'bbb\n'],
204 [b'aaa\n', b'bbb\n'],
205 [b'aaa\n', b'bbb\n', b'222\n'],
205 [b'aaa\n', b'bbb\n', b'222\n'],
206 [b'aaa\n', b'bbb\n'],
206 [b'aaa\n', b'bbb\n'],
207 )
207 )
208
208
209 self.assertEqual(
209 self.assertEqual(
210 b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
210 b''.join(simplemerge.render_minimized(m3)[0]), b'aaa\nbbb\n222\n'
211 )
211 )
212
212
213 def test_append_b(self):
213 def test_append_b(self):
214 m3 = Merge3(
214 m3 = Merge3(
215 [b'aaa\n', b'bbb\n'],
215 [b'aaa\n', b'bbb\n'],
216 [b'aaa\n', b'bbb\n'],
216 [b'aaa\n', b'bbb\n'],
217 [b'aaa\n', b'bbb\n', b'222\n'],
217 [b'aaa\n', b'bbb\n', b'222\n'],
218 )
218 )
219
219
220 self.assertEqual(
220 self.assertEqual(
221 b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
221 b''.join(simplemerge.render_minimized(m3)[0]), b'aaa\nbbb\n222\n'
222 )
222 )
223
223
224 def test_append_agreement(self):
224 def test_append_agreement(self):
225 m3 = Merge3(
225 m3 = Merge3(
226 [b'aaa\n', b'bbb\n'],
226 [b'aaa\n', b'bbb\n'],
227 [b'aaa\n', b'bbb\n', b'222\n'],
227 [b'aaa\n', b'bbb\n', b'222\n'],
228 [b'aaa\n', b'bbb\n', b'222\n'],
228 [b'aaa\n', b'bbb\n', b'222\n'],
229 )
229 )
230
230
231 self.assertEqual(
231 self.assertEqual(
232 b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
232 b''.join(simplemerge.render_minimized(m3)[0]), b'aaa\nbbb\n222\n'
233 )
233 )
234
234
235 def test_append_clash(self):
235 def test_append_clash(self):
236 m3 = Merge3(
236 m3 = Merge3(
237 [b'aaa\n', b'bbb\n'],
237 [b'aaa\n', b'bbb\n'],
238 [b'aaa\n', b'bbb\n', b'222\n'],
238 [b'aaa\n', b'bbb\n', b'222\n'],
239 [b'aaa\n', b'bbb\n', b'333\n'],
239 [b'aaa\n', b'bbb\n', b'333\n'],
240 )
240 )
241
241
242 ml, conflicts = simplemerge.render_markers(
242 ml, conflicts = simplemerge.render_minimized(
243 m3,
243 m3,
244 name_a=b'a',
244 name_a=b'a',
245 name_b=b'b',
245 name_b=b'b',
246 start_marker=b'<<',
246 start_marker=b'<<',
247 mid_marker=b'--',
247 mid_marker=b'--',
248 end_marker=b'>>',
248 end_marker=b'>>',
249 )
249 )
250 self.assertEqual(
250 self.assertEqual(
251 b''.join(ml),
251 b''.join(ml),
252 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
252 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
253 )
253 )
254
254
255 def test_insert_agreement(self):
255 def test_insert_agreement(self):
256 m3 = Merge3(
256 m3 = Merge3(
257 [b'aaa\n', b'bbb\n'],
257 [b'aaa\n', b'bbb\n'],
258 [b'aaa\n', b'222\n', b'bbb\n'],
258 [b'aaa\n', b'222\n', b'bbb\n'],
259 [b'aaa\n', b'222\n', b'bbb\n'],
259 [b'aaa\n', b'222\n', b'bbb\n'],
260 )
260 )
261
261
262 ml, conflicts = simplemerge.render_markers(
262 ml, conflicts = simplemerge.render_minimized(
263 m3,
263 m3,
264 name_a=b'a',
264 name_a=b'a',
265 name_b=b'b',
265 name_b=b'b',
266 start_marker=b'<<',
266 start_marker=b'<<',
267 mid_marker=b'--',
267 mid_marker=b'--',
268 end_marker=b'>>',
268 end_marker=b'>>',
269 )
269 )
270 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
270 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
271
271
272 def test_insert_clash(self):
272 def test_insert_clash(self):
273 """Both try to insert lines in the same place."""
273 """Both try to insert lines in the same place."""
274 m3 = Merge3(
274 m3 = Merge3(
275 [b'aaa\n', b'bbb\n'],
275 [b'aaa\n', b'bbb\n'],
276 [b'aaa\n', b'111\n', b'bbb\n'],
276 [b'aaa\n', b'111\n', b'bbb\n'],
277 [b'aaa\n', b'222\n', b'bbb\n'],
277 [b'aaa\n', b'222\n', b'bbb\n'],
278 )
278 )
279
279
280 self.assertEqual(
280 self.assertEqual(
281 list(m3.find_sync_regions()),
281 list(m3.find_sync_regions()),
282 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
282 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
283 )
283 )
284
284
285 self.assertEqual(
285 self.assertEqual(
286 list(m3.merge_regions()),
286 list(m3.merge_regions()),
287 [
287 [
288 (b'unchanged', 0, 1),
288 (b'unchanged', 0, 1),
289 (b'conflict', 1, 1, 1, 2, 1, 2),
289 (b'conflict', 1, 1, 1, 2, 1, 2),
290 (b'unchanged', 1, 2),
290 (b'unchanged', 1, 2),
291 ],
291 ],
292 )
292 )
293
293
294 self.assertEqual(
294 self.assertEqual(
295 list(m3.merge_groups()),
295 list(m3.merge_groups()),
296 [
296 [
297 (b'unchanged', [b'aaa\n']),
297 (b'unchanged', [b'aaa\n']),
298 (b'conflict', ([], [b'111\n'], [b'222\n'])),
298 (b'conflict', ([], [b'111\n'], [b'222\n'])),
299 (b'unchanged', [b'bbb\n']),
299 (b'unchanged', [b'bbb\n']),
300 ],
300 ],
301 )
301 )
302
302
303 ml, conflicts = simplemerge.render_markers(
303 ml, conflicts = simplemerge.render_minimized(
304 m3,
304 m3,
305 name_a=b'a',
305 name_a=b'a',
306 name_b=b'b',
306 name_b=b'b',
307 start_marker=b'<<',
307 start_marker=b'<<',
308 mid_marker=b'--',
308 mid_marker=b'--',
309 end_marker=b'>>',
309 end_marker=b'>>',
310 )
310 )
311 self.assertEqual(
311 self.assertEqual(
312 b''.join(ml),
312 b''.join(ml),
313 b'''aaa
313 b'''aaa
314 << a
314 << a
315 111
315 111
316 --
316 --
317 222
317 222
318 >> b
318 >> b
319 bbb
319 bbb
320 ''',
320 ''',
321 )
321 )
322
322
323 def test_replace_clash(self):
323 def test_replace_clash(self):
324 """Both try to insert lines in the same place."""
324 """Both try to insert lines in the same place."""
325 m3 = Merge3(
325 m3 = Merge3(
326 [b'aaa', b'000', b'bbb'],
326 [b'aaa', b'000', b'bbb'],
327 [b'aaa', b'111', b'bbb'],
327 [b'aaa', b'111', b'bbb'],
328 [b'aaa', b'222', b'bbb'],
328 [b'aaa', b'222', b'bbb'],
329 )
329 )
330
330
331 self.assertEqual(
331 self.assertEqual(
332 list(m3.find_sync_regions()),
332 list(m3.find_sync_regions()),
333 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
333 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
334 )
334 )
335
335
336 def test_replace_multi(self):
336 def test_replace_multi(self):
337 """Replacement with regions of different size."""
337 """Replacement with regions of different size."""
338 m3 = Merge3(
338 m3 = Merge3(
339 [b'aaa', b'000', b'000', b'bbb'],
339 [b'aaa', b'000', b'000', b'bbb'],
340 [b'aaa', b'111', b'111', b'111', b'bbb'],
340 [b'aaa', b'111', b'111', b'111', b'bbb'],
341 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
341 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
342 )
342 )
343
343
344 self.assertEqual(
344 self.assertEqual(
345 list(m3.find_sync_regions()),
345 list(m3.find_sync_regions()),
346 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
346 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
347 )
347 )
348
348
349 def test_merge_poem(self):
349 def test_merge_poem(self):
350 """Test case from diff3 manual"""
350 """Test case from diff3 manual"""
351 m3 = Merge3(TZU, LAO, TAO)
351 m3 = Merge3(TZU, LAO, TAO)
352 ml, conflicts = simplemerge.render_markers(m3, b'LAO', b'TAO')
352 ml, conflicts = simplemerge.render_minimized(m3, b'LAO', b'TAO')
353 self.log(b'merge result:')
353 self.log(b'merge result:')
354 self.log(b''.join(ml))
354 self.log(b''.join(ml))
355 self.assertEqual(ml, MERGED_RESULT)
355 self.assertEqual(ml, MERGED_RESULT)
356
356
357 def test_binary(self):
357 def test_binary(self):
358 with self.assertRaises(error.Abort):
358 with self.assertRaises(error.Abort):
359 Merge3([b'\x00'], [b'a'], [b'b'])
359 Merge3([b'\x00'], [b'a'], [b'b'])
360
360
361 def test_dos_text(self):
361 def test_dos_text(self):
362 base_text = b'a\r\n'
362 base_text = b'a\r\n'
363 this_text = b'b\r\n'
363 this_text = b'b\r\n'
364 other_text = b'c\r\n'
364 other_text = b'c\r\n'
365 m3 = Merge3(
365 m3 = Merge3(
366 base_text.splitlines(True),
366 base_text.splitlines(True),
367 other_text.splitlines(True),
367 other_text.splitlines(True),
368 this_text.splitlines(True),
368 this_text.splitlines(True),
369 )
369 )
370 m_lines, conflicts = simplemerge.render_markers(m3, b'OTHER', b'THIS')
370 m_lines, conflicts = simplemerge.render_minimized(m3, b'OTHER', b'THIS')
371 self.assertEqual(
371 self.assertEqual(
372 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
372 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
373 b'>>>>>>> THIS\r\n'.splitlines(True),
373 b'>>>>>>> THIS\r\n'.splitlines(True),
374 m_lines,
374 m_lines,
375 )
375 )
376
376
377 def test_mac_text(self):
377 def test_mac_text(self):
378 base_text = b'a\r'
378 base_text = b'a\r'
379 this_text = b'b\r'
379 this_text = b'b\r'
380 other_text = b'c\r'
380 other_text = b'c\r'
381 m3 = Merge3(
381 m3 = Merge3(
382 base_text.splitlines(True),
382 base_text.splitlines(True),
383 other_text.splitlines(True),
383 other_text.splitlines(True),
384 this_text.splitlines(True),
384 this_text.splitlines(True),
385 )
385 )
386 m_lines, conflicts = simplemerge.render_markers(m3, b'OTHER', b'THIS')
386 m_lines, conflicts = simplemerge.render_minimized(m3, b'OTHER', b'THIS')
387 self.assertEqual(
387 self.assertEqual(
388 b'<<<<<<< OTHER\rc\r=======\rb\r'
388 b'<<<<<<< OTHER\rc\r=======\rb\r'
389 b'>>>>>>> THIS\r'.splitlines(True),
389 b'>>>>>>> THIS\r'.splitlines(True),
390 m_lines,
390 m_lines,
391 )
391 )
392
392
393
393
394 if __name__ == '__main__':
394 if __name__ == '__main__':
395 import silenttestrunner
395 import silenttestrunner
396
396
397 silenttestrunner.main(__name__)
397 silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now