##// END OF EJS Templates
simplemerge: clarify names of functions that render conflict markers...
Martin von Zweigbergk -
r49407:2dbee604 default
parent child Browse files
Show More
@@ -1,4874 +1,4876 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 deltas as deltautil,
106 deltas as deltautil,
107 nodemap,
107 nodemap,
108 rewrite,
108 rewrite,
109 sidedata,
109 sidedata,
110 )
110 )
111
111
112 release = lockmod.release
112 release = lockmod.release
113
113
114 table = {}
114 table = {}
115 table.update(strip.command._table)
115 table.update(strip.command._table)
116 command = registrar.command(table)
116 command = registrar.command(table)
117
117
118
118
119 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
119 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
120 def debugancestor(ui, repo, *args):
120 def debugancestor(ui, repo, *args):
121 """find the ancestor revision of two revisions in a given index"""
121 """find the ancestor revision of two revisions in a given index"""
122 if len(args) == 3:
122 if len(args) == 3:
123 index, rev1, rev2 = args
123 index, rev1, rev2 = args
124 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
124 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
125 lookup = r.lookup
125 lookup = r.lookup
126 elif len(args) == 2:
126 elif len(args) == 2:
127 if not repo:
127 if not repo:
128 raise error.Abort(
128 raise error.Abort(
129 _(b'there is no Mercurial repository here (.hg not found)')
129 _(b'there is no Mercurial repository here (.hg not found)')
130 )
130 )
131 rev1, rev2 = args
131 rev1, rev2 = args
132 r = repo.changelog
132 r = repo.changelog
133 lookup = repo.lookup
133 lookup = repo.lookup
134 else:
134 else:
135 raise error.Abort(_(b'either two or three arguments required'))
135 raise error.Abort(_(b'either two or three arguments required'))
136 a = r.ancestor(lookup(rev1), lookup(rev2))
136 a = r.ancestor(lookup(rev1), lookup(rev2))
137 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
137 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
138
138
139
139
140 @command(b'debugantivirusrunning', [])
140 @command(b'debugantivirusrunning', [])
141 def debugantivirusrunning(ui, repo):
141 def debugantivirusrunning(ui, repo):
142 """attempt to trigger an antivirus scanner to see if one is active"""
142 """attempt to trigger an antivirus scanner to see if one is active"""
143 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
143 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
144 f.write(
144 f.write(
145 util.b85decode(
145 util.b85decode(
146 # This is a base85-armored version of the EICAR test file. See
146 # This is a base85-armored version of the EICAR test file. See
147 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
147 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
148 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
148 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
149 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
149 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
150 )
150 )
151 )
151 )
152 # Give an AV engine time to scan the file.
152 # Give an AV engine time to scan the file.
153 time.sleep(2)
153 time.sleep(2)
154 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
154 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155
155
156
156
157 @command(b'debugapplystreamclonebundle', [], b'FILE')
157 @command(b'debugapplystreamclonebundle', [], b'FILE')
158 def debugapplystreamclonebundle(ui, repo, fname):
158 def debugapplystreamclonebundle(ui, repo, fname):
159 """apply a stream clone bundle file"""
159 """apply a stream clone bundle file"""
160 f = hg.openpath(ui, fname)
160 f = hg.openpath(ui, fname)
161 gen = exchange.readbundle(ui, f, fname)
161 gen = exchange.readbundle(ui, f, fname)
162 gen.apply(repo)
162 gen.apply(repo)
163
163
164
164
165 @command(
165 @command(
166 b'debugbuilddag',
166 b'debugbuilddag',
167 [
167 [
168 (
168 (
169 b'm',
169 b'm',
170 b'mergeable-file',
170 b'mergeable-file',
171 None,
171 None,
172 _(b'add single file mergeable changes'),
172 _(b'add single file mergeable changes'),
173 ),
173 ),
174 (
174 (
175 b'o',
175 b'o',
176 b'overwritten-file',
176 b'overwritten-file',
177 None,
177 None,
178 _(b'add single file all revs overwrite'),
178 _(b'add single file all revs overwrite'),
179 ),
179 ),
180 (b'n', b'new-file', None, _(b'add new file at each rev')),
180 (b'n', b'new-file', None, _(b'add new file at each rev')),
181 ],
181 ],
182 _(b'[OPTION]... [TEXT]'),
182 _(b'[OPTION]... [TEXT]'),
183 )
183 )
184 def debugbuilddag(
184 def debugbuilddag(
185 ui,
185 ui,
186 repo,
186 repo,
187 text=None,
187 text=None,
188 mergeable_file=False,
188 mergeable_file=False,
189 overwritten_file=False,
189 overwritten_file=False,
190 new_file=False,
190 new_file=False,
191 ):
191 ):
192 """builds a repo with a given DAG from scratch in the current empty repo
192 """builds a repo with a given DAG from scratch in the current empty repo
193
193
194 The description of the DAG is read from stdin if not given on the
194 The description of the DAG is read from stdin if not given on the
195 command line.
195 command line.
196
196
197 Elements:
197 Elements:
198
198
199 - "+n" is a linear run of n nodes based on the current default parent
199 - "+n" is a linear run of n nodes based on the current default parent
200 - "." is a single node based on the current default parent
200 - "." is a single node based on the current default parent
201 - "$" resets the default parent to null (implied at the start);
201 - "$" resets the default parent to null (implied at the start);
202 otherwise the default parent is always the last node created
202 otherwise the default parent is always the last node created
203 - "<p" sets the default parent to the backref p
203 - "<p" sets the default parent to the backref p
204 - "*p" is a fork at parent p, which is a backref
204 - "*p" is a fork at parent p, which is a backref
205 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
205 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
206 - "/p2" is a merge of the preceding node and p2
206 - "/p2" is a merge of the preceding node and p2
207 - ":tag" defines a local tag for the preceding node
207 - ":tag" defines a local tag for the preceding node
208 - "@branch" sets the named branch for subsequent nodes
208 - "@branch" sets the named branch for subsequent nodes
209 - "#...\\n" is a comment up to the end of the line
209 - "#...\\n" is a comment up to the end of the line
210
210
211 Whitespace between the above elements is ignored.
211 Whitespace between the above elements is ignored.
212
212
213 A backref is either
213 A backref is either
214
214
215 - a number n, which references the node curr-n, where curr is the current
215 - a number n, which references the node curr-n, where curr is the current
216 node, or
216 node, or
217 - the name of a local tag you placed earlier using ":tag", or
217 - the name of a local tag you placed earlier using ":tag", or
218 - empty to denote the default parent.
218 - empty to denote the default parent.
219
219
220 All string valued-elements are either strictly alphanumeric, or must
220 All string valued-elements are either strictly alphanumeric, or must
221 be enclosed in double quotes ("..."), with "\\" as escape character.
221 be enclosed in double quotes ("..."), with "\\" as escape character.
222 """
222 """
223
223
224 if text is None:
224 if text is None:
225 ui.status(_(b"reading DAG from stdin\n"))
225 ui.status(_(b"reading DAG from stdin\n"))
226 text = ui.fin.read()
226 text = ui.fin.read()
227
227
228 cl = repo.changelog
228 cl = repo.changelog
229 if len(cl) > 0:
229 if len(cl) > 0:
230 raise error.Abort(_(b'repository is not empty'))
230 raise error.Abort(_(b'repository is not empty'))
231
231
232 # determine number of revs in DAG
232 # determine number of revs in DAG
233 total = 0
233 total = 0
234 for type, data in dagparser.parsedag(text):
234 for type, data in dagparser.parsedag(text):
235 if type == b'n':
235 if type == b'n':
236 total += 1
236 total += 1
237
237
238 if mergeable_file:
238 if mergeable_file:
239 linesperrev = 2
239 linesperrev = 2
240 # make a file with k lines per rev
240 # make a file with k lines per rev
241 initialmergedlines = [
241 initialmergedlines = [
242 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
242 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
243 ]
243 ]
244 initialmergedlines.append(b"")
244 initialmergedlines.append(b"")
245
245
246 tags = []
246 tags = []
247 progress = ui.makeprogress(
247 progress = ui.makeprogress(
248 _(b'building'), unit=_(b'revisions'), total=total
248 _(b'building'), unit=_(b'revisions'), total=total
249 )
249 )
250 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
250 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
251 at = -1
251 at = -1
252 atbranch = b'default'
252 atbranch = b'default'
253 nodeids = []
253 nodeids = []
254 id = 0
254 id = 0
255 progress.update(id)
255 progress.update(id)
256 for type, data in dagparser.parsedag(text):
256 for type, data in dagparser.parsedag(text):
257 if type == b'n':
257 if type == b'n':
258 ui.note((b'node %s\n' % pycompat.bytestr(data)))
258 ui.note((b'node %s\n' % pycompat.bytestr(data)))
259 id, ps = data
259 id, ps = data
260
260
261 files = []
261 files = []
262 filecontent = {}
262 filecontent = {}
263
263
264 p2 = None
264 p2 = None
265 if mergeable_file:
265 if mergeable_file:
266 fn = b"mf"
266 fn = b"mf"
267 p1 = repo[ps[0]]
267 p1 = repo[ps[0]]
268 if len(ps) > 1:
268 if len(ps) > 1:
269 p2 = repo[ps[1]]
269 p2 = repo[ps[1]]
270 pa = p1.ancestor(p2)
270 pa = p1.ancestor(p2)
271 base, local, other = [
271 base, local, other = [
272 x[fn].data() for x in (pa, p1, p2)
272 x[fn].data() for x in (pa, p1, p2)
273 ]
273 ]
274 m3 = simplemerge.Merge3Text(base, local, other)
274 m3 = simplemerge.Merge3Text(base, local, other)
275 ml = [l.strip() for l in simplemerge.merge_lines(m3)[0]]
275 ml = [
276 l.strip() for l in simplemerge.render_markers(m3)[0]
277 ]
276 ml.append(b"")
278 ml.append(b"")
277 elif at > 0:
279 elif at > 0:
278 ml = p1[fn].data().split(b"\n")
280 ml = p1[fn].data().split(b"\n")
279 else:
281 else:
280 ml = initialmergedlines
282 ml = initialmergedlines
281 ml[id * linesperrev] += b" r%i" % id
283 ml[id * linesperrev] += b" r%i" % id
282 mergedtext = b"\n".join(ml)
284 mergedtext = b"\n".join(ml)
283 files.append(fn)
285 files.append(fn)
284 filecontent[fn] = mergedtext
286 filecontent[fn] = mergedtext
285
287
286 if overwritten_file:
288 if overwritten_file:
287 fn = b"of"
289 fn = b"of"
288 files.append(fn)
290 files.append(fn)
289 filecontent[fn] = b"r%i\n" % id
291 filecontent[fn] = b"r%i\n" % id
290
292
291 if new_file:
293 if new_file:
292 fn = b"nf%i" % id
294 fn = b"nf%i" % id
293 files.append(fn)
295 files.append(fn)
294 filecontent[fn] = b"r%i\n" % id
296 filecontent[fn] = b"r%i\n" % id
295 if len(ps) > 1:
297 if len(ps) > 1:
296 if not p2:
298 if not p2:
297 p2 = repo[ps[1]]
299 p2 = repo[ps[1]]
298 for fn in p2:
300 for fn in p2:
299 if fn.startswith(b"nf"):
301 if fn.startswith(b"nf"):
300 files.append(fn)
302 files.append(fn)
301 filecontent[fn] = p2[fn].data()
303 filecontent[fn] = p2[fn].data()
302
304
303 def fctxfn(repo, cx, path):
305 def fctxfn(repo, cx, path):
304 if path in filecontent:
306 if path in filecontent:
305 return context.memfilectx(
307 return context.memfilectx(
306 repo, cx, path, filecontent[path]
308 repo, cx, path, filecontent[path]
307 )
309 )
308 return None
310 return None
309
311
310 if len(ps) == 0 or ps[0] < 0:
312 if len(ps) == 0 or ps[0] < 0:
311 pars = [None, None]
313 pars = [None, None]
312 elif len(ps) == 1:
314 elif len(ps) == 1:
313 pars = [nodeids[ps[0]], None]
315 pars = [nodeids[ps[0]], None]
314 else:
316 else:
315 pars = [nodeids[p] for p in ps]
317 pars = [nodeids[p] for p in ps]
316 cx = context.memctx(
318 cx = context.memctx(
317 repo,
319 repo,
318 pars,
320 pars,
319 b"r%i" % id,
321 b"r%i" % id,
320 files,
322 files,
321 fctxfn,
323 fctxfn,
322 date=(id, 0),
324 date=(id, 0),
323 user=b"debugbuilddag",
325 user=b"debugbuilddag",
324 extra={b'branch': atbranch},
326 extra={b'branch': atbranch},
325 )
327 )
326 nodeid = repo.commitctx(cx)
328 nodeid = repo.commitctx(cx)
327 nodeids.append(nodeid)
329 nodeids.append(nodeid)
328 at = id
330 at = id
329 elif type == b'l':
331 elif type == b'l':
330 id, name = data
332 id, name = data
331 ui.note((b'tag %s\n' % name))
333 ui.note((b'tag %s\n' % name))
332 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
334 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
333 elif type == b'a':
335 elif type == b'a':
334 ui.note((b'branch %s\n' % data))
336 ui.note((b'branch %s\n' % data))
335 atbranch = data
337 atbranch = data
336 progress.update(id)
338 progress.update(id)
337
339
338 if tags:
340 if tags:
339 repo.vfs.write(b"localtags", b"".join(tags))
341 repo.vfs.write(b"localtags", b"".join(tags))
340
342
341
343
342 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
344 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
343 indent_string = b' ' * indent
345 indent_string = b' ' * indent
344 if all:
346 if all:
345 ui.writenoi18n(
347 ui.writenoi18n(
346 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
348 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
347 % indent_string
349 % indent_string
348 )
350 )
349
351
350 def showchunks(named):
352 def showchunks(named):
351 ui.write(b"\n%s%s\n" % (indent_string, named))
353 ui.write(b"\n%s%s\n" % (indent_string, named))
352 for deltadata in gen.deltaiter():
354 for deltadata in gen.deltaiter():
353 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
355 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
354 ui.write(
356 ui.write(
355 b"%s%s %s %s %s %s %d\n"
357 b"%s%s %s %s %s %s %d\n"
356 % (
358 % (
357 indent_string,
359 indent_string,
358 hex(node),
360 hex(node),
359 hex(p1),
361 hex(p1),
360 hex(p2),
362 hex(p2),
361 hex(cs),
363 hex(cs),
362 hex(deltabase),
364 hex(deltabase),
363 len(delta),
365 len(delta),
364 )
366 )
365 )
367 )
366
368
367 gen.changelogheader()
369 gen.changelogheader()
368 showchunks(b"changelog")
370 showchunks(b"changelog")
369 gen.manifestheader()
371 gen.manifestheader()
370 showchunks(b"manifest")
372 showchunks(b"manifest")
371 for chunkdata in iter(gen.filelogheader, {}):
373 for chunkdata in iter(gen.filelogheader, {}):
372 fname = chunkdata[b'filename']
374 fname = chunkdata[b'filename']
373 showchunks(fname)
375 showchunks(fname)
374 else:
376 else:
375 if isinstance(gen, bundle2.unbundle20):
377 if isinstance(gen, bundle2.unbundle20):
376 raise error.Abort(_(b'use debugbundle2 for this file'))
378 raise error.Abort(_(b'use debugbundle2 for this file'))
377 gen.changelogheader()
379 gen.changelogheader()
378 for deltadata in gen.deltaiter():
380 for deltadata in gen.deltaiter():
379 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
381 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
380 ui.write(b"%s%s\n" % (indent_string, hex(node)))
382 ui.write(b"%s%s\n" % (indent_string, hex(node)))
381
383
382
384
383 def _debugobsmarkers(ui, part, indent=0, **opts):
385 def _debugobsmarkers(ui, part, indent=0, **opts):
384 """display version and markers contained in 'data'"""
386 """display version and markers contained in 'data'"""
385 opts = pycompat.byteskwargs(opts)
387 opts = pycompat.byteskwargs(opts)
386 data = part.read()
388 data = part.read()
387 indent_string = b' ' * indent
389 indent_string = b' ' * indent
388 try:
390 try:
389 version, markers = obsolete._readmarkers(data)
391 version, markers = obsolete._readmarkers(data)
390 except error.UnknownVersion as exc:
392 except error.UnknownVersion as exc:
391 msg = b"%sunsupported version: %s (%d bytes)\n"
393 msg = b"%sunsupported version: %s (%d bytes)\n"
392 msg %= indent_string, exc.version, len(data)
394 msg %= indent_string, exc.version, len(data)
393 ui.write(msg)
395 ui.write(msg)
394 else:
396 else:
395 msg = b"%sversion: %d (%d bytes)\n"
397 msg = b"%sversion: %d (%d bytes)\n"
396 msg %= indent_string, version, len(data)
398 msg %= indent_string, version, len(data)
397 ui.write(msg)
399 ui.write(msg)
398 fm = ui.formatter(b'debugobsolete', opts)
400 fm = ui.formatter(b'debugobsolete', opts)
399 for rawmarker in sorted(markers):
401 for rawmarker in sorted(markers):
400 m = obsutil.marker(None, rawmarker)
402 m = obsutil.marker(None, rawmarker)
401 fm.startitem()
403 fm.startitem()
402 fm.plain(indent_string)
404 fm.plain(indent_string)
403 cmdutil.showmarker(fm, m)
405 cmdutil.showmarker(fm, m)
404 fm.end()
406 fm.end()
405
407
406
408
407 def _debugphaseheads(ui, data, indent=0):
409 def _debugphaseheads(ui, data, indent=0):
408 """display version and markers contained in 'data'"""
410 """display version and markers contained in 'data'"""
409 indent_string = b' ' * indent
411 indent_string = b' ' * indent
410 headsbyphase = phases.binarydecode(data)
412 headsbyphase = phases.binarydecode(data)
411 for phase in phases.allphases:
413 for phase in phases.allphases:
412 for head in headsbyphase[phase]:
414 for head in headsbyphase[phase]:
413 ui.write(indent_string)
415 ui.write(indent_string)
414 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
416 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
415
417
416
418
417 def _quasirepr(thing):
419 def _quasirepr(thing):
418 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
420 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
419 return b'{%s}' % (
421 return b'{%s}' % (
420 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
422 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
421 )
423 )
422 return pycompat.bytestr(repr(thing))
424 return pycompat.bytestr(repr(thing))
423
425
424
426
425 def _debugbundle2(ui, gen, all=None, **opts):
427 def _debugbundle2(ui, gen, all=None, **opts):
426 """lists the contents of a bundle2"""
428 """lists the contents of a bundle2"""
427 if not isinstance(gen, bundle2.unbundle20):
429 if not isinstance(gen, bundle2.unbundle20):
428 raise error.Abort(_(b'not a bundle2 file'))
430 raise error.Abort(_(b'not a bundle2 file'))
429 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
431 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
430 parttypes = opts.get('part_type', [])
432 parttypes = opts.get('part_type', [])
431 for part in gen.iterparts():
433 for part in gen.iterparts():
432 if parttypes and part.type not in parttypes:
434 if parttypes and part.type not in parttypes:
433 continue
435 continue
434 msg = b'%s -- %s (mandatory: %r)\n'
436 msg = b'%s -- %s (mandatory: %r)\n'
435 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
437 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
436 if part.type == b'changegroup':
438 if part.type == b'changegroup':
437 version = part.params.get(b'version', b'01')
439 version = part.params.get(b'version', b'01')
438 cg = changegroup.getunbundler(version, part, b'UN')
440 cg = changegroup.getunbundler(version, part, b'UN')
439 if not ui.quiet:
441 if not ui.quiet:
440 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
442 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
441 if part.type == b'obsmarkers':
443 if part.type == b'obsmarkers':
442 if not ui.quiet:
444 if not ui.quiet:
443 _debugobsmarkers(ui, part, indent=4, **opts)
445 _debugobsmarkers(ui, part, indent=4, **opts)
444 if part.type == b'phase-heads':
446 if part.type == b'phase-heads':
445 if not ui.quiet:
447 if not ui.quiet:
446 _debugphaseheads(ui, part, indent=4)
448 _debugphaseheads(ui, part, indent=4)
447
449
448
450
449 @command(
451 @command(
450 b'debugbundle',
452 b'debugbundle',
451 [
453 [
452 (b'a', b'all', None, _(b'show all details')),
454 (b'a', b'all', None, _(b'show all details')),
453 (b'', b'part-type', [], _(b'show only the named part type')),
455 (b'', b'part-type', [], _(b'show only the named part type')),
454 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
456 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
455 ],
457 ],
456 _(b'FILE'),
458 _(b'FILE'),
457 norepo=True,
459 norepo=True,
458 )
460 )
459 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
461 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
460 """lists the contents of a bundle"""
462 """lists the contents of a bundle"""
461 with hg.openpath(ui, bundlepath) as f:
463 with hg.openpath(ui, bundlepath) as f:
462 if spec:
464 if spec:
463 spec = exchange.getbundlespec(ui, f)
465 spec = exchange.getbundlespec(ui, f)
464 ui.write(b'%s\n' % spec)
466 ui.write(b'%s\n' % spec)
465 return
467 return
466
468
467 gen = exchange.readbundle(ui, f, bundlepath)
469 gen = exchange.readbundle(ui, f, bundlepath)
468 if isinstance(gen, bundle2.unbundle20):
470 if isinstance(gen, bundle2.unbundle20):
469 return _debugbundle2(ui, gen, all=all, **opts)
471 return _debugbundle2(ui, gen, all=all, **opts)
470 _debugchangegroup(ui, gen, all=all, **opts)
472 _debugchangegroup(ui, gen, all=all, **opts)
471
473
472
474
473 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
475 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
474 def debugcapabilities(ui, path, **opts):
476 def debugcapabilities(ui, path, **opts):
475 """lists the capabilities of a remote peer"""
477 """lists the capabilities of a remote peer"""
476 opts = pycompat.byteskwargs(opts)
478 opts = pycompat.byteskwargs(opts)
477 peer = hg.peer(ui, opts, path)
479 peer = hg.peer(ui, opts, path)
478 try:
480 try:
479 caps = peer.capabilities()
481 caps = peer.capabilities()
480 ui.writenoi18n(b'Main capabilities:\n')
482 ui.writenoi18n(b'Main capabilities:\n')
481 for c in sorted(caps):
483 for c in sorted(caps):
482 ui.write(b' %s\n' % c)
484 ui.write(b' %s\n' % c)
483 b2caps = bundle2.bundle2caps(peer)
485 b2caps = bundle2.bundle2caps(peer)
484 if b2caps:
486 if b2caps:
485 ui.writenoi18n(b'Bundle2 capabilities:\n')
487 ui.writenoi18n(b'Bundle2 capabilities:\n')
486 for key, values in sorted(pycompat.iteritems(b2caps)):
488 for key, values in sorted(pycompat.iteritems(b2caps)):
487 ui.write(b' %s\n' % key)
489 ui.write(b' %s\n' % key)
488 for v in values:
490 for v in values:
489 ui.write(b' %s\n' % v)
491 ui.write(b' %s\n' % v)
490 finally:
492 finally:
491 peer.close()
493 peer.close()
492
494
493
495
494 @command(
496 @command(
495 b'debugchangedfiles',
497 b'debugchangedfiles',
496 [
498 [
497 (
499 (
498 b'',
500 b'',
499 b'compute',
501 b'compute',
500 False,
502 False,
501 b"compute information instead of reading it from storage",
503 b"compute information instead of reading it from storage",
502 ),
504 ),
503 ],
505 ],
504 b'REV',
506 b'REV',
505 )
507 )
506 def debugchangedfiles(ui, repo, rev, **opts):
508 def debugchangedfiles(ui, repo, rev, **opts):
507 """list the stored files changes for a revision"""
509 """list the stored files changes for a revision"""
508 ctx = logcmdutil.revsingle(repo, rev, None)
510 ctx = logcmdutil.revsingle(repo, rev, None)
509 files = None
511 files = None
510
512
511 if opts['compute']:
513 if opts['compute']:
512 files = metadata.compute_all_files_changes(ctx)
514 files = metadata.compute_all_files_changes(ctx)
513 else:
515 else:
514 sd = repo.changelog.sidedata(ctx.rev())
516 sd = repo.changelog.sidedata(ctx.rev())
515 files_block = sd.get(sidedata.SD_FILES)
517 files_block = sd.get(sidedata.SD_FILES)
516 if files_block is not None:
518 if files_block is not None:
517 files = metadata.decode_files_sidedata(sd)
519 files = metadata.decode_files_sidedata(sd)
518 if files is not None:
520 if files is not None:
519 for f in sorted(files.touched):
521 for f in sorted(files.touched):
520 if f in files.added:
522 if f in files.added:
521 action = b"added"
523 action = b"added"
522 elif f in files.removed:
524 elif f in files.removed:
523 action = b"removed"
525 action = b"removed"
524 elif f in files.merged:
526 elif f in files.merged:
525 action = b"merged"
527 action = b"merged"
526 elif f in files.salvaged:
528 elif f in files.salvaged:
527 action = b"salvaged"
529 action = b"salvaged"
528 else:
530 else:
529 action = b"touched"
531 action = b"touched"
530
532
531 copy_parent = b""
533 copy_parent = b""
532 copy_source = b""
534 copy_source = b""
533 if f in files.copied_from_p1:
535 if f in files.copied_from_p1:
534 copy_parent = b"p1"
536 copy_parent = b"p1"
535 copy_source = files.copied_from_p1[f]
537 copy_source = files.copied_from_p1[f]
536 elif f in files.copied_from_p2:
538 elif f in files.copied_from_p2:
537 copy_parent = b"p2"
539 copy_parent = b"p2"
538 copy_source = files.copied_from_p2[f]
540 copy_source = files.copied_from_p2[f]
539
541
540 data = (action, copy_parent, f, copy_source)
542 data = (action, copy_parent, f, copy_source)
541 template = b"%-8s %2s: %s, %s;\n"
543 template = b"%-8s %2s: %s, %s;\n"
542 ui.write(template % data)
544 ui.write(template % data)
543
545
544
546
545 @command(b'debugcheckstate', [], b'')
547 @command(b'debugcheckstate', [], b'')
546 def debugcheckstate(ui, repo):
548 def debugcheckstate(ui, repo):
547 """validate the correctness of the current dirstate"""
549 """validate the correctness of the current dirstate"""
548 parent1, parent2 = repo.dirstate.parents()
550 parent1, parent2 = repo.dirstate.parents()
549 m1 = repo[parent1].manifest()
551 m1 = repo[parent1].manifest()
550 m2 = repo[parent2].manifest()
552 m2 = repo[parent2].manifest()
551 errors = 0
553 errors = 0
552 for err in repo.dirstate.verify(m1, m2):
554 for err in repo.dirstate.verify(m1, m2):
553 ui.warn(err[0] % err[1:])
555 ui.warn(err[0] % err[1:])
554 errors += 1
556 errors += 1
555 if errors:
557 if errors:
556 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
558 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
557 raise error.Abort(errstr)
559 raise error.Abort(errstr)
558
560
559
561
560 @command(
562 @command(
561 b'debugcolor',
563 b'debugcolor',
562 [(b'', b'style', None, _(b'show all configured styles'))],
564 [(b'', b'style', None, _(b'show all configured styles'))],
563 b'hg debugcolor',
565 b'hg debugcolor',
564 )
566 )
565 def debugcolor(ui, repo, **opts):
567 def debugcolor(ui, repo, **opts):
566 """show available color, effects or style"""
568 """show available color, effects or style"""
567 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
569 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
568 if opts.get('style'):
570 if opts.get('style'):
569 return _debugdisplaystyle(ui)
571 return _debugdisplaystyle(ui)
570 else:
572 else:
571 return _debugdisplaycolor(ui)
573 return _debugdisplaycolor(ui)
572
574
573
575
574 def _debugdisplaycolor(ui):
576 def _debugdisplaycolor(ui):
575 ui = ui.copy()
577 ui = ui.copy()
576 ui._styles.clear()
578 ui._styles.clear()
577 for effect in color._activeeffects(ui).keys():
579 for effect in color._activeeffects(ui).keys():
578 ui._styles[effect] = effect
580 ui._styles[effect] = effect
579 if ui._terminfoparams:
581 if ui._terminfoparams:
580 for k, v in ui.configitems(b'color'):
582 for k, v in ui.configitems(b'color'):
581 if k.startswith(b'color.'):
583 if k.startswith(b'color.'):
582 ui._styles[k] = k[6:]
584 ui._styles[k] = k[6:]
583 elif k.startswith(b'terminfo.'):
585 elif k.startswith(b'terminfo.'):
584 ui._styles[k] = k[9:]
586 ui._styles[k] = k[9:]
585 ui.write(_(b'available colors:\n'))
587 ui.write(_(b'available colors:\n'))
586 # sort label with a '_' after the other to group '_background' entry.
588 # sort label with a '_' after the other to group '_background' entry.
587 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
589 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
588 for colorname, label in items:
590 for colorname, label in items:
589 ui.write(b'%s\n' % colorname, label=label)
591 ui.write(b'%s\n' % colorname, label=label)
590
592
591
593
def _debugdisplaystyle(ui):
    """List each configured style label together with its rendered effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad every label to the widest one so the effect columns line up.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * max(0, width - len(label))
            ui.write(b': ')
            ui.write(padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
605
607
606
608
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    # Stream bundles copy revlogs wholesale, so secret changesets cannot
    # be filtered out; warn rather than abort (see TODO above).
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # generatebundlev1 yields the repository requirements the bundle
    # depends on plus a generator of raw chunks to stream to disk.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
628
630
629
631
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Operate on an arbitrary revlog file instead of the changelog;
        # path auditing is disabled since the path is user-supplied.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # One 'n'(ode) event per revision with its non-null parents,
            # plus an 'l'(abel) event "rN" for revs listed on the command
            # line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each revision number to the tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a'(nnotation) event whenever the branch
                    # name changes from the previous revision.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as dagtext, wrapping long runs at 70 cols.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
699
701
700
702
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    implied_revlog = any(
        opts.get(key) for key in (b'changelog', b'manifest', b'dir')
    )
    if implied_revlog:
        # -c/-m/--dir select the revlog, so the single positional
        # argument is the revision; a second one is an error.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
716
718
717
719
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With --extended, also accept the extra date formats dateutil knows.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Also report whether the parsed timestamp falls inside RANGE.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
736
738
737
739
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    # Bind revlog accessors to locals; they are called once per revision.
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Compute (compsize, uncompsize, deltatype, chain, chainsize) for
        # ``rev`` from its index entry; e[1]/e[2] are the compressed and
        # uncompressed sizes, e[3] the delta base revision.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With general delta the base may be any earlier revision;
            # classify it against the parents (e[5]/e[6]) and neighbors.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without general delta, a revision is either a full text
            # (its own base) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Total compressed size of every revision in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # NOTE(review): column padding inside these header/format strings may
    # have been collapsed by the rendering this was extracted from --
    # confirm alignment against upstream before relying on the plain output.
    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Number chains consecutively by their (unique) base revision.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this
        # revision; the part of that span not in the chain is "extra".
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate a sparse read: slice the chain into the hunks that
            # would actually be fetched and total their on-disk extent.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
918
920
919
921
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling; any explicit use wins over
    # the default-on --dates flag.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) so equal mtimes stay deterministic.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # NOTE(review): the padding inside these literals may have been
        # collapsed by the rendering this was extracted from -- the
        # 'unset'/'set' strings should pad to the strftime column width.
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit; otherwise show permission bits with
        # the process umask applied.
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
974
976
975
977
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 dockets carry the ignore-pattern hash; v1 has no
    # such metadata, so print nothing in that case.  (The docstring used
    # to say "nothing for dirstate-v2", contradicting this check.)
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored at the tail of the docket's tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
990
992
991
993
992 @command(
994 @command(
993 b'debugdiscovery',
995 b'debugdiscovery',
994 [
996 [
995 (b'', b'old', None, _(b'use old-style discovery')),
997 (b'', b'old', None, _(b'use old-style discovery')),
996 (
998 (
997 b'',
999 b'',
998 b'nonheads',
1000 b'nonheads',
999 None,
1001 None,
1000 _(b'use old-style discovery with non-heads included'),
1002 _(b'use old-style discovery with non-heads included'),
1001 ),
1003 ),
1002 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1004 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1003 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1005 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1004 (
1006 (
1005 b'',
1007 b'',
1006 b'local-as-revs',
1008 b'local-as-revs',
1007 b"",
1009 b"",
1008 b'treat local has having these revisions only',
1010 b'treat local has having these revisions only',
1009 ),
1011 ),
1010 (
1012 (
1011 b'',
1013 b'',
1012 b'remote-as-revs',
1014 b'remote-as-revs',
1013 b"",
1015 b"",
1014 b'use local as remote, with only these these revisions',
1016 b'use local as remote, with only these these revisions',
1015 ),
1017 ),
1016 ]
1018 ]
1017 + cmdutil.remoteopts
1019 + cmdutil.remoteopts
1018 + cmdutil.formatteropts,
1020 + cmdutil.formatteropts,
1019 _(b'[--rev REV] [OTHER]'),
1021 _(b'[--rev REV] [OTHER]'),
1020 )
1022 )
1021 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1023 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1022 """runs the changeset discovery protocol in isolation
1024 """runs the changeset discovery protocol in isolation
1023
1025
1024 The local peer can be "replaced" by a subset of the local repository by
1026 The local peer can be "replaced" by a subset of the local repository by
1025 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1027 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1026 be "replaced" by a subset of the local repository using the
1028 be "replaced" by a subset of the local repository using the
1027 `--local-as-revs` flag. This is useful to efficiently debug pathological
1029 `--local-as-revs` flag. This is useful to efficiently debug pathological
1028 discovery situation.
1030 discovery situation.
1029
1031
1030 The following developer oriented config are relevant for people playing with this command:
1032 The following developer oriented config are relevant for people playing with this command:
1031
1033
1032 * devel.discovery.exchange-heads=True
1034 * devel.discovery.exchange-heads=True
1033
1035
1034 If False, the discovery will not start with
1036 If False, the discovery will not start with
1035 remote head fetching and local head querying.
1037 remote head fetching and local head querying.
1036
1038
1037 * devel.discovery.grow-sample=True
1039 * devel.discovery.grow-sample=True
1038
1040
1039 If False, the sample size used in set discovery will not be increased
1041 If False, the sample size used in set discovery will not be increased
1040 through the process
1042 through the process
1041
1043
1042 * devel.discovery.grow-sample.dynamic=True
1044 * devel.discovery.grow-sample.dynamic=True
1043
1045
1044 When discovery.grow-sample.dynamic is True, the default, the sample size is
1046 When discovery.grow-sample.dynamic is True, the default, the sample size is
1045 adapted to the shape of the undecided set (it is set to the max of:
1047 adapted to the shape of the undecided set (it is set to the max of:
1046 <target-size>, len(roots(undecided)), len(heads(undecided)
1048 <target-size>, len(roots(undecided)), len(heads(undecided)
1047
1049
1048 * devel.discovery.grow-sample.rate=1.05
1050 * devel.discovery.grow-sample.rate=1.05
1049
1051
1050 the rate at which the sample grow
1052 the rate at which the sample grow
1051
1053
1052 * devel.discovery.randomize=True
1054 * devel.discovery.randomize=True
1053
1055
1054 If andom sampling during discovery are deterministic. It is meant for
1056 If andom sampling during discovery are deterministic. It is meant for
1055 integration tests.
1057 integration tests.
1056
1058
1057 * devel.discovery.sample-size=200
1059 * devel.discovery.sample-size=200
1058
1060
1059 Control the initial size of the discovery sample
1061 Control the initial size of the discovery sample
1060
1062
1061 * devel.discovery.sample-size.initial=100
1063 * devel.discovery.sample-size.initial=100
1062
1064
1063 Control the initial size of the discovery for initial change
1065 Control the initial size of the discovery for initial change
1064 """
1066 """
1065 opts = pycompat.byteskwargs(opts)
1067 opts = pycompat.byteskwargs(opts)
1066 unfi = repo.unfiltered()
1068 unfi = repo.unfiltered()
1067
1069
1068 # setup potential extra filtering
1070 # setup potential extra filtering
1069 local_revs = opts[b"local_as_revs"]
1071 local_revs = opts[b"local_as_revs"]
1070 remote_revs = opts[b"remote_as_revs"]
1072 remote_revs = opts[b"remote_as_revs"]
1071
1073
1072 # make sure tests are repeatable
1074 # make sure tests are repeatable
1073 random.seed(int(opts[b'seed']))
1075 random.seed(int(opts[b'seed']))
1074
1076
1075 if not remote_revs:
1077 if not remote_revs:
1076
1078
1077 remoteurl, branches = urlutil.get_unique_pull_path(
1079 remoteurl, branches = urlutil.get_unique_pull_path(
1078 b'debugdiscovery', repo, ui, remoteurl
1080 b'debugdiscovery', repo, ui, remoteurl
1079 )
1081 )
1080 remote = hg.peer(repo, opts, remoteurl)
1082 remote = hg.peer(repo, opts, remoteurl)
1081 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1083 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1082 else:
1084 else:
1083 branches = (None, [])
1085 branches = (None, [])
1084 remote_filtered_revs = logcmdutil.revrange(
1086 remote_filtered_revs = logcmdutil.revrange(
1085 unfi, [b"not (::(%s))" % remote_revs]
1087 unfi, [b"not (::(%s))" % remote_revs]
1086 )
1088 )
1087 remote_filtered_revs = frozenset(remote_filtered_revs)
1089 remote_filtered_revs = frozenset(remote_filtered_revs)
1088
1090
1089 def remote_func(x):
1091 def remote_func(x):
1090 return remote_filtered_revs
1092 return remote_filtered_revs
1091
1093
1092 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1094 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1093
1095
1094 remote = repo.peer()
1096 remote = repo.peer()
1095 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1097 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1096
1098
1097 if local_revs:
1099 if local_revs:
1098 local_filtered_revs = logcmdutil.revrange(
1100 local_filtered_revs = logcmdutil.revrange(
1099 unfi, [b"not (::(%s))" % local_revs]
1101 unfi, [b"not (::(%s))" % local_revs]
1100 )
1102 )
1101 local_filtered_revs = frozenset(local_filtered_revs)
1103 local_filtered_revs = frozenset(local_filtered_revs)
1102
1104
1103 def local_func(x):
1105 def local_func(x):
1104 return local_filtered_revs
1106 return local_filtered_revs
1105
1107
1106 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1108 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1107 repo = repo.filtered(b'debug-discovery-local-filter')
1109 repo = repo.filtered(b'debug-discovery-local-filter')
1108
1110
1109 data = {}
1111 data = {}
1110 if opts.get(b'old'):
1112 if opts.get(b'old'):
1111
1113
1112 def doit(pushedrevs, remoteheads, remote=remote):
1114 def doit(pushedrevs, remoteheads, remote=remote):
1113 if not util.safehasattr(remote, b'branches'):
1115 if not util.safehasattr(remote, b'branches'):
1114 # enable in-client legacy support
1116 # enable in-client legacy support
1115 remote = localrepo.locallegacypeer(remote.local())
1117 remote = localrepo.locallegacypeer(remote.local())
1116 common, _in, hds = treediscovery.findcommonincoming(
1118 common, _in, hds = treediscovery.findcommonincoming(
1117 repo, remote, force=True, audit=data
1119 repo, remote, force=True, audit=data
1118 )
1120 )
1119 common = set(common)
1121 common = set(common)
1120 if not opts.get(b'nonheads'):
1122 if not opts.get(b'nonheads'):
1121 ui.writenoi18n(
1123 ui.writenoi18n(
1122 b"unpruned common: %s\n"
1124 b"unpruned common: %s\n"
1123 % b" ".join(sorted(short(n) for n in common))
1125 % b" ".join(sorted(short(n) for n in common))
1124 )
1126 )
1125
1127
1126 clnode = repo.changelog.node
1128 clnode = repo.changelog.node
1127 common = repo.revs(b'heads(::%ln)', common)
1129 common = repo.revs(b'heads(::%ln)', common)
1128 common = {clnode(r) for r in common}
1130 common = {clnode(r) for r in common}
1129 return common, hds
1131 return common, hds
1130
1132
1131 else:
1133 else:
1132
1134
1133 def doit(pushedrevs, remoteheads, remote=remote):
1135 def doit(pushedrevs, remoteheads, remote=remote):
1134 nodes = None
1136 nodes = None
1135 if pushedrevs:
1137 if pushedrevs:
1136 revs = logcmdutil.revrange(repo, pushedrevs)
1138 revs = logcmdutil.revrange(repo, pushedrevs)
1137 nodes = [repo[r].node() for r in revs]
1139 nodes = [repo[r].node() for r in revs]
1138 common, any, hds = setdiscovery.findcommonheads(
1140 common, any, hds = setdiscovery.findcommonheads(
1139 ui, repo, remote, ancestorsof=nodes, audit=data
1141 ui, repo, remote, ancestorsof=nodes, audit=data
1140 )
1142 )
1141 return common, hds
1143 return common, hds
1142
1144
1143 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1145 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1144 localrevs = opts[b'rev']
1146 localrevs = opts[b'rev']
1145
1147
1146 fm = ui.formatter(b'debugdiscovery', opts)
1148 fm = ui.formatter(b'debugdiscovery', opts)
1147 if fm.strict_format:
1149 if fm.strict_format:
1148
1150
1149 @contextlib.contextmanager
1151 @contextlib.contextmanager
1150 def may_capture_output():
1152 def may_capture_output():
1151 ui.pushbuffer()
1153 ui.pushbuffer()
1152 yield
1154 yield
1153 data[b'output'] = ui.popbuffer()
1155 data[b'output'] = ui.popbuffer()
1154
1156
1155 else:
1157 else:
1156 may_capture_output = util.nullcontextmanager
1158 may_capture_output = util.nullcontextmanager
1157 with may_capture_output():
1159 with may_capture_output():
1158 with util.timedcm('debug-discovery') as t:
1160 with util.timedcm('debug-discovery') as t:
1159 common, hds = doit(localrevs, remoterevs)
1161 common, hds = doit(localrevs, remoterevs)
1160
1162
1161 # compute all statistics
1163 # compute all statistics
1162 heads_common = set(common)
1164 heads_common = set(common)
1163 heads_remote = set(hds)
1165 heads_remote = set(hds)
1164 heads_local = set(repo.heads())
1166 heads_local = set(repo.heads())
1165 # note: they cannot be a local or remote head that is in common and not
1167 # note: they cannot be a local or remote head that is in common and not
1166 # itself a head of common.
1168 # itself a head of common.
1167 heads_common_local = heads_common & heads_local
1169 heads_common_local = heads_common & heads_local
1168 heads_common_remote = heads_common & heads_remote
1170 heads_common_remote = heads_common & heads_remote
1169 heads_common_both = heads_common & heads_remote & heads_local
1171 heads_common_both = heads_common & heads_remote & heads_local
1170
1172
1171 all = repo.revs(b'all()')
1173 all = repo.revs(b'all()')
1172 common = repo.revs(b'::%ln', common)
1174 common = repo.revs(b'::%ln', common)
1173 roots_common = repo.revs(b'roots(::%ld)', common)
1175 roots_common = repo.revs(b'roots(::%ld)', common)
1174 missing = repo.revs(b'not ::%ld', common)
1176 missing = repo.revs(b'not ::%ld', common)
1175 heads_missing = repo.revs(b'heads(%ld)', missing)
1177 heads_missing = repo.revs(b'heads(%ld)', missing)
1176 roots_missing = repo.revs(b'roots(%ld)', missing)
1178 roots_missing = repo.revs(b'roots(%ld)', missing)
1177 assert len(common) + len(missing) == len(all)
1179 assert len(common) + len(missing) == len(all)
1178
1180
1179 initial_undecided = repo.revs(
1181 initial_undecided = repo.revs(
1180 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1182 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1181 )
1183 )
1182 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1184 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1183 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1185 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1184 common_initial_undecided = initial_undecided & common
1186 common_initial_undecided = initial_undecided & common
1185 missing_initial_undecided = initial_undecided & missing
1187 missing_initial_undecided = initial_undecided & missing
1186
1188
1187 data[b'elapsed'] = t.elapsed
1189 data[b'elapsed'] = t.elapsed
1188 data[b'nb-common-heads'] = len(heads_common)
1190 data[b'nb-common-heads'] = len(heads_common)
1189 data[b'nb-common-heads-local'] = len(heads_common_local)
1191 data[b'nb-common-heads-local'] = len(heads_common_local)
1190 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1192 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1191 data[b'nb-common-heads-both'] = len(heads_common_both)
1193 data[b'nb-common-heads-both'] = len(heads_common_both)
1192 data[b'nb-common-roots'] = len(roots_common)
1194 data[b'nb-common-roots'] = len(roots_common)
1193 data[b'nb-head-local'] = len(heads_local)
1195 data[b'nb-head-local'] = len(heads_local)
1194 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1196 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1195 data[b'nb-head-remote'] = len(heads_remote)
1197 data[b'nb-head-remote'] = len(heads_remote)
1196 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1198 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1197 heads_common_remote
1199 heads_common_remote
1198 )
1200 )
1199 data[b'nb-revs'] = len(all)
1201 data[b'nb-revs'] = len(all)
1200 data[b'nb-revs-common'] = len(common)
1202 data[b'nb-revs-common'] = len(common)
1201 data[b'nb-revs-missing'] = len(missing)
1203 data[b'nb-revs-missing'] = len(missing)
1202 data[b'nb-missing-heads'] = len(heads_missing)
1204 data[b'nb-missing-heads'] = len(heads_missing)
1203 data[b'nb-missing-roots'] = len(roots_missing)
1205 data[b'nb-missing-roots'] = len(roots_missing)
1204 data[b'nb-ini_und'] = len(initial_undecided)
1206 data[b'nb-ini_und'] = len(initial_undecided)
1205 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1207 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1206 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1208 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1207 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1209 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1208 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1210 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1209
1211
1210 fm.startitem()
1212 fm.startitem()
1211 fm.data(**pycompat.strkwargs(data))
1213 fm.data(**pycompat.strkwargs(data))
1212 # display discovery summary
1214 # display discovery summary
1213 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1215 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1214 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1216 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1215 fm.plain(b"heads summary:\n")
1217 fm.plain(b"heads summary:\n")
1216 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1218 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1217 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1219 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1218 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1220 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1219 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1221 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1220 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1222 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1221 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1223 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1222 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1224 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1223 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1225 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1224 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1226 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1225 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1227 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1226 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1228 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1227 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1229 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1228 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1230 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1229 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1231 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1230 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1232 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1231 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1233 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1232 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1234 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1233 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1235 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1234 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1236 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1235 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1237 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1236 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1238 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1237 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1239 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1238
1240
1239 if ui.verbose:
1241 if ui.verbose:
1240 fm.plain(
1242 fm.plain(
1241 b"common heads: %s\n"
1243 b"common heads: %s\n"
1242 % b" ".join(sorted(short(n) for n in heads_common))
1244 % b" ".join(sorted(short(n) for n in heads_common))
1243 )
1245 )
1244 fm.end()
1246 fm.end()
1245
1247
1246
1248
_chunksize = 4 << 10  # copy data in 4 KiB chunks


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is fetched with Mercurial's URL handling
    (proxy/auth configuration applies) and streamed in ``_chunksize``
    pieces either to ``ui`` or, when ``--output`` is given, to that file.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # previously the source handle was never closed and leaked;
        # release it once the copy is done (or fails)
        fh.close()
1272
1274
1273
1275
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)

        # figure out where the extension was loaded from, when possible
        extsource = None
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable

        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # report the most recent Mercurial version it was tested with
                fm.plain(b' (%s!)\n' % exttestedwith[-1])

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1335
1337
1336
1338
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # transformation pipeline applied to the parsed expression, in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {stagename for stagename, _func in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for stagename, transform in stages:
        tree = transform(tree)
        if stagename in showalways:
            # the bare --verbose display omits the stage header for "parsed"
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for rev in repo:
            revctx = repo[rev]
            files.update(revctx.files())
            files.update(revctx.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if m(f):
            ui.write(b"%s\n" % f)
1432
1434
1433
1435
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # writing a report and consuming one (or doing a dry run) are exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1506
1508
1507
1509
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad so the value columns line up across all rows
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes pass through untouched; anything else becomes yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick color labels depending on how the repo value compares to
        # the current config and to the Mercurial default
        if repovalue != configvalue:
            state = b'mismatchconfig'
        elif repovalue != fv.default:
            state = b'mismatchdefault'
        else:
            state = b'uptodate'
        namelabel = b'formatvariant.name.%s' % state
        repolabel = b'formatvariant.repo.%s' % state

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        configlabel = (
            b'formatvariant.config.special'
            if fv.default != configvalue
            else b'formatvariant.config.default'
        )
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1578
1580
1579
1581
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # same rendering as the old `flag and b'yes' or b'no'` idiom
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe with a throwaway file; may fail e.g. on a read-only path
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1602
1604
1603
1605
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # assemble the getbundle() keyword arguments from the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1650
1652
1651
1653
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    # The dirstate's combined ignore matcher: a callable taking a repo-relative
    # path and returning whether it is ignored.
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None  # the ignored path (the file itself or a parent dir)
            ignoredata = None  # (ignore file, line number, rule text) for it
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # Not directly ignored: a file is also ignored when any of
                    # its containing directories matches an ignore rule.
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1700
1702
1701
1703
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # Full hashes in debug mode, abbreviated ones otherwise.
    shortfn = hex if ui.debugflag else short

    # Column width for the node-id fields: measure one rendered node id,
    # keeping the default of 12 when the store holds no revisions.
    idlen = 12
    for sample in store:
        idlen = len(shortfn(store.node(sample)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1741
1743
1742
1744
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # One "parent -> child" edge per revision; the first parent is always
    # emitted, a second-parent edge only when it is not the null node.
    for rev in store:
        parents = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1761
1763
1762
1764
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Attribute names must be native str: on Python 3, getattr() (which
    # safehasattr wraps) raises TypeError for a bytes name, so the previous
    # b'stats' would crash here instead of detecting the attribute. Other
    # safehasattr calls in this module already use str names (e.g. '__file__').
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1772
1774
1773
1775
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Running total of detected problems; also the command's return value.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer-style) builds have no os.__file__; report the
        # executable itself instead
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    # NOTE: this deliberately rebinds the 'rustext' name from the module
    # reference above to a bool derived from the module policy.
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        # fp is unused; only the resolved map file path (m) matters here
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    # p is cleared above on any template failure, so this counts all of:
    # no template dir, missing default template, broken default template
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing 'vi' default is only warned about; an explicitly configured
    # but missing editor counts as a problem
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2072
2074
2073
2075
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # Render each answer as a single character: '1' for known, '0' for not.
    rendered = b"".join(b"1" if f else b"0" for f in flags)
    ui.write(b"%s\n" % rendered)
2087
2089
2088
2090
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias: forwards everything to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2093
2095
2094
2096
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forcibly remove lock files without any staleness checking (DANGEROUS).
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Acquire the requested lock(s) non-blocking, hold them until the user
    # answers the prompt (or the process is interrupted), then release them.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if locks:
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Reporting mode: describe the state of each lock.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, else 0."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock content is "host:pid"; split only on the first
                    # colon so unexpected extra colons in the payload don't
                    # raise ValueError on unpacking.
                    host, pid = locker.split(b':', 1)
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2206
2208
2207
2209
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation carries a fulltext cache; abort
        # with a clear message instead of an AttributeError traceback.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No --clear/--add: report the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2281
2283
2282
2284
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mirrors the formatter structure built below:
        # commits, per-file state (plus per-file extras), then global extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two merged commits (local/other), with optional conflict labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the record layout depends on the state kind.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras for files that are not part of the merge state proper.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2390
2392
2391
2393
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names get special treatment below: historically only open
    # branches were listed, so the generic 'branches' namespace is skipped
    # here and open branch heads are added explicitly afterwards.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # With no arguments, an empty prefix matches every known name.
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2414
2416
2415
2417
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the native (Rust) serializer when the index provides one;
        # otherwise fall back to the pure-Python implementation.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # Guard the percentage computation: a zero-length data file
            # would otherwise raise ZeroDivisionError.
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2477
2479
2478
2480
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into binary, rejecting anything shorter.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: --delete removes markers by their display indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: a precursor argument creates a new marker.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parents can only be recorded for changesets we know.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    # Mode 3: no arguments — list markers (optionally filtered by --rev).
    else:
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so iterate all
            # markers while only displaying the selected subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2628
2630
2629
2631
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # One line per copy, rendered as "source -> destination".
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2642
2644
2643
2645
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Renamed from a copy-pasted 'debugp1copies': the old name shadowed the
    # real debugp1copies at module level. The command itself was always
    # registered as 'debugp2copies' via the decorator, so CLI behavior is
    # unchanged.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2656
2658
2657
2659
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec; 'acceptable' is a
        # string of dirstate state characters a file must be in to match.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # The dirstate always stores '/'-separated paths; translate when the
        # platform separator differs.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator so only the
                # next segment is offered.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted dirstate states from the filter flags; with no
    # flags, accept everything ('nmar').
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2726
2728
2727
2729
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    from_ctx = scmutil.revsingle(repo, rev1)
    to_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(from_ctx, pats, opts)
    # Copies keyed by destination path; emit them in sorted destination
    # order as "source -> destination".
    copymap = copies.pathcopies(from_ctx, to_ctx, matcher)
    for dst in sorted(copymap):
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2741
2743
2742
2744
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is unconditionally enabled; the log lines are
    # only displayed when --debug is in effect.
    config_overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(config_overrides):
        peer = hg.peer(ui, {}, path)

    try:
        # Query the peer before emitting anything so a failure happens
        # before partial output.
        is_local = peer.local() is not None
        is_pushable = peer.canpush()

        yes, no = _(b'yes'), _(b'no')
        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if is_local else no))
        ui.write(_(b'pushable: %s\n') % (yes if is_pushable else no))
    finally:
        peer.close()
2766
2768
2767
2769
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool wins over every other source; surface it in verbose mode.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for relpath in ctx.walk(matcher):
            fctx = ctx[relpath]
            # Unless --debug is active, suppress the chatter produced
            # while probing the merge-tool configuration for this file.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    relpath,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (relpath, tool))
2852
2854
2853
2855
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace,
            # escaping both sides so binary values stay printable.
            listing = target.listkeys(namespace)
            for k, v in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: ask the peer to move the key from old to new.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
    finally:
        target.close()
2889
2891
2890
2892
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector comparison data for two revisions

    Resolves both revisions, builds their pvecs and prints each vector,
    the two depths, and a delta/hamming/distance/relation summary line.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: the four relations above are expected to be
        # exhaustive, but previously `rel` stayed unbound here and the
        # final write below raised NameError. Report "unknown" instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2917
2919
2918
2920
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        changedfiles = None
        if opts.get('minimal'):
            # Restrict the rebuild to files whose tracked state disagrees
            # between the dirstate and the target manifest (see docstring).
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            missingfromds = inmanifest - indirstate
            notadded = {
                f
                for f in indirstate - inmanifest
                if not ds.get_entry(f).added
            }
            changedfiles = missingfromds | notadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2966
2968
2967
2969
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # All the work lives in the repair module; --only-data restricts the
    # scan to data (.d) files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2984
2986
2985
2987
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # filelog().renamed() yields (source path, source filenode) for a
        # copy/rename, or a false value otherwise.
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not copysource:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = copysource
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, srcpath, hex(srcnode))
            )
3005
3007
3006
3008
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable, comparable output.
    for req in sorted(repo.requirements):
        ui.write(b"%s\n" % req)
3012
3014
3013
3015
3014 @command(
3016 @command(
3015 b'debugrevlog',
3017 b'debugrevlog',
3016 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3018 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3017 _(b'-c|-m|FILE'),
3019 _(b'-c|-m|FILE'),
3018 optionalrepo=True,
3020 optionalrepo=True,
3019 )
3021 )
3020 def debugrevlog(ui, repo, file_=None, **opts):
3022 def debugrevlog(ui, repo, file_=None, **opts):
3021 """show data and statistics about a revlog"""
3023 """show data and statistics about a revlog"""
3022 opts = pycompat.byteskwargs(opts)
3024 opts = pycompat.byteskwargs(opts)
3023 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3025 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3024
3026
3025 if opts.get(b"dump"):
3027 if opts.get(b"dump"):
3026 numrevs = len(r)
3028 numrevs = len(r)
3027 ui.write(
3029 ui.write(
3028 (
3030 (
3029 b"# rev p1rev p2rev start end deltastart base p1 p2"
3031 b"# rev p1rev p2rev start end deltastart base p1 p2"
3030 b" rawsize totalsize compression heads chainlen\n"
3032 b" rawsize totalsize compression heads chainlen\n"
3031 )
3033 )
3032 )
3034 )
3033 ts = 0
3035 ts = 0
3034 heads = set()
3036 heads = set()
3035
3037
3036 for rev in pycompat.xrange(numrevs):
3038 for rev in pycompat.xrange(numrevs):
3037 dbase = r.deltaparent(rev)
3039 dbase = r.deltaparent(rev)
3038 if dbase == -1:
3040 if dbase == -1:
3039 dbase = rev
3041 dbase = rev
3040 cbase = r.chainbase(rev)
3042 cbase = r.chainbase(rev)
3041 clen = r.chainlen(rev)
3043 clen = r.chainlen(rev)
3042 p1, p2 = r.parentrevs(rev)
3044 p1, p2 = r.parentrevs(rev)
3043 rs = r.rawsize(rev)
3045 rs = r.rawsize(rev)
3044 ts = ts + rs
3046 ts = ts + rs
3045 heads -= set(r.parentrevs(rev))
3047 heads -= set(r.parentrevs(rev))
3046 heads.add(rev)
3048 heads.add(rev)
3047 try:
3049 try:
3048 compression = ts / r.end(rev)
3050 compression = ts / r.end(rev)
3049 except ZeroDivisionError:
3051 except ZeroDivisionError:
3050 compression = 0
3052 compression = 0
3051 ui.write(
3053 ui.write(
3052 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3054 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3053 b"%11d %5d %8d\n"
3055 b"%11d %5d %8d\n"
3054 % (
3056 % (
3055 rev,
3057 rev,
3056 p1,
3058 p1,
3057 p2,
3059 p2,
3058 r.start(rev),
3060 r.start(rev),
3059 r.end(rev),
3061 r.end(rev),
3060 r.start(dbase),
3062 r.start(dbase),
3061 r.start(cbase),
3063 r.start(cbase),
3062 r.start(p1),
3064 r.start(p1),
3063 r.start(p2),
3065 r.start(p2),
3064 rs,
3066 rs,
3065 ts,
3067 ts,
3066 compression,
3068 compression,
3067 len(heads),
3069 len(heads),
3068 clen,
3070 clen,
3069 )
3071 )
3070 )
3072 )
3071 return 0
3073 return 0
3072
3074
3073 format = r._format_version
3075 format = r._format_version
3074 v = r._format_flags
3076 v = r._format_flags
3075 flags = []
3077 flags = []
3076 gdelta = False
3078 gdelta = False
3077 if v & revlog.FLAG_INLINE_DATA:
3079 if v & revlog.FLAG_INLINE_DATA:
3078 flags.append(b'inline')
3080 flags.append(b'inline')
3079 if v & revlog.FLAG_GENERALDELTA:
3081 if v & revlog.FLAG_GENERALDELTA:
3080 gdelta = True
3082 gdelta = True
3081 flags.append(b'generaldelta')
3083 flags.append(b'generaldelta')
3082 if not flags:
3084 if not flags:
3083 flags = [b'(none)']
3085 flags = [b'(none)']
3084
3086
3085 ### tracks merge vs single parent
3087 ### tracks merge vs single parent
3086 nummerges = 0
3088 nummerges = 0
3087
3089
3088 ### tracks ways the "delta" are build
3090 ### tracks ways the "delta" are build
3089 # nodelta
3091 # nodelta
3090 numempty = 0
3092 numempty = 0
3091 numemptytext = 0
3093 numemptytext = 0
3092 numemptydelta = 0
3094 numemptydelta = 0
3093 # full file content
3095 # full file content
3094 numfull = 0
3096 numfull = 0
3095 # intermediate snapshot against a prior snapshot
3097 # intermediate snapshot against a prior snapshot
3096 numsemi = 0
3098 numsemi = 0
3097 # snapshot count per depth
3099 # snapshot count per depth
3098 numsnapdepth = collections.defaultdict(lambda: 0)
3100 numsnapdepth = collections.defaultdict(lambda: 0)
3099 # delta against previous revision
3101 # delta against previous revision
3100 numprev = 0
3102 numprev = 0
3101 # delta against first or second parent (not prev)
3103 # delta against first or second parent (not prev)
3102 nump1 = 0
3104 nump1 = 0
3103 nump2 = 0
3105 nump2 = 0
3104 # delta against neither prev nor parents
3106 # delta against neither prev nor parents
3105 numother = 0
3107 numother = 0
3106 # delta against prev that are also first or second parent
3108 # delta against prev that are also first or second parent
3107 # (details of `numprev`)
3109 # (details of `numprev`)
3108 nump1prev = 0
3110 nump1prev = 0
3109 nump2prev = 0
3111 nump2prev = 0
3110
3112
3111 # data about delta chain of each revs
3113 # data about delta chain of each revs
3112 chainlengths = []
3114 chainlengths = []
3113 chainbases = []
3115 chainbases = []
3114 chainspans = []
3116 chainspans = []
3115
3117
3116 # data about each revision
3118 # data about each revision
3117 datasize = [None, 0, 0]
3119 datasize = [None, 0, 0]
3118 fullsize = [None, 0, 0]
3120 fullsize = [None, 0, 0]
3119 semisize = [None, 0, 0]
3121 semisize = [None, 0, 0]
3120 # snapshot count per depth
3122 # snapshot count per depth
3121 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3123 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3122 deltasize = [None, 0, 0]
3124 deltasize = [None, 0, 0]
3123 chunktypecounts = {}
3125 chunktypecounts = {}
3124 chunktypesizes = {}
3126 chunktypesizes = {}
3125
3127
3126 def addsize(size, l):
3128 def addsize(size, l):
3127 if l[0] is None or size < l[0]:
3129 if l[0] is None or size < l[0]:
3128 l[0] = size
3130 l[0] = size
3129 if size > l[1]:
3131 if size > l[1]:
3130 l[1] = size
3132 l[1] = size
3131 l[2] += size
3133 l[2] += size
3132
3134
3133 numrevs = len(r)
3135 numrevs = len(r)
3134 for rev in pycompat.xrange(numrevs):
3136 for rev in pycompat.xrange(numrevs):
3135 p1, p2 = r.parentrevs(rev)
3137 p1, p2 = r.parentrevs(rev)
3136 delta = r.deltaparent(rev)
3138 delta = r.deltaparent(rev)
3137 if format > 0:
3139 if format > 0:
3138 addsize(r.rawsize(rev), datasize)
3140 addsize(r.rawsize(rev), datasize)
3139 if p2 != nullrev:
3141 if p2 != nullrev:
3140 nummerges += 1
3142 nummerges += 1
3141 size = r.length(rev)
3143 size = r.length(rev)
3142 if delta == nullrev:
3144 if delta == nullrev:
3143 chainlengths.append(0)
3145 chainlengths.append(0)
3144 chainbases.append(r.start(rev))
3146 chainbases.append(r.start(rev))
3145 chainspans.append(size)
3147 chainspans.append(size)
3146 if size == 0:
3148 if size == 0:
3147 numempty += 1
3149 numempty += 1
3148 numemptytext += 1
3150 numemptytext += 1
3149 else:
3151 else:
3150 numfull += 1
3152 numfull += 1
3151 numsnapdepth[0] += 1
3153 numsnapdepth[0] += 1
3152 addsize(size, fullsize)
3154 addsize(size, fullsize)
3153 addsize(size, snapsizedepth[0])
3155 addsize(size, snapsizedepth[0])
3154 else:
3156 else:
3155 chainlengths.append(chainlengths[delta] + 1)
3157 chainlengths.append(chainlengths[delta] + 1)
3156 baseaddr = chainbases[delta]
3158 baseaddr = chainbases[delta]
3157 revaddr = r.start(rev)
3159 revaddr = r.start(rev)
3158 chainbases.append(baseaddr)
3160 chainbases.append(baseaddr)
3159 chainspans.append((revaddr - baseaddr) + size)
3161 chainspans.append((revaddr - baseaddr) + size)
3160 if size == 0:
3162 if size == 0:
3161 numempty += 1
3163 numempty += 1
3162 numemptydelta += 1
3164 numemptydelta += 1
3163 elif r.issnapshot(rev):
3165 elif r.issnapshot(rev):
3164 addsize(size, semisize)
3166 addsize(size, semisize)
3165 numsemi += 1
3167 numsemi += 1
3166 depth = r.snapshotdepth(rev)
3168 depth = r.snapshotdepth(rev)
3167 numsnapdepth[depth] += 1
3169 numsnapdepth[depth] += 1
3168 addsize(size, snapsizedepth[depth])
3170 addsize(size, snapsizedepth[depth])
3169 else:
3171 else:
3170 addsize(size, deltasize)
3172 addsize(size, deltasize)
3171 if delta == rev - 1:
3173 if delta == rev - 1:
3172 numprev += 1
3174 numprev += 1
3173 if delta == p1:
3175 if delta == p1:
3174 nump1prev += 1
3176 nump1prev += 1
3175 elif delta == p2:
3177 elif delta == p2:
3176 nump2prev += 1
3178 nump2prev += 1
3177 elif delta == p1:
3179 elif delta == p1:
3178 nump1 += 1
3180 nump1 += 1
3179 elif delta == p2:
3181 elif delta == p2:
3180 nump2 += 1
3182 nump2 += 1
3181 elif delta != nullrev:
3183 elif delta != nullrev:
3182 numother += 1
3184 numother += 1
3183
3185
3184 # Obtain data on the raw chunks in the revlog.
3186 # Obtain data on the raw chunks in the revlog.
3185 if util.safehasattr(r, b'_getsegmentforrevs'):
3187 if util.safehasattr(r, b'_getsegmentforrevs'):
3186 segment = r._getsegmentforrevs(rev, rev)[1]
3188 segment = r._getsegmentforrevs(rev, rev)[1]
3187 else:
3189 else:
3188 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3190 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3189 if segment:
3191 if segment:
3190 chunktype = bytes(segment[0:1])
3192 chunktype = bytes(segment[0:1])
3191 else:
3193 else:
3192 chunktype = b'empty'
3194 chunktype = b'empty'
3193
3195
3194 if chunktype not in chunktypecounts:
3196 if chunktype not in chunktypecounts:
3195 chunktypecounts[chunktype] = 0
3197 chunktypecounts[chunktype] = 0
3196 chunktypesizes[chunktype] = 0
3198 chunktypesizes[chunktype] = 0
3197
3199
3198 chunktypecounts[chunktype] += 1
3200 chunktypecounts[chunktype] += 1
3199 chunktypesizes[chunktype] += size
3201 chunktypesizes[chunktype] += size
3200
3202
3201 # Adjust size min value for empty cases
3203 # Adjust size min value for empty cases
3202 for size in (datasize, fullsize, semisize, deltasize):
3204 for size in (datasize, fullsize, semisize, deltasize):
3203 if size[0] is None:
3205 if size[0] is None:
3204 size[0] = 0
3206 size[0] = 0
3205
3207
3206 numdeltas = numrevs - numfull - numempty - numsemi
3208 numdeltas = numrevs - numfull - numempty - numsemi
3207 numoprev = numprev - nump1prev - nump2prev
3209 numoprev = numprev - nump1prev - nump2prev
3208 totalrawsize = datasize[2]
3210 totalrawsize = datasize[2]
3209 datasize[2] /= numrevs
3211 datasize[2] /= numrevs
3210 fulltotal = fullsize[2]
3212 fulltotal = fullsize[2]
3211 if numfull == 0:
3213 if numfull == 0:
3212 fullsize[2] = 0
3214 fullsize[2] = 0
3213 else:
3215 else:
3214 fullsize[2] /= numfull
3216 fullsize[2] /= numfull
3215 semitotal = semisize[2]
3217 semitotal = semisize[2]
3216 snaptotal = {}
3218 snaptotal = {}
3217 if numsemi > 0:
3219 if numsemi > 0:
3218 semisize[2] /= numsemi
3220 semisize[2] /= numsemi
3219 for depth in snapsizedepth:
3221 for depth in snapsizedepth:
3220 snaptotal[depth] = snapsizedepth[depth][2]
3222 snaptotal[depth] = snapsizedepth[depth][2]
3221 snapsizedepth[depth][2] /= numsnapdepth[depth]
3223 snapsizedepth[depth][2] /= numsnapdepth[depth]
3222
3224
3223 deltatotal = deltasize[2]
3225 deltatotal = deltasize[2]
3224 if numdeltas > 0:
3226 if numdeltas > 0:
3225 deltasize[2] /= numdeltas
3227 deltasize[2] /= numdeltas
3226 totalsize = fulltotal + semitotal + deltatotal
3228 totalsize = fulltotal + semitotal + deltatotal
3227 avgchainlen = sum(chainlengths) / numrevs
3229 avgchainlen = sum(chainlengths) / numrevs
3228 maxchainlen = max(chainlengths)
3230 maxchainlen = max(chainlengths)
3229 maxchainspan = max(chainspans)
3231 maxchainspan = max(chainspans)
3230 compratio = 1
3232 compratio = 1
3231 if totalsize:
3233 if totalsize:
3232 compratio = totalrawsize / totalsize
3234 compratio = totalrawsize / totalsize
3233
3235
3234 basedfmtstr = b'%%%dd\n'
3236 basedfmtstr = b'%%%dd\n'
3235 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3237 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3236
3238
3237 def dfmtstr(max):
3239 def dfmtstr(max):
3238 return basedfmtstr % len(str(max))
3240 return basedfmtstr % len(str(max))
3239
3241
3240 def pcfmtstr(max, padding=0):
3242 def pcfmtstr(max, padding=0):
3241 return basepcfmtstr % (len(str(max)), b' ' * padding)
3243 return basepcfmtstr % (len(str(max)), b' ' * padding)
3242
3244
3243 def pcfmt(value, total):
3245 def pcfmt(value, total):
3244 if total:
3246 if total:
3245 return (value, 100 * float(value) / total)
3247 return (value, 100 * float(value) / total)
3246 else:
3248 else:
3247 return value, 100.0
3249 return value, 100.0
3248
3250
3249 ui.writenoi18n(b'format : %d\n' % format)
3251 ui.writenoi18n(b'format : %d\n' % format)
3250 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3252 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3251
3253
3252 ui.write(b'\n')
3254 ui.write(b'\n')
3253 fmt = pcfmtstr(totalsize)
3255 fmt = pcfmtstr(totalsize)
3254 fmt2 = dfmtstr(totalsize)
3256 fmt2 = dfmtstr(totalsize)
3255 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3257 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3256 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3258 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3257 ui.writenoi18n(
3259 ui.writenoi18n(
3258 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3260 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3259 )
3261 )
3260 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3262 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3261 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3263 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3262 ui.writenoi18n(
3264 ui.writenoi18n(
3263 b' text : '
3265 b' text : '
3264 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3266 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3265 )
3267 )
3266 ui.writenoi18n(
3268 ui.writenoi18n(
3267 b' delta : '
3269 b' delta : '
3268 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3270 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3269 )
3271 )
3270 ui.writenoi18n(
3272 ui.writenoi18n(
3271 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3273 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3272 )
3274 )
3273 for depth in sorted(numsnapdepth):
3275 for depth in sorted(numsnapdepth):
3274 ui.write(
3276 ui.write(
3275 (b' lvl-%-3d : ' % depth)
3277 (b' lvl-%-3d : ' % depth)
3276 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3278 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3277 )
3279 )
3278 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3280 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3279 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3281 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3280 ui.writenoi18n(
3282 ui.writenoi18n(
3281 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3283 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3282 )
3284 )
3283 for depth in sorted(numsnapdepth):
3285 for depth in sorted(numsnapdepth):
3284 ui.write(
3286 ui.write(
3285 (b' lvl-%-3d : ' % depth)
3287 (b' lvl-%-3d : ' % depth)
3286 + fmt % pcfmt(snaptotal[depth], totalsize)
3288 + fmt % pcfmt(snaptotal[depth], totalsize)
3287 )
3289 )
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3290 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3289
3291
3290 def fmtchunktype(chunktype):
3292 def fmtchunktype(chunktype):
3291 if chunktype == b'empty':
3293 if chunktype == b'empty':
3292 return b' %s : ' % chunktype
3294 return b' %s : ' % chunktype
3293 elif chunktype in pycompat.bytestr(string.ascii_letters):
3295 elif chunktype in pycompat.bytestr(string.ascii_letters):
3294 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3296 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3295 else:
3297 else:
3296 return b' 0x%s : ' % hex(chunktype)
3298 return b' 0x%s : ' % hex(chunktype)
3297
3299
3298 ui.write(b'\n')
3300 ui.write(b'\n')
3299 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3301 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3300 for chunktype in sorted(chunktypecounts):
3302 for chunktype in sorted(chunktypecounts):
3301 ui.write(fmtchunktype(chunktype))
3303 ui.write(fmtchunktype(chunktype))
3302 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3304 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3303 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3305 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3304 for chunktype in sorted(chunktypecounts):
3306 for chunktype in sorted(chunktypecounts):
3305 ui.write(fmtchunktype(chunktype))
3307 ui.write(fmtchunktype(chunktype))
3306 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3308 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3307
3309
3308 ui.write(b'\n')
3310 ui.write(b'\n')
3309 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3311 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3310 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3312 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3311 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3313 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3312 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3314 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3313 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3315 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3314
3316
3315 if format > 0:
3317 if format > 0:
3316 ui.write(b'\n')
3318 ui.write(b'\n')
3317 ui.writenoi18n(
3319 ui.writenoi18n(
3318 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3320 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3319 % tuple(datasize)
3321 % tuple(datasize)
3320 )
3322 )
3321 ui.writenoi18n(
3323 ui.writenoi18n(
3322 b'full revision size (min/max/avg) : %d / %d / %d\n'
3324 b'full revision size (min/max/avg) : %d / %d / %d\n'
3323 % tuple(fullsize)
3325 % tuple(fullsize)
3324 )
3326 )
3325 ui.writenoi18n(
3327 ui.writenoi18n(
3326 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3328 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3327 % tuple(semisize)
3329 % tuple(semisize)
3328 )
3330 )
3329 for depth in sorted(snapsizedepth):
3331 for depth in sorted(snapsizedepth):
3330 if depth == 0:
3332 if depth == 0:
3331 continue
3333 continue
3332 ui.writenoi18n(
3334 ui.writenoi18n(
3333 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3335 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3334 % ((depth,) + tuple(snapsizedepth[depth]))
3336 % ((depth,) + tuple(snapsizedepth[depth]))
3335 )
3337 )
3336 ui.writenoi18n(
3338 ui.writenoi18n(
3337 b'delta size (min/max/avg) : %d / %d / %d\n'
3339 b'delta size (min/max/avg) : %d / %d / %d\n'
3338 % tuple(deltasize)
3340 % tuple(deltasize)
3339 )
3341 )
3340
3342
3341 if numdeltas > 0:
3343 if numdeltas > 0:
3342 ui.write(b'\n')
3344 ui.write(b'\n')
3343 fmt = pcfmtstr(numdeltas)
3345 fmt = pcfmtstr(numdeltas)
3344 fmt2 = pcfmtstr(numdeltas, 4)
3346 fmt2 = pcfmtstr(numdeltas, 4)
3345 ui.writenoi18n(
3347 ui.writenoi18n(
3346 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3348 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3347 )
3349 )
3348 if numprev > 0:
3350 if numprev > 0:
3349 ui.writenoi18n(
3351 ui.writenoi18n(
3350 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3352 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3351 )
3353 )
3352 ui.writenoi18n(
3354 ui.writenoi18n(
3353 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3355 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3354 )
3356 )
3355 ui.writenoi18n(
3357 ui.writenoi18n(
3356 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3358 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3357 )
3359 )
3358 if gdelta:
3360 if gdelta:
3359 ui.writenoi18n(
3361 ui.writenoi18n(
3360 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3362 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3361 )
3363 )
3362 ui.writenoi18n(
3364 ui.writenoi18n(
3363 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3365 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3364 )
3366 )
3365 ui.writenoi18n(
3367 ui.writenoi18n(
3366 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3368 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3367 )
3369 )
3368
3370
3369
3371
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # Only the two historical index layouts are supported.
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # With --debug print full hashes, otherwise the abbreviated form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # All node hashes have the same printed width; one sample is enough.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column headers matching the row layout written below.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Unresolvable parents: fall back to null parents so the
                # dump can still proceed over a damaged revlog.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not hashes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3483
3485
3484
3486
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of tree transformations; each stage consumes the previous
    # stage's output tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages printed unconditionally vs. only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            # Suppress the stage label for the implicit --verbose dump of
            # the parsed tree.
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and compare
        # the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-like listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3616
3618
3617
3619
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Validate the option combination up front before touching any fds.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    logiofd = opts[b'logiofd']
    if logiofd:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(logiofd), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(logiofd), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3666
3668
3667
3669
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; an omitted second parent defaults
    # to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Take the wlock since we are mutating dirstate parents.
    with repo.wlock():
        repo.setparents(node1, node2)
3695
3697
3696
3698
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no file argument: the sole positional
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: report the actual command name (was b'debugdata',
            # copy-pasted from the debugdata command).
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries sorted by key; values only with --verbose.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3723
3725
3724
3726
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known default port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # The module-level ssl.wrap_socket() helper is deprecated and was
    # removed in Python 3.12; build an explicit SSLContext instead.
    # Verification is deliberately disabled (CERT_NONE, no hostname
    # check), matching the previous cert_reqs=ssl.CERT_NONE behavior:
    # we only want the peer's certificate chain, not a trust decision.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form for the Windows API.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call allows Windows to build/install the chain.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3796
3798
3797
3799
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundles, newest first, so --recover tries the
    # most recent backup containing the requested changeset.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize options that getremotechanges would otherwise act on.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Render the changesets of one bundle, honoring --newest-first,
        # --limit and --no-merges from the shared log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we don't have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style chatter while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the bundle inside a single lock + transaction; stop
                # at the first bundle that contains the requested node.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print a timestamp header per bundle, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always release the temporary bundle repo resources.
            cleanupfn()
3938
3940
3939
3941
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the requested (or working) revision and dump the subrepo
    # state it records, one "path/source/revision" group per subrepo.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3951
3953
3952
3954
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily so the REPL machinery is only loaded when used.
    import code

    local_namespace = {
        'ui': ui,
        'repo': repo,
    }
    code.interact(local=local_namespace)
3968
3970
3969
3971
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successors = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successors:
            if succsset:
                # One indented line per successors set, short hashes
                # separated by single spaces.
                rendered = b' '.join(short(node) for node in succsset)
                ui.write(b' %s\n' % rendered)
            else:
                ui.write(b'\n')
4024
4026
4025
4027
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        # Classify the cached .hgtags filenode: a real node (possibly
        # unknown to the filelog), an absent entry, or an invalid one.
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4044
4046
4045
4047
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # b'ui' is reserved and rejected alongside empty keys.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4109
4111
4110
4112
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # Substitute a marker when the ui layer returned no response at all.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4125
4127
4126
4128
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever the prompt machinery produced.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4139
4141
4140
4142
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks so every cache can be
    # rebuilt consistently, then request a refresh of all cache kinds.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4146
4148
4147
4149
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # The signature default is None (the command framework passes []), so
    # guard before building the set: set(None) would raise TypeError when
    # this function is invoked directly without an optimize argument.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize or ()), backup=backup, **opts
    )
4197
4199
4198
4200
4199 @command(
4201 @command(
4200 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4202 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4201 )
4203 )
4202 def debugwalk(ui, repo, *pats, **opts):
4204 def debugwalk(ui, repo, *pats, **opts):
4203 """show how files match on given patterns"""
4205 """show how files match on given patterns"""
4204 opts = pycompat.byteskwargs(opts)
4206 opts = pycompat.byteskwargs(opts)
4205 m = scmutil.match(repo[None], pats, opts)
4207 m = scmutil.match(repo[None], pats, opts)
4206 if ui.verbose:
4208 if ui.verbose:
4207 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4209 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4208 items = list(repo[None].walk(m))
4210 items = list(repo[None].walk(m))
4209 if not items:
4211 if not items:
4210 return
4212 return
4211 f = lambda fn: fn
4213 f = lambda fn: fn
4212 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4214 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4213 f = lambda fn: util.normpath(fn)
4215 f = lambda fn: util.normpath(fn)
4214 fmt = b'f %%-%ds %%-%ds %%s' % (
4216 fmt = b'f %%-%ds %%-%ds %%s' % (
4215 max([len(abs) for abs in items]),
4217 max([len(abs) for abs in items]),
4216 max([len(repo.pathto(abs)) for abs in items]),
4218 max([len(repo.pathto(abs)) for abs in items]),
4217 )
4219 )
4218 for abs in items:
4220 for abs in items:
4219 line = fmt % (
4221 line = fmt % (
4220 abs,
4222 abs,
4221 f(repo.pathto(abs)),
4223 f(repo.pathto(abs)),
4222 m.exact(abs) and b'exact' or b'',
4224 m.exact(abs) and b'exact' or b'',
4223 )
4225 )
4224 ui.write(b"%s\n" % line.rstrip())
4226 ui.write(b"%s\n" % line.rstrip())
4225
4227
4226
4228
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Render any divergent nodes as "hex (phase)" pairs, with a
        # trailing space so the final format string stays well-spaced.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = b' '.join(
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            )
            dnodes = rendered + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4244
4246
4245
4247
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire-protocol command against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the test options remain.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only options that were actually given a value.
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4276
4278
4277
4279
4278 def _parsewirelangblocks(fh):
4280 def _parsewirelangblocks(fh):
4279 activeaction = None
4281 activeaction = None
4280 blocklines = []
4282 blocklines = []
4281 lastindent = 0
4283 lastindent = 0
4282
4284
4283 for line in fh:
4285 for line in fh:
4284 line = line.rstrip()
4286 line = line.rstrip()
4285 if not line:
4287 if not line:
4286 continue
4288 continue
4287
4289
4288 if line.startswith(b'#'):
4290 if line.startswith(b'#'):
4289 continue
4291 continue
4290
4292
4291 if not line.startswith(b' '):
4293 if not line.startswith(b' '):
4292 # New block. Flush previous one.
4294 # New block. Flush previous one.
4293 if activeaction:
4295 if activeaction:
4294 yield activeaction, blocklines
4296 yield activeaction, blocklines
4295
4297
4296 activeaction = line
4298 activeaction = line
4297 blocklines = []
4299 blocklines = []
4298 lastindent = 0
4300 lastindent = 0
4299 continue
4301 continue
4300
4302
4301 # Else we start with an indent.
4303 # Else we start with an indent.
4302
4304
4303 if not activeaction:
4305 if not activeaction:
4304 raise error.Abort(_(b'indented line outside of block'))
4306 raise error.Abort(_(b'indented line outside of block'))
4305
4307
4306 indent = len(line) - len(line.lstrip())
4308 indent = len(line) - len(line.lstrip())
4307
4309
4308 # If this line is indented more than the last line, concatenate it.
4310 # If this line is indented more than the last line, concatenate it.
4309 if indent > lastindent and blocklines:
4311 if indent > lastindent and blocklines:
4310 blocklines[-1] += line.lstrip()
4312 blocklines[-1] += line.lstrip()
4311 else:
4313 else:
4312 blocklines.append(line)
4314 blocklines.append(line)
4313 lastindent = indent
4315 lastindent = indent
4314
4316
4315 # Flush last block.
4317 # Flush last block.
4316 if activeaction:
4318 if activeaction:
4317 yield activeaction, blocklines
4319 yield activeaction, blocklines
4318
4320
4319
4321
4320 @command(
4322 @command(
4321 b'debugwireproto',
4323 b'debugwireproto',
4322 [
4324 [
4323 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4325 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4324 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4326 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4325 (
4327 (
4326 b'',
4328 b'',
4327 b'noreadstderr',
4329 b'noreadstderr',
4328 False,
4330 False,
4329 _(b'do not read from stderr of the remote'),
4331 _(b'do not read from stderr of the remote'),
4330 ),
4332 ),
4331 (
4333 (
4332 b'',
4334 b'',
4333 b'nologhandshake',
4335 b'nologhandshake',
4334 False,
4336 False,
4335 _(b'do not log I/O related to the peer handshake'),
4337 _(b'do not log I/O related to the peer handshake'),
4336 ),
4338 ),
4337 ]
4339 ]
4338 + cmdutil.remoteopts,
4340 + cmdutil.remoteopts,
4339 _(b'[PATH]'),
4341 _(b'[PATH]'),
4340 optionalrepo=True,
4342 optionalrepo=True,
4341 )
4343 )
4342 def debugwireproto(ui, repo, path=None, **opts):
4344 def debugwireproto(ui, repo, path=None, **opts):
4343 """send wire protocol commands to a server
4345 """send wire protocol commands to a server
4344
4346
4345 This command can be used to issue wire protocol commands to remote
4347 This command can be used to issue wire protocol commands to remote
4346 peers and to debug the raw data being exchanged.
4348 peers and to debug the raw data being exchanged.
4347
4349
4348 ``--localssh`` will start an SSH server against the current repository
4350 ``--localssh`` will start an SSH server against the current repository
4349 and connect to that. By default, the connection will perform a handshake
4351 and connect to that. By default, the connection will perform a handshake
4350 and establish an appropriate peer instance.
4352 and establish an appropriate peer instance.
4351
4353
4352 ``--peer`` can be used to bypass the handshake protocol and construct a
4354 ``--peer`` can be used to bypass the handshake protocol and construct a
4353 peer instance using the specified class type. Valid values are ``raw``,
4355 peer instance using the specified class type. Valid values are ``raw``,
4354 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4356 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4355 don't support higher-level command actions.
4357 don't support higher-level command actions.
4356
4358
4357 ``--noreadstderr`` can be used to disable automatic reading from stderr
4359 ``--noreadstderr`` can be used to disable automatic reading from stderr
4358 of the peer (for SSH connections only). Disabling automatic reading of
4360 of the peer (for SSH connections only). Disabling automatic reading of
4359 stderr is useful for making output more deterministic.
4361 stderr is useful for making output more deterministic.
4360
4362
4361 Commands are issued via a mini language which is specified via stdin.
4363 Commands are issued via a mini language which is specified via stdin.
4362 The language consists of individual actions to perform. An action is
4364 The language consists of individual actions to perform. An action is
4363 defined by a block. A block is defined as a line with no leading
4365 defined by a block. A block is defined as a line with no leading
4364 space followed by 0 or more lines with leading space. Blocks are
4366 space followed by 0 or more lines with leading space. Blocks are
4365 effectively a high-level command with additional metadata.
4367 effectively a high-level command with additional metadata.
4366
4368
4367 Lines beginning with ``#`` are ignored.
4369 Lines beginning with ``#`` are ignored.
4368
4370
4369 The following sections denote available actions.
4371 The following sections denote available actions.
4370
4372
4371 raw
4373 raw
4372 ---
4374 ---
4373
4375
4374 Send raw data to the server.
4376 Send raw data to the server.
4375
4377
4376 The block payload contains the raw data to send as one atomic send
4378 The block payload contains the raw data to send as one atomic send
4377 operation. The data may not actually be delivered in a single system
4379 operation. The data may not actually be delivered in a single system
4378 call: it depends on the abilities of the transport being used.
4380 call: it depends on the abilities of the transport being used.
4379
4381
4380 Each line in the block is de-indented and concatenated. Then, that
4382 Each line in the block is de-indented and concatenated. Then, that
4381 value is evaluated as a Python b'' literal. This allows the use of
4383 value is evaluated as a Python b'' literal. This allows the use of
4382 backslash escaping, etc.
4384 backslash escaping, etc.
4383
4385
4384 raw+
4386 raw+
4385 ----
4387 ----
4386
4388
4387 Behaves like ``raw`` except flushes output afterwards.
4389 Behaves like ``raw`` except flushes output afterwards.
4388
4390
4389 command <X>
4391 command <X>
4390 -----------
4392 -----------
4391
4393
4392 Send a request to run a named command, whose name follows the ``command``
4394 Send a request to run a named command, whose name follows the ``command``
4393 string.
4395 string.
4394
4396
4395 Arguments to the command are defined as lines in this block. The format of
4397 Arguments to the command are defined as lines in this block. The format of
4396 each line is ``<key> <value>``. e.g.::
4398 each line is ``<key> <value>``. e.g.::
4397
4399
4398 command listkeys
4400 command listkeys
4399 namespace bookmarks
4401 namespace bookmarks
4400
4402
4401 If the value begins with ``eval:``, it will be interpreted as a Python
4403 If the value begins with ``eval:``, it will be interpreted as a Python
4402 literal expression. Otherwise values are interpreted as Python b'' literals.
4404 literal expression. Otherwise values are interpreted as Python b'' literals.
4403 This allows sending complex types and encoding special byte sequences via
4405 This allows sending complex types and encoding special byte sequences via
4404 backslash escaping.
4406 backslash escaping.
4405
4407
4406 The following arguments have special meaning:
4408 The following arguments have special meaning:
4407
4409
4408 ``PUSHFILE``
4410 ``PUSHFILE``
4409 When defined, the *push* mechanism of the peer will be used instead
4411 When defined, the *push* mechanism of the peer will be used instead
4410 of the static request-response mechanism and the content of the
4412 of the static request-response mechanism and the content of the
4411 file specified in the value of this argument will be sent as the
4413 file specified in the value of this argument will be sent as the
4412 command payload.
4414 command payload.
4413
4415
4414 This can be used to submit a local bundle file to the remote.
4416 This can be used to submit a local bundle file to the remote.
4415
4417
4416 batchbegin
4418 batchbegin
4417 ----------
4419 ----------
4418
4420
4419 Instruct the peer to begin a batched send.
4421 Instruct the peer to begin a batched send.
4420
4422
4421 All ``command`` blocks are queued for execution until the next
4423 All ``command`` blocks are queued for execution until the next
4422 ``batchsubmit`` block.
4424 ``batchsubmit`` block.
4423
4425
4424 batchsubmit
4426 batchsubmit
4425 -----------
4427 -----------
4426
4428
4427 Submit previously queued ``command`` blocks as a batch request.
4429 Submit previously queued ``command`` blocks as a batch request.
4428
4430
4429 This action MUST be paired with a ``batchbegin`` action.
4431 This action MUST be paired with a ``batchbegin`` action.
4430
4432
4431 httprequest <method> <path>
4433 httprequest <method> <path>
4432 ---------------------------
4434 ---------------------------
4433
4435
4434 (HTTP peer only)
4436 (HTTP peer only)
4435
4437
4436 Send an HTTP request to the peer.
4438 Send an HTTP request to the peer.
4437
4439
4438 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4440 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4439
4441
4440 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4442 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4441 headers to add to the request. e.g. ``Accept: foo``.
4443 headers to add to the request. e.g. ``Accept: foo``.
4442
4444
4443 The following arguments are special:
4445 The following arguments are special:
4444
4446
4445 ``BODYFILE``
4447 ``BODYFILE``
4446 The content of the file defined as the value to this argument will be
4448 The content of the file defined as the value to this argument will be
4447 transferred verbatim as the HTTP request body.
4449 transferred verbatim as the HTTP request body.
4448
4450
4449 ``frame <type> <flags> <payload>``
4451 ``frame <type> <flags> <payload>``
4450 Send a unified protocol frame as part of the request body.
4452 Send a unified protocol frame as part of the request body.
4451
4453
4452 All frames will be collected and sent as the body to the HTTP
4454 All frames will be collected and sent as the body to the HTTP
4453 request.
4455 request.
4454
4456
4455 close
4457 close
4456 -----
4458 -----
4457
4459
4458 Close the connection to the server.
4460 Close the connection to the server.
4459
4461
4460 flush
4462 flush
4461 -----
4463 -----
4462
4464
4463 Flush data written to the server.
4465 Flush data written to the server.
4464
4466
4465 readavailable
4467 readavailable
4466 -------------
4468 -------------
4467
4469
4468 Close the write end of the connection and read all available data from
4470 Close the write end of the connection and read all available data from
4469 the server.
4471 the server.
4470
4472
4471 If the connection to the server encompasses multiple pipes, we poll both
4473 If the connection to the server encompasses multiple pipes, we poll both
4472 pipes and read available data.
4474 pipes and read available data.
4473
4475
4474 readline
4476 readline
4475 --------
4477 --------
4476
4478
4477 Read a line of output from the server. If there are multiple output
4479 Read a line of output from the server. If there are multiple output
4478 pipes, reads only the main pipe.
4480 pipes, reads only the main pipe.
4479
4481
4480 ereadline
4482 ereadline
4481 ---------
4483 ---------
4482
4484
4483 Like ``readline``, but read from the stderr pipe, if available.
4485 Like ``readline``, but read from the stderr pipe, if available.
4484
4486
4485 read <X>
4487 read <X>
4486 --------
4488 --------
4487
4489
4488 ``read()`` N bytes from the server's main output pipe.
4490 ``read()`` N bytes from the server's main output pipe.
4489
4491
4490 eread <X>
4492 eread <X>
4491 ---------
4493 ---------
4492
4494
4493 ``read()`` N bytes from the server's stderr pipe, if available.
4495 ``read()`` N bytes from the server's stderr pipe, if available.
4494
4496
4495 Specifying Unified Frame-Based Protocol Frames
4497 Specifying Unified Frame-Based Protocol Frames
4496 ----------------------------------------------
4498 ----------------------------------------------
4497
4499
4498 It is possible to emit a *Unified Frame-Based Protocol* by using special
4500 It is possible to emit a *Unified Frame-Based Protocol* by using special
4499 syntax.
4501 syntax.
4500
4502
4501 A frame is composed as a type, flags, and payload. These can be parsed
4503 A frame is composed as a type, flags, and payload. These can be parsed
4502 from a string of the form:
4504 from a string of the form:
4503
4505
4504 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4506 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4505
4507
4506 ``request-id`` and ``stream-id`` are integers defining the request and
4508 ``request-id`` and ``stream-id`` are integers defining the request and
4507 stream identifiers.
4509 stream identifiers.
4508
4510
4509 ``type`` can be an integer value for the frame type or the string name
4511 ``type`` can be an integer value for the frame type or the string name
4510 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4512 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4511 ``command-name``.
4513 ``command-name``.
4512
4514
4513 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4515 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4514 components. Each component (and there can be just one) can be an integer
4516 components. Each component (and there can be just one) can be an integer
4515 or a flag name for stream flags or frame flags, respectively. Values are
4517 or a flag name for stream flags or frame flags, respectively. Values are
4516 resolved to integers and then bitwise OR'd together.
4518 resolved to integers and then bitwise OR'd together.
4517
4519
4518 ``payload`` represents the raw frame payload. If it begins with
4520 ``payload`` represents the raw frame payload. If it begins with
4519 ``cbor:``, the following string is evaluated as Python code and the
4521 ``cbor:``, the following string is evaluated as Python code and the
4520 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4522 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4521 as a Python byte string literal.
4523 as a Python byte string literal.
4522 """
4524 """
4523 opts = pycompat.byteskwargs(opts)
4525 opts = pycompat.byteskwargs(opts)
4524
4526
4525 if opts[b'localssh'] and not repo:
4527 if opts[b'localssh'] and not repo:
4526 raise error.Abort(_(b'--localssh requires a repository'))
4528 raise error.Abort(_(b'--localssh requires a repository'))
4527
4529
4528 if opts[b'peer'] and opts[b'peer'] not in (
4530 if opts[b'peer'] and opts[b'peer'] not in (
4529 b'raw',
4531 b'raw',
4530 b'ssh1',
4532 b'ssh1',
4531 ):
4533 ):
4532 raise error.Abort(
4534 raise error.Abort(
4533 _(b'invalid value for --peer'),
4535 _(b'invalid value for --peer'),
4534 hint=_(b'valid values are "raw" and "ssh1"'),
4536 hint=_(b'valid values are "raw" and "ssh1"'),
4535 )
4537 )
4536
4538
4537 if path and opts[b'localssh']:
4539 if path and opts[b'localssh']:
4538 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4540 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4539
4541
4540 if ui.interactive():
4542 if ui.interactive():
4541 ui.write(_(b'(waiting for commands on stdin)\n'))
4543 ui.write(_(b'(waiting for commands on stdin)\n'))
4542
4544
4543 blocks = list(_parsewirelangblocks(ui.fin))
4545 blocks = list(_parsewirelangblocks(ui.fin))
4544
4546
4545 proc = None
4547 proc = None
4546 stdin = None
4548 stdin = None
4547 stdout = None
4549 stdout = None
4548 stderr = None
4550 stderr = None
4549 opener = None
4551 opener = None
4550
4552
4551 if opts[b'localssh']:
4553 if opts[b'localssh']:
4552 # We start the SSH server in its own process so there is process
4554 # We start the SSH server in its own process so there is process
4553 # separation. This prevents a whole class of potential bugs around
4555 # separation. This prevents a whole class of potential bugs around
4554 # shared state from interfering with server operation.
4556 # shared state from interfering with server operation.
4555 args = procutil.hgcmd() + [
4557 args = procutil.hgcmd() + [
4556 b'-R',
4558 b'-R',
4557 repo.root,
4559 repo.root,
4558 b'debugserve',
4560 b'debugserve',
4559 b'--sshstdio',
4561 b'--sshstdio',
4560 ]
4562 ]
4561 proc = subprocess.Popen(
4563 proc = subprocess.Popen(
4562 pycompat.rapply(procutil.tonativestr, args),
4564 pycompat.rapply(procutil.tonativestr, args),
4563 stdin=subprocess.PIPE,
4565 stdin=subprocess.PIPE,
4564 stdout=subprocess.PIPE,
4566 stdout=subprocess.PIPE,
4565 stderr=subprocess.PIPE,
4567 stderr=subprocess.PIPE,
4566 bufsize=0,
4568 bufsize=0,
4567 )
4569 )
4568
4570
4569 stdin = proc.stdin
4571 stdin = proc.stdin
4570 stdout = proc.stdout
4572 stdout = proc.stdout
4571 stderr = proc.stderr
4573 stderr = proc.stderr
4572
4574
4573 # We turn the pipes into observers so we can log I/O.
4575 # We turn the pipes into observers so we can log I/O.
4574 if ui.verbose or opts[b'peer'] == b'raw':
4576 if ui.verbose or opts[b'peer'] == b'raw':
4575 stdin = util.makeloggingfileobject(
4577 stdin = util.makeloggingfileobject(
4576 ui, proc.stdin, b'i', logdata=True
4578 ui, proc.stdin, b'i', logdata=True
4577 )
4579 )
4578 stdout = util.makeloggingfileobject(
4580 stdout = util.makeloggingfileobject(
4579 ui, proc.stdout, b'o', logdata=True
4581 ui, proc.stdout, b'o', logdata=True
4580 )
4582 )
4581 stderr = util.makeloggingfileobject(
4583 stderr = util.makeloggingfileobject(
4582 ui, proc.stderr, b'e', logdata=True
4584 ui, proc.stderr, b'e', logdata=True
4583 )
4585 )
4584
4586
4585 # --localssh also implies the peer connection settings.
4587 # --localssh also implies the peer connection settings.
4586
4588
4587 url = b'ssh://localserver'
4589 url = b'ssh://localserver'
4588 autoreadstderr = not opts[b'noreadstderr']
4590 autoreadstderr = not opts[b'noreadstderr']
4589
4591
4590 if opts[b'peer'] == b'ssh1':
4592 if opts[b'peer'] == b'ssh1':
4591 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4593 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4592 peer = sshpeer.sshv1peer(
4594 peer = sshpeer.sshv1peer(
4593 ui,
4595 ui,
4594 url,
4596 url,
4595 proc,
4597 proc,
4596 stdin,
4598 stdin,
4597 stdout,
4599 stdout,
4598 stderr,
4600 stderr,
4599 None,
4601 None,
4600 autoreadstderr=autoreadstderr,
4602 autoreadstderr=autoreadstderr,
4601 )
4603 )
4602 elif opts[b'peer'] == b'raw':
4604 elif opts[b'peer'] == b'raw':
4603 ui.write(_(b'using raw connection to peer\n'))
4605 ui.write(_(b'using raw connection to peer\n'))
4604 peer = None
4606 peer = None
4605 else:
4607 else:
4606 ui.write(_(b'creating ssh peer from handshake results\n'))
4608 ui.write(_(b'creating ssh peer from handshake results\n'))
4607 peer = sshpeer.makepeer(
4609 peer = sshpeer.makepeer(
4608 ui,
4610 ui,
4609 url,
4611 url,
4610 proc,
4612 proc,
4611 stdin,
4613 stdin,
4612 stdout,
4614 stdout,
4613 stderr,
4615 stderr,
4614 autoreadstderr=autoreadstderr,
4616 autoreadstderr=autoreadstderr,
4615 )
4617 )
4616
4618
4617 elif path:
4619 elif path:
4618 # We bypass hg.peer() so we can proxy the sockets.
4620 # We bypass hg.peer() so we can proxy the sockets.
4619 # TODO consider not doing this because we skip
4621 # TODO consider not doing this because we skip
4620 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4622 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4621 u = urlutil.url(path)
4623 u = urlutil.url(path)
4622 if u.scheme != b'http':
4624 if u.scheme != b'http':
4623 raise error.Abort(_(b'only http:// paths are currently supported'))
4625 raise error.Abort(_(b'only http:// paths are currently supported'))
4624
4626
4625 url, authinfo = u.authinfo()
4627 url, authinfo = u.authinfo()
4626 openerargs = {
4628 openerargs = {
4627 'useragent': b'Mercurial debugwireproto',
4629 'useragent': b'Mercurial debugwireproto',
4628 }
4630 }
4629
4631
4630 # Turn pipes/sockets into observers so we can log I/O.
4632 # Turn pipes/sockets into observers so we can log I/O.
4631 if ui.verbose:
4633 if ui.verbose:
4632 openerargs.update(
4634 openerargs.update(
4633 {
4635 {
4634 'loggingfh': ui,
4636 'loggingfh': ui,
4635 'loggingname': b's',
4637 'loggingname': b's',
4636 'loggingopts': {
4638 'loggingopts': {
4637 'logdata': True,
4639 'logdata': True,
4638 'logdataapis': False,
4640 'logdataapis': False,
4639 },
4641 },
4640 }
4642 }
4641 )
4643 )
4642
4644
4643 if ui.debugflag:
4645 if ui.debugflag:
4644 openerargs['loggingopts']['logdataapis'] = True
4646 openerargs['loggingopts']['logdataapis'] = True
4645
4647
4646 # Don't send default headers when in raw mode. This allows us to
4648 # Don't send default headers when in raw mode. This allows us to
4647 # bypass most of the behavior of our URL handling code so we can
4649 # bypass most of the behavior of our URL handling code so we can
4648 # have near complete control over what's sent on the wire.
4650 # have near complete control over what's sent on the wire.
4649 if opts[b'peer'] == b'raw':
4651 if opts[b'peer'] == b'raw':
4650 openerargs['sendaccept'] = False
4652 openerargs['sendaccept'] = False
4651
4653
4652 opener = urlmod.opener(ui, authinfo, **openerargs)
4654 opener = urlmod.opener(ui, authinfo, **openerargs)
4653
4655
4654 if opts[b'peer'] == b'raw':
4656 if opts[b'peer'] == b'raw':
4655 ui.write(_(b'using raw connection to peer\n'))
4657 ui.write(_(b'using raw connection to peer\n'))
4656 peer = None
4658 peer = None
4657 elif opts[b'peer']:
4659 elif opts[b'peer']:
4658 raise error.Abort(
4660 raise error.Abort(
4659 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4661 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4660 )
4662 )
4661 else:
4663 else:
4662 peer = httppeer.makepeer(ui, path, opener=opener)
4664 peer = httppeer.makepeer(ui, path, opener=opener)
4663
4665
4664 # We /could/ populate stdin/stdout with sock.makefile()...
4666 # We /could/ populate stdin/stdout with sock.makefile()...
4665 else:
4667 else:
4666 raise error.Abort(_(b'unsupported connection configuration'))
4668 raise error.Abort(_(b'unsupported connection configuration'))
4667
4669
4668 batchedcommands = None
4670 batchedcommands = None
4669
4671
4670 # Now perform actions based on the parsed wire language instructions.
4672 # Now perform actions based on the parsed wire language instructions.
4671 for action, lines in blocks:
4673 for action, lines in blocks:
4672 if action in (b'raw', b'raw+'):
4674 if action in (b'raw', b'raw+'):
4673 if not stdin:
4675 if not stdin:
4674 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4676 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4675
4677
4676 # Concatenate the data together.
4678 # Concatenate the data together.
4677 data = b''.join(l.lstrip() for l in lines)
4679 data = b''.join(l.lstrip() for l in lines)
4678 data = stringutil.unescapestr(data)
4680 data = stringutil.unescapestr(data)
4679 stdin.write(data)
4681 stdin.write(data)
4680
4682
4681 if action == b'raw+':
4683 if action == b'raw+':
4682 stdin.flush()
4684 stdin.flush()
4683 elif action == b'flush':
4685 elif action == b'flush':
4684 if not stdin:
4686 if not stdin:
4685 raise error.Abort(_(b'cannot call flush on this peer'))
4687 raise error.Abort(_(b'cannot call flush on this peer'))
4686 stdin.flush()
4688 stdin.flush()
4687 elif action.startswith(b'command'):
4689 elif action.startswith(b'command'):
4688 if not peer:
4690 if not peer:
4689 raise error.Abort(
4691 raise error.Abort(
4690 _(
4692 _(
4691 b'cannot send commands unless peer instance '
4693 b'cannot send commands unless peer instance '
4692 b'is available'
4694 b'is available'
4693 )
4695 )
4694 )
4696 )
4695
4697
4696 command = action.split(b' ', 1)[1]
4698 command = action.split(b' ', 1)[1]
4697
4699
4698 args = {}
4700 args = {}
4699 for line in lines:
4701 for line in lines:
4700 # We need to allow empty values.
4702 # We need to allow empty values.
4701 fields = line.lstrip().split(b' ', 1)
4703 fields = line.lstrip().split(b' ', 1)
4702 if len(fields) == 1:
4704 if len(fields) == 1:
4703 key = fields[0]
4705 key = fields[0]
4704 value = b''
4706 value = b''
4705 else:
4707 else:
4706 key, value = fields
4708 key, value = fields
4707
4709
4708 if value.startswith(b'eval:'):
4710 if value.startswith(b'eval:'):
4709 value = stringutil.evalpythonliteral(value[5:])
4711 value = stringutil.evalpythonliteral(value[5:])
4710 else:
4712 else:
4711 value = stringutil.unescapestr(value)
4713 value = stringutil.unescapestr(value)
4712
4714
4713 args[key] = value
4715 args[key] = value
4714
4716
4715 if batchedcommands is not None:
4717 if batchedcommands is not None:
4716 batchedcommands.append((command, args))
4718 batchedcommands.append((command, args))
4717 continue
4719 continue
4718
4720
4719 ui.status(_(b'sending %s command\n') % command)
4721 ui.status(_(b'sending %s command\n') % command)
4720
4722
4721 if b'PUSHFILE' in args:
4723 if b'PUSHFILE' in args:
4722 with open(args[b'PUSHFILE'], 'rb') as fh:
4724 with open(args[b'PUSHFILE'], 'rb') as fh:
4723 del args[b'PUSHFILE']
4725 del args[b'PUSHFILE']
4724 res, output = peer._callpush(
4726 res, output = peer._callpush(
4725 command, fh, **pycompat.strkwargs(args)
4727 command, fh, **pycompat.strkwargs(args)
4726 )
4728 )
4727 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4729 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4728 ui.status(
4730 ui.status(
4729 _(b'remote output: %s\n') % stringutil.escapestr(output)
4731 _(b'remote output: %s\n') % stringutil.escapestr(output)
4730 )
4732 )
4731 else:
4733 else:
4732 with peer.commandexecutor() as e:
4734 with peer.commandexecutor() as e:
4733 res = e.callcommand(command, args).result()
4735 res = e.callcommand(command, args).result()
4734
4736
4735 ui.status(
4737 ui.status(
4736 _(b'response: %s\n')
4738 _(b'response: %s\n')
4737 % stringutil.pprint(res, bprefix=True, indent=2)
4739 % stringutil.pprint(res, bprefix=True, indent=2)
4738 )
4740 )
4739
4741
4740 elif action == b'batchbegin':
4742 elif action == b'batchbegin':
4741 if batchedcommands is not None:
4743 if batchedcommands is not None:
4742 raise error.Abort(_(b'nested batchbegin not allowed'))
4744 raise error.Abort(_(b'nested batchbegin not allowed'))
4743
4745
4744 batchedcommands = []
4746 batchedcommands = []
4745 elif action == b'batchsubmit':
4747 elif action == b'batchsubmit':
4746 # There is a batching API we could go through. But it would be
4748 # There is a batching API we could go through. But it would be
4747 # difficult to normalize requests into function calls. It is easier
4749 # difficult to normalize requests into function calls. It is easier
4748 # to bypass this layer and normalize to commands + args.
4750 # to bypass this layer and normalize to commands + args.
4749 ui.status(
4751 ui.status(
4750 _(b'sending batch with %d sub-commands\n')
4752 _(b'sending batch with %d sub-commands\n')
4751 % len(batchedcommands)
4753 % len(batchedcommands)
4752 )
4754 )
4753 assert peer is not None
4755 assert peer is not None
4754 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4756 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4755 ui.status(
4757 ui.status(
4756 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4758 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4757 )
4759 )
4758
4760
4759 batchedcommands = None
4761 batchedcommands = None
4760
4762
4761 elif action.startswith(b'httprequest '):
4763 elif action.startswith(b'httprequest '):
4762 if not opener:
4764 if not opener:
4763 raise error.Abort(
4765 raise error.Abort(
4764 _(b'cannot use httprequest without an HTTP peer')
4766 _(b'cannot use httprequest without an HTTP peer')
4765 )
4767 )
4766
4768
4767 request = action.split(b' ', 2)
4769 request = action.split(b' ', 2)
4768 if len(request) != 3:
4770 if len(request) != 3:
4769 raise error.Abort(
4771 raise error.Abort(
4770 _(
4772 _(
4771 b'invalid httprequest: expected format is '
4773 b'invalid httprequest: expected format is '
4772 b'"httprequest <method> <path>'
4774 b'"httprequest <method> <path>'
4773 )
4775 )
4774 )
4776 )
4775
4777
4776 method, httppath = request[1:]
4778 method, httppath = request[1:]
4777 headers = {}
4779 headers = {}
4778 body = None
4780 body = None
4779 frames = []
4781 frames = []
4780 for line in lines:
4782 for line in lines:
4781 line = line.lstrip()
4783 line = line.lstrip()
4782 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4784 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4783 if m:
4785 if m:
4784 # Headers need to use native strings.
4786 # Headers need to use native strings.
4785 key = pycompat.strurl(m.group(1))
4787 key = pycompat.strurl(m.group(1))
4786 value = pycompat.strurl(m.group(2))
4788 value = pycompat.strurl(m.group(2))
4787 headers[key] = value
4789 headers[key] = value
4788 continue
4790 continue
4789
4791
4790 if line.startswith(b'BODYFILE '):
4792 if line.startswith(b'BODYFILE '):
4791 with open(line.split(b' ', 1), b'rb') as fh:
4793 with open(line.split(b' ', 1), b'rb') as fh:
4792 body = fh.read()
4794 body = fh.read()
4793 elif line.startswith(b'frame '):
4795 elif line.startswith(b'frame '):
4794 frame = wireprotoframing.makeframefromhumanstring(
4796 frame = wireprotoframing.makeframefromhumanstring(
4795 line[len(b'frame ') :]
4797 line[len(b'frame ') :]
4796 )
4798 )
4797
4799
4798 frames.append(frame)
4800 frames.append(frame)
4799 else:
4801 else:
4800 raise error.Abort(
4802 raise error.Abort(
4801 _(b'unknown argument to httprequest: %s') % line
4803 _(b'unknown argument to httprequest: %s') % line
4802 )
4804 )
4803
4805
4804 url = path + httppath
4806 url = path + httppath
4805
4807
4806 if frames:
4808 if frames:
4807 body = b''.join(bytes(f) for f in frames)
4809 body = b''.join(bytes(f) for f in frames)
4808
4810
4809 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4811 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4810
4812
4811 # urllib.Request insists on using has_data() as a proxy for
4813 # urllib.Request insists on using has_data() as a proxy for
4812 # determining the request method. Override that to use our
4814 # determining the request method. Override that to use our
4813 # explicitly requested method.
4815 # explicitly requested method.
4814 req.get_method = lambda: pycompat.sysstr(method)
4816 req.get_method = lambda: pycompat.sysstr(method)
4815
4817
4816 try:
4818 try:
4817 res = opener.open(req)
4819 res = opener.open(req)
4818 body = res.read()
4820 body = res.read()
4819 except util.urlerr.urlerror as e:
4821 except util.urlerr.urlerror as e:
4820 # read() method must be called, but only exists in Python 2
4822 # read() method must be called, but only exists in Python 2
4821 getattr(e, 'read', lambda: None)()
4823 getattr(e, 'read', lambda: None)()
4822 continue
4824 continue
4823
4825
4824 ct = res.headers.get('Content-Type')
4826 ct = res.headers.get('Content-Type')
4825 if ct == 'application/mercurial-cbor':
4827 if ct == 'application/mercurial-cbor':
4826 ui.write(
4828 ui.write(
4827 _(b'cbor> %s\n')
4829 _(b'cbor> %s\n')
4828 % stringutil.pprint(
4830 % stringutil.pprint(
4829 cborutil.decodeall(body), bprefix=True, indent=2
4831 cborutil.decodeall(body), bprefix=True, indent=2
4830 )
4832 )
4831 )
4833 )
4832
4834
4833 elif action == b'close':
4835 elif action == b'close':
4834 assert peer is not None
4836 assert peer is not None
4835 peer.close()
4837 peer.close()
4836 elif action == b'readavailable':
4838 elif action == b'readavailable':
4837 if not stdout or not stderr:
4839 if not stdout or not stderr:
4838 raise error.Abort(
4840 raise error.Abort(
4839 _(b'readavailable not available on this peer')
4841 _(b'readavailable not available on this peer')
4840 )
4842 )
4841
4843
4842 stdin.close()
4844 stdin.close()
4843 stdout.read()
4845 stdout.read()
4844 stderr.read()
4846 stderr.read()
4845
4847
4846 elif action == b'readline':
4848 elif action == b'readline':
4847 if not stdout:
4849 if not stdout:
4848 raise error.Abort(_(b'readline not available on this peer'))
4850 raise error.Abort(_(b'readline not available on this peer'))
4849 stdout.readline()
4851 stdout.readline()
4850 elif action == b'ereadline':
4852 elif action == b'ereadline':
4851 if not stderr:
4853 if not stderr:
4852 raise error.Abort(_(b'ereadline not available on this peer'))
4854 raise error.Abort(_(b'ereadline not available on this peer'))
4853 stderr.readline()
4855 stderr.readline()
4854 elif action.startswith(b'read '):
4856 elif action.startswith(b'read '):
4855 count = int(action.split(b' ', 1)[1])
4857 count = int(action.split(b' ', 1)[1])
4856 if not stdout:
4858 if not stdout:
4857 raise error.Abort(_(b'read not available on this peer'))
4859 raise error.Abort(_(b'read not available on this peer'))
4858 stdout.read(count)
4860 stdout.read(count)
4859 elif action.startswith(b'eread '):
4861 elif action.startswith(b'eread '):
4860 count = int(action.split(b' ', 1)[1])
4862 count = int(action.split(b' ', 1)[1])
4861 if not stderr:
4863 if not stderr:
4862 raise error.Abort(_(b'eread not available on this peer'))
4864 raise error.Abort(_(b'eread not available on this peer'))
4863 stderr.read(count)
4865 stderr.read(count)
4864 else:
4866 else:
4865 raise error.Abort(_(b'unknown action: %s') % action)
4867 raise error.Abort(_(b'unknown action: %s') % action)
4866
4868
4867 if batchedcommands is not None:
4869 if batchedcommands is not None:
4868 raise error.Abort(_(b'unclosed "batchbegin" request'))
4870 raise error.Abort(_(b'unclosed "batchbegin" request'))
4869
4871
4870 if peer:
4872 if peer:
4871 peer.close()
4873 peer.close()
4872
4874
4873 if proc:
4875 if proc:
4874 proc.kill()
4876 proc.kill()
@@ -1,525 +1,525 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15
15
16 # mbp: "you know that thing where cvs gives you conflict markers?"
16 # mbp: "you know that thing where cvs gives you conflict markers?"
17 # s: "i hate that."
17 # s: "i hate that."
18
18
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 from .i18n import _
21 from .i18n import _
22 from . import (
22 from . import (
23 error,
23 error,
24 mdiff,
24 mdiff,
25 pycompat,
25 pycompat,
26 )
26 )
27 from .utils import stringutil
27 from .utils import stringutil
28
28
29
29
30 class CantReprocessAndShowBase(Exception):
30 class CantReprocessAndShowBase(Exception):
31 pass
31 pass
32
32
33
33
34 def intersect(ra, rb):
34 def intersect(ra, rb):
35 """Given two ranges return the range where they intersect or None.
35 """Given two ranges return the range where they intersect or None.
36
36
37 >>> intersect((0, 10), (0, 6))
37 >>> intersect((0, 10), (0, 6))
38 (0, 6)
38 (0, 6)
39 >>> intersect((0, 10), (5, 15))
39 >>> intersect((0, 10), (5, 15))
40 (5, 10)
40 (5, 10)
41 >>> intersect((0, 10), (10, 15))
41 >>> intersect((0, 10), (10, 15))
42 >>> intersect((0, 9), (10, 15))
42 >>> intersect((0, 9), (10, 15))
43 >>> intersect((0, 9), (7, 15))
43 >>> intersect((0, 9), (7, 15))
44 (7, 9)
44 (7, 9)
45 """
45 """
46 assert ra[0] <= ra[1]
46 assert ra[0] <= ra[1]
47 assert rb[0] <= rb[1]
47 assert rb[0] <= rb[1]
48
48
49 sa = max(ra[0], rb[0])
49 sa = max(ra[0], rb[0])
50 sb = min(ra[1], rb[1])
50 sb = min(ra[1], rb[1])
51 if sa < sb:
51 if sa < sb:
52 return sa, sb
52 return sa, sb
53 else:
53 else:
54 return None
54 return None
55
55
56
56
57 def compare_range(a, astart, aend, b, bstart, bend):
57 def compare_range(a, astart, aend, b, bstart, bend):
58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
58 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
59 if (aend - astart) != (bend - bstart):
59 if (aend - astart) != (bend - bstart):
60 return False
60 return False
61 for ia, ib in zip(
61 for ia, ib in zip(
62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
62 pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
63 ):
63 ):
64 if a[ia] != b[ib]:
64 if a[ia] != b[ib]:
65 return False
65 return False
66 else:
66 else:
67 return True
67 return True
68
68
69
69
70 class Merge3Text(object):
70 class Merge3Text(object):
71 """3-way merge of texts.
71 """3-way merge of texts.
72
72
73 Given strings BASE, OTHER, THIS, tries to produce a combined text
73 Given strings BASE, OTHER, THIS, tries to produce a combined text
74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
74 incorporating the changes from both BASE->OTHER and BASE->THIS."""
75
75
76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
76 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
77 self.basetext = basetext
77 self.basetext = basetext
78 self.atext = atext
78 self.atext = atext
79 self.btext = btext
79 self.btext = btext
80 if base is None:
80 if base is None:
81 base = mdiff.splitnewlines(basetext)
81 base = mdiff.splitnewlines(basetext)
82 if a is None:
82 if a is None:
83 a = mdiff.splitnewlines(atext)
83 a = mdiff.splitnewlines(atext)
84 if b is None:
84 if b is None:
85 b = mdiff.splitnewlines(btext)
85 b = mdiff.splitnewlines(btext)
86 self.base = base
86 self.base = base
87 self.a = a
87 self.a = a
88 self.b = b
88 self.b = b
89
89
90 def merge_groups(self):
90 def merge_groups(self):
91 """Yield sequence of line groups. Each one is a tuple:
91 """Yield sequence of line groups. Each one is a tuple:
92
92
93 'unchanged', lines
93 'unchanged', lines
94 Lines unchanged from base
94 Lines unchanged from base
95
95
96 'a', lines
96 'a', lines
97 Lines taken from a
97 Lines taken from a
98
98
99 'same', lines
99 'same', lines
100 Lines taken from a (and equal to b)
100 Lines taken from a (and equal to b)
101
101
102 'b', lines
102 'b', lines
103 Lines taken from b
103 Lines taken from b
104
104
105 'conflict', (base_lines, a_lines, b_lines)
105 'conflict', (base_lines, a_lines, b_lines)
106 Lines from base were changed to either a or b and conflict.
106 Lines from base were changed to either a or b and conflict.
107 """
107 """
108 for t in self.merge_regions():
108 for t in self.merge_regions():
109 what = t[0]
109 what = t[0]
110 if what == b'unchanged':
110 if what == b'unchanged':
111 yield what, self.base[t[1] : t[2]]
111 yield what, self.base[t[1] : t[2]]
112 elif what == b'a' or what == b'same':
112 elif what == b'a' or what == b'same':
113 yield what, self.a[t[1] : t[2]]
113 yield what, self.a[t[1] : t[2]]
114 elif what == b'b':
114 elif what == b'b':
115 yield what, self.b[t[1] : t[2]]
115 yield what, self.b[t[1] : t[2]]
116 elif what == b'conflict':
116 elif what == b'conflict':
117 yield (
117 yield (
118 what,
118 what,
119 (
119 (
120 self.base[t[1] : t[2]],
120 self.base[t[1] : t[2]],
121 self.a[t[3] : t[4]],
121 self.a[t[3] : t[4]],
122 self.b[t[5] : t[6]],
122 self.b[t[5] : t[6]],
123 ),
123 ),
124 )
124 )
125 else:
125 else:
126 raise ValueError(what)
126 raise ValueError(what)
127
127
128 def merge_regions(self):
128 def merge_regions(self):
129 """Return sequences of matching and conflicting regions.
129 """Return sequences of matching and conflicting regions.
130
130
131 This returns tuples, where the first value says what kind we
131 This returns tuples, where the first value says what kind we
132 have:
132 have:
133
133
134 'unchanged', start, end
134 'unchanged', start, end
135 Take a region of base[start:end]
135 Take a region of base[start:end]
136
136
137 'same', astart, aend
137 'same', astart, aend
138 b and a are different from base but give the same result
138 b and a are different from base but give the same result
139
139
140 'a', start, end
140 'a', start, end
141 Non-clashing insertion from a[start:end]
141 Non-clashing insertion from a[start:end]
142
142
143 'conflict', zstart, zend, astart, aend, bstart, bend
143 'conflict', zstart, zend, astart, aend, bstart, bend
144 Conflict between a and b, with z as common ancestor
144 Conflict between a and b, with z as common ancestor
145
145
146 Method is as follows:
146 Method is as follows:
147
147
148 The two sequences align only on regions which match the base
148 The two sequences align only on regions which match the base
149 and both descendants. These are found by doing a two-way diff
149 and both descendants. These are found by doing a two-way diff
150 of each one against the base, and then finding the
150 of each one against the base, and then finding the
151 intersections between those regions. These "sync regions"
151 intersections between those regions. These "sync regions"
152 are by definition unchanged in both and easily dealt with.
152 are by definition unchanged in both and easily dealt with.
153
153
154 The regions in between can be in any of three cases:
154 The regions in between can be in any of three cases:
155 conflicted, or changed on only one side.
155 conflicted, or changed on only one side.
156 """
156 """
157
157
158 # section a[0:ia] has been disposed of, etc
158 # section a[0:ia] has been disposed of, etc
159 iz = ia = ib = 0
159 iz = ia = ib = 0
160
160
161 for region in self.find_sync_regions():
161 for region in self.find_sync_regions():
162 zmatch, zend, amatch, aend, bmatch, bend = region
162 zmatch, zend, amatch, aend, bmatch, bend = region
163 # print 'match base [%d:%d]' % (zmatch, zend)
163 # print 'match base [%d:%d]' % (zmatch, zend)
164
164
165 matchlen = zend - zmatch
165 matchlen = zend - zmatch
166 assert matchlen >= 0
166 assert matchlen >= 0
167 assert matchlen == (aend - amatch)
167 assert matchlen == (aend - amatch)
168 assert matchlen == (bend - bmatch)
168 assert matchlen == (bend - bmatch)
169
169
170 len_a = amatch - ia
170 len_a = amatch - ia
171 len_b = bmatch - ib
171 len_b = bmatch - ib
172 len_base = zmatch - iz
172 len_base = zmatch - iz
173 assert len_a >= 0
173 assert len_a >= 0
174 assert len_b >= 0
174 assert len_b >= 0
175 assert len_base >= 0
175 assert len_base >= 0
176
176
177 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
177 # print 'unmatched a=%d, b=%d' % (len_a, len_b)
178
178
179 if len_a or len_b:
179 if len_a or len_b:
180 # try to avoid actually slicing the lists
180 # try to avoid actually slicing the lists
181 equal_a = compare_range(
181 equal_a = compare_range(
182 self.a, ia, amatch, self.base, iz, zmatch
182 self.a, ia, amatch, self.base, iz, zmatch
183 )
183 )
184 equal_b = compare_range(
184 equal_b = compare_range(
185 self.b, ib, bmatch, self.base, iz, zmatch
185 self.b, ib, bmatch, self.base, iz, zmatch
186 )
186 )
187 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
187 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
188
188
189 if same:
189 if same:
190 yield b'same', ia, amatch
190 yield b'same', ia, amatch
191 elif equal_a and not equal_b:
191 elif equal_a and not equal_b:
192 yield b'b', ib, bmatch
192 yield b'b', ib, bmatch
193 elif equal_b and not equal_a:
193 elif equal_b and not equal_a:
194 yield b'a', ia, amatch
194 yield b'a', ia, amatch
195 elif not equal_a and not equal_b:
195 elif not equal_a and not equal_b:
196 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
196 yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
197 else:
197 else:
198 raise AssertionError(b"can't handle a=b=base but unmatched")
198 raise AssertionError(b"can't handle a=b=base but unmatched")
199
199
200 ia = amatch
200 ia = amatch
201 ib = bmatch
201 ib = bmatch
202 iz = zmatch
202 iz = zmatch
203
203
204 # if the same part of the base was deleted on both sides
204 # if the same part of the base was deleted on both sides
205 # that's OK, we can just skip it.
205 # that's OK, we can just skip it.
206
206
207 if matchlen > 0:
207 if matchlen > 0:
208 assert ia == amatch
208 assert ia == amatch
209 assert ib == bmatch
209 assert ib == bmatch
210 assert iz == zmatch
210 assert iz == zmatch
211
211
212 yield b'unchanged', zmatch, zend
212 yield b'unchanged', zmatch, zend
213 iz = zend
213 iz = zend
214 ia = aend
214 ia = aend
215 ib = bend
215 ib = bend
216
216
217 def minimize(self, merge_groups):
217 def minimize(self, merge_groups):
218 """Trim conflict regions of lines where A and B sides match.
218 """Trim conflict regions of lines where A and B sides match.
219
219
220 Lines where both A and B have made the same changes at the beginning
220 Lines where both A and B have made the same changes at the beginning
221 or the end of each merge region are eliminated from the conflict
221 or the end of each merge region are eliminated from the conflict
222 region and are instead considered the same.
222 region and are instead considered the same.
223 """
223 """
224 for what, lines in merge_groups:
224 for what, lines in merge_groups:
225 if what != b"conflict":
225 if what != b"conflict":
226 yield what, lines
226 yield what, lines
227 continue
227 continue
228 base_lines, a_lines, b_lines = lines
228 base_lines, a_lines, b_lines = lines
229 alen = len(a_lines)
229 alen = len(a_lines)
230 blen = len(b_lines)
230 blen = len(b_lines)
231
231
232 # find matches at the front
232 # find matches at the front
233 ii = 0
233 ii = 0
234 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
234 while ii < alen and ii < blen and a_lines[ii] == b_lines[ii]:
235 ii += 1
235 ii += 1
236 startmatches = ii
236 startmatches = ii
237
237
238 # find matches at the end
238 # find matches at the end
239 ii = 0
239 ii = 0
240 while (
240 while (
241 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
241 ii < alen and ii < blen and a_lines[-ii - 1] == b_lines[-ii - 1]
242 ):
242 ):
243 ii += 1
243 ii += 1
244 endmatches = ii
244 endmatches = ii
245
245
246 if startmatches > 0:
246 if startmatches > 0:
247 yield b'same', a_lines[:startmatches]
247 yield b'same', a_lines[:startmatches]
248
248
249 yield (
249 yield (
250 b'conflict',
250 b'conflict',
251 (
251 (
252 base_lines,
252 base_lines,
253 a_lines[startmatches : alen - endmatches],
253 a_lines[startmatches : alen - endmatches],
254 b_lines[startmatches : blen - endmatches],
254 b_lines[startmatches : blen - endmatches],
255 ),
255 ),
256 )
256 )
257
257
258 if endmatches > 0:
258 if endmatches > 0:
259 yield b'same', a_lines[alen - endmatches :]
259 yield b'same', a_lines[alen - endmatches :]
260
260
261 def find_sync_regions(self):
261 def find_sync_regions(self):
262 """Return a list of sync regions, where both descendants match the base.
262 """Return a list of sync regions, where both descendants match the base.
263
263
264 Generates a list of (base1, base2, a1, a2, b1, b2). There is
264 Generates a list of (base1, base2, a1, a2, b1, b2). There is
265 always a zero-length sync region at the end of all the files.
265 always a zero-length sync region at the end of all the files.
266 """
266 """
267
267
268 ia = ib = 0
268 ia = ib = 0
269 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
269 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
270 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
270 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
271 len_a = len(amatches)
271 len_a = len(amatches)
272 len_b = len(bmatches)
272 len_b = len(bmatches)
273
273
274 sl = []
274 sl = []
275
275
276 while ia < len_a and ib < len_b:
276 while ia < len_a and ib < len_b:
277 abase, amatch, alen = amatches[ia]
277 abase, amatch, alen = amatches[ia]
278 bbase, bmatch, blen = bmatches[ib]
278 bbase, bmatch, blen = bmatches[ib]
279
279
280 # there is an unconflicted block at i; how long does it
280 # there is an unconflicted block at i; how long does it
281 # extend? until whichever one ends earlier.
281 # extend? until whichever one ends earlier.
282 i = intersect((abase, abase + alen), (bbase, bbase + blen))
282 i = intersect((abase, abase + alen), (bbase, bbase + blen))
283 if i:
283 if i:
284 intbase = i[0]
284 intbase = i[0]
285 intend = i[1]
285 intend = i[1]
286 intlen = intend - intbase
286 intlen = intend - intbase
287
287
288 # found a match of base[i[0], i[1]]; this may be less than
288 # found a match of base[i[0], i[1]]; this may be less than
289 # the region that matches in either one
289 # the region that matches in either one
290 assert intlen <= alen
290 assert intlen <= alen
291 assert intlen <= blen
291 assert intlen <= blen
292 assert abase <= intbase
292 assert abase <= intbase
293 assert bbase <= intbase
293 assert bbase <= intbase
294
294
295 asub = amatch + (intbase - abase)
295 asub = amatch + (intbase - abase)
296 bsub = bmatch + (intbase - bbase)
296 bsub = bmatch + (intbase - bbase)
297 aend = asub + intlen
297 aend = asub + intlen
298 bend = bsub + intlen
298 bend = bsub + intlen
299
299
300 assert self.base[intbase:intend] == self.a[asub:aend], (
300 assert self.base[intbase:intend] == self.a[asub:aend], (
301 self.base[intbase:intend],
301 self.base[intbase:intend],
302 self.a[asub:aend],
302 self.a[asub:aend],
303 )
303 )
304
304
305 assert self.base[intbase:intend] == self.b[bsub:bend]
305 assert self.base[intbase:intend] == self.b[bsub:bend]
306
306
307 sl.append((intbase, intend, asub, aend, bsub, bend))
307 sl.append((intbase, intend, asub, aend, bsub, bend))
308
308
309 # advance whichever one ends first in the base text
309 # advance whichever one ends first in the base text
310 if (abase + alen) < (bbase + blen):
310 if (abase + alen) < (bbase + blen):
311 ia += 1
311 ia += 1
312 else:
312 else:
313 ib += 1
313 ib += 1
314
314
315 intbase = len(self.base)
315 intbase = len(self.base)
316 abase = len(self.a)
316 abase = len(self.a)
317 bbase = len(self.b)
317 bbase = len(self.b)
318 sl.append((intbase, intbase, abase, abase, bbase, bbase))
318 sl.append((intbase, intbase, abase, abase, bbase, bbase))
319
319
320 return sl
320 return sl
321
321
322
322
323 def _verifytext(text, path, ui, opts):
323 def _verifytext(text, path, ui, opts):
324 """verifies that text is non-binary (unless opts[text] is passed,
324 """verifies that text is non-binary (unless opts[text] is passed,
325 then we just warn)"""
325 then we just warn)"""
326 if stringutil.binary(text):
326 if stringutil.binary(text):
327 msg = _(b"%s looks like a binary file.") % path
327 msg = _(b"%s looks like a binary file.") % path
328 if not opts.get('quiet'):
328 if not opts.get('quiet'):
329 ui.warn(_(b'warning: %s\n') % msg)
329 ui.warn(_(b'warning: %s\n') % msg)
330 if not opts.get('text'):
330 if not opts.get('text'):
331 raise error.Abort(msg)
331 raise error.Abort(msg)
332 return text
332 return text
333
333
334
334
335 def _picklabels(defaults, overrides):
335 def _picklabels(defaults, overrides):
336 if len(overrides) > 3:
336 if len(overrides) > 3:
337 raise error.Abort(_(b"can only specify three labels."))
337 raise error.Abort(_(b"can only specify three labels."))
338 result = defaults[:]
338 result = defaults[:]
339 for i, override in enumerate(overrides):
339 for i, override in enumerate(overrides):
340 result[i] = override
340 result[i] = override
341 return result
341 return result
342
342
343
343
344 def merge_lines(
344 def render_markers(
345 m3,
345 m3,
346 name_a=None,
346 name_a=None,
347 name_b=None,
347 name_b=None,
348 name_base=None,
348 name_base=None,
349 start_marker=b'<<<<<<<',
349 start_marker=b'<<<<<<<',
350 mid_marker=b'=======',
350 mid_marker=b'=======',
351 end_marker=b'>>>>>>>',
351 end_marker=b'>>>>>>>',
352 base_marker=None,
352 base_marker=None,
353 minimize=False,
353 minimize=False,
354 ):
354 ):
355 """Return merge in cvs-like form."""
355 """Return merge in cvs-like form."""
356 conflicts = False
356 conflicts = False
357 newline = b'\n'
357 newline = b'\n'
358 if len(m3.a) > 0:
358 if len(m3.a) > 0:
359 if m3.a[0].endswith(b'\r\n'):
359 if m3.a[0].endswith(b'\r\n'):
360 newline = b'\r\n'
360 newline = b'\r\n'
361 elif m3.a[0].endswith(b'\r'):
361 elif m3.a[0].endswith(b'\r'):
362 newline = b'\r'
362 newline = b'\r'
363 if name_a and start_marker:
363 if name_a and start_marker:
364 start_marker = start_marker + b' ' + name_a
364 start_marker = start_marker + b' ' + name_a
365 if name_b and end_marker:
365 if name_b and end_marker:
366 end_marker = end_marker + b' ' + name_b
366 end_marker = end_marker + b' ' + name_b
367 if name_base and base_marker:
367 if name_base and base_marker:
368 base_marker = base_marker + b' ' + name_base
368 base_marker = base_marker + b' ' + name_base
369 merge_groups = m3.merge_groups()
369 merge_groups = m3.merge_groups()
370 if minimize:
370 if minimize:
371 merge_groups = m3.minimize(merge_groups)
371 merge_groups = m3.minimize(merge_groups)
372 lines = []
372 lines = []
373 for what, group_lines in merge_groups:
373 for what, group_lines in merge_groups:
374 if what == b'conflict':
374 if what == b'conflict':
375 base_lines, a_lines, b_lines = group_lines
375 base_lines, a_lines, b_lines = group_lines
376 conflicts = True
376 conflicts = True
377 if start_marker is not None:
377 if start_marker is not None:
378 lines.append(start_marker + newline)
378 lines.append(start_marker + newline)
379 lines.extend(a_lines)
379 lines.extend(a_lines)
380 if base_marker is not None:
380 if base_marker is not None:
381 lines.append(base_marker + newline)
381 lines.append(base_marker + newline)
382 lines.extend(base_lines)
382 lines.extend(base_lines)
383 if mid_marker is not None:
383 if mid_marker is not None:
384 lines.append(mid_marker + newline)
384 lines.append(mid_marker + newline)
385 lines.extend(b_lines)
385 lines.extend(b_lines)
386 if end_marker is not None:
386 if end_marker is not None:
387 lines.append(end_marker + newline)
387 lines.append(end_marker + newline)
388 else:
388 else:
389 lines.extend(group_lines)
389 lines.extend(group_lines)
390 return lines, conflicts
390 return lines, conflicts
391
391
392
392
393 def _mergediff(m3, name_a, name_b, name_base):
393 def render_mergediff(m3, name_a, name_b, name_base):
394 lines = []
394 lines = []
395 conflicts = False
395 conflicts = False
396 for what, group_lines in m3.merge_groups():
396 for what, group_lines in m3.merge_groups():
397 if what == b'conflict':
397 if what == b'conflict':
398 base_lines, a_lines, b_lines = group_lines
398 base_lines, a_lines, b_lines = group_lines
399 base_text = b''.join(base_lines)
399 base_text = b''.join(base_lines)
400 b_blocks = list(
400 b_blocks = list(
401 mdiff.allblocks(
401 mdiff.allblocks(
402 base_text,
402 base_text,
403 b''.join(b_lines),
403 b''.join(b_lines),
404 lines1=base_lines,
404 lines1=base_lines,
405 lines2=b_lines,
405 lines2=b_lines,
406 )
406 )
407 )
407 )
408 a_blocks = list(
408 a_blocks = list(
409 mdiff.allblocks(
409 mdiff.allblocks(
410 base_text,
410 base_text,
411 b''.join(a_lines),
411 b''.join(a_lines),
412 lines1=base_lines,
412 lines1=base_lines,
413 lines2=b_lines,
413 lines2=b_lines,
414 )
414 )
415 )
415 )
416
416
417 def matching_lines(blocks):
417 def matching_lines(blocks):
418 return sum(
418 return sum(
419 block[1] - block[0]
419 block[1] - block[0]
420 for block, kind in blocks
420 for block, kind in blocks
421 if kind == b'='
421 if kind == b'='
422 )
422 )
423
423
424 def diff_lines(blocks, lines1, lines2):
424 def diff_lines(blocks, lines1, lines2):
425 for block, kind in blocks:
425 for block, kind in blocks:
426 if kind == b'=':
426 if kind == b'=':
427 for line in lines1[block[0] : block[1]]:
427 for line in lines1[block[0] : block[1]]:
428 yield b' ' + line
428 yield b' ' + line
429 else:
429 else:
430 for line in lines1[block[0] : block[1]]:
430 for line in lines1[block[0] : block[1]]:
431 yield b'-' + line
431 yield b'-' + line
432 for line in lines2[block[2] : block[3]]:
432 for line in lines2[block[2] : block[3]]:
433 yield b'+' + line
433 yield b'+' + line
434
434
435 lines.append(b"<<<<<<<\n")
435 lines.append(b"<<<<<<<\n")
436 if matching_lines(a_blocks) < matching_lines(b_blocks):
436 if matching_lines(a_blocks) < matching_lines(b_blocks):
437 lines.append(b"======= %s\n" % name_a)
437 lines.append(b"======= %s\n" % name_a)
438 lines.extend(a_lines)
438 lines.extend(a_lines)
439 lines.append(b"------- %s\n" % name_base)
439 lines.append(b"------- %s\n" % name_base)
440 lines.append(b"+++++++ %s\n" % name_b)
440 lines.append(b"+++++++ %s\n" % name_b)
441 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
441 lines.extend(diff_lines(b_blocks, base_lines, b_lines))
442 else:
442 else:
443 lines.append(b"------- %s\n" % name_base)
443 lines.append(b"------- %s\n" % name_base)
444 lines.append(b"+++++++ %s\n" % name_a)
444 lines.append(b"+++++++ %s\n" % name_a)
445 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
445 lines.extend(diff_lines(a_blocks, base_lines, a_lines))
446 lines.append(b"======= %s\n" % name_b)
446 lines.append(b"======= %s\n" % name_b)
447 lines.extend(b_lines)
447 lines.extend(b_lines)
448 lines.append(b">>>>>>>\n")
448 lines.append(b">>>>>>>\n")
449 conflicts = True
449 conflicts = True
450 else:
450 else:
451 lines.extend(group_lines)
451 lines.extend(group_lines)
452 return lines, conflicts
452 return lines, conflicts
453
453
454
454
455 def _resolve(m3, sides):
455 def _resolve(m3, sides):
456 lines = []
456 lines = []
457 for what, group_lines in m3.merge_groups():
457 for what, group_lines in m3.merge_groups():
458 if what == b'conflict':
458 if what == b'conflict':
459 for side in sides:
459 for side in sides:
460 lines.extend(group_lines[side])
460 lines.extend(group_lines[side])
461 else:
461 else:
462 lines.extend(group_lines)
462 lines.extend(group_lines)
463 return lines
463 return lines
464
464
465
465
466 def simplemerge(ui, localctx, basectx, otherctx, **opts):
466 def simplemerge(ui, localctx, basectx, otherctx, **opts):
467 """Performs the simplemerge algorithm.
467 """Performs the simplemerge algorithm.
468
468
469 The merged result is written into `localctx`.
469 The merged result is written into `localctx`.
470 """
470 """
471
471
472 def readctx(ctx):
472 def readctx(ctx):
473 # Merges were always run in the working copy before, which means
473 # Merges were always run in the working copy before, which means
474 # they used decoded data, if the user defined any repository
474 # they used decoded data, if the user defined any repository
475 # filters.
475 # filters.
476 #
476 #
477 # Maintain that behavior today for BC, though perhaps in the future
477 # Maintain that behavior today for BC, though perhaps in the future
478 # it'd be worth considering whether merging encoded data (what the
478 # it'd be worth considering whether merging encoded data (what the
479 # repository usually sees) might be more useful.
479 # repository usually sees) might be more useful.
480 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
480 return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
481
481
482 try:
482 try:
483 localtext = readctx(localctx)
483 localtext = readctx(localctx)
484 basetext = readctx(basectx)
484 basetext = readctx(basectx)
485 othertext = readctx(otherctx)
485 othertext = readctx(otherctx)
486 except error.Abort:
486 except error.Abort:
487 return 1
487 return 1
488
488
489 m3 = Merge3Text(basetext, localtext, othertext)
489 m3 = Merge3Text(basetext, localtext, othertext)
490 conflicts = False
490 conflicts = False
491 mode = opts.get('mode', b'merge')
491 mode = opts.get('mode', b'merge')
492 if mode == b'union':
492 if mode == b'union':
493 lines = _resolve(m3, (1, 2))
493 lines = _resolve(m3, (1, 2))
494 elif mode == b'local':
494 elif mode == b'local':
495 lines = _resolve(m3, (1,))
495 lines = _resolve(m3, (1,))
496 elif mode == b'other':
496 elif mode == b'other':
497 lines = _resolve(m3, (2,))
497 lines = _resolve(m3, (2,))
498 else:
498 else:
499 name_a, name_b, name_base = _picklabels(
499 name_a, name_b, name_base = _picklabels(
500 [localctx.path(), otherctx.path(), None], opts.get('label', [])
500 [localctx.path(), otherctx.path(), None], opts.get('label', [])
501 )
501 )
502 if mode == b'mergediff':
502 if mode == b'mergediff':
503 lines, conflicts = _mergediff(m3, name_a, name_b, name_base)
503 lines, conflicts = render_mergediff(m3, name_a, name_b, name_base)
504 else:
504 else:
505 extrakwargs = {
505 extrakwargs = {
506 'minimize': True,
506 'minimize': True,
507 }
507 }
508 if mode == b'merge3':
508 if mode == b'merge3':
509 extrakwargs['base_marker'] = b'|||||||'
509 extrakwargs['base_marker'] = b'|||||||'
510 extrakwargs['name_base'] = name_base
510 extrakwargs['name_base'] = name_base
511 extrakwargs['minimize'] = False
511 extrakwargs['minimize'] = False
512 lines, conflicts = merge_lines(
512 lines, conflicts = render_markers(
513 m3, name_a=name_a, name_b=name_b, **extrakwargs
513 m3, name_a=name_a, name_b=name_b, **extrakwargs
514 )
514 )
515
515
516 mergedtext = b''.join(lines)
516 mergedtext = b''.join(lines)
517 if opts.get('print'):
517 if opts.get('print'):
518 ui.fout.write(mergedtext)
518 ui.fout.write(mergedtext)
519 else:
519 else:
520 # localctx.flags() already has the merged flags (done in
520 # localctx.flags() already has the merged flags (done in
521 # mergestate.resolve())
521 # mergestate.resolve())
522 localctx.write(mergedtext, localctx.flags())
522 localctx.write(mergedtext, localctx.flags())
523
523
524 if conflicts:
524 if conflicts:
525 return 1
525 return 1
@@ -1,395 +1,397 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
14 # along with this program; if not, see <http://www.gnu.org/licenses/>.
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import unittest
18 import unittest
19 from mercurial import (
19 from mercurial import (
20 error,
20 error,
21 simplemerge,
21 simplemerge,
22 util,
22 util,
23 )
23 )
24
24
25 from mercurial.utils import stringutil
25 from mercurial.utils import stringutil
26
26
27 TestCase = unittest.TestCase
27 TestCase = unittest.TestCase
28 # bzr compatible interface, for the tests
28 # bzr compatible interface, for the tests
29 class Merge3(simplemerge.Merge3Text):
29 class Merge3(simplemerge.Merge3Text):
30 """3-way merge of texts.
30 """3-way merge of texts.
31
31
32 Given BASE, OTHER, THIS, tries to produce a combined text
32 Given BASE, OTHER, THIS, tries to produce a combined text
33 incorporating the changes from both BASE->OTHER and BASE->THIS.
33 incorporating the changes from both BASE->OTHER and BASE->THIS.
34 All three will typically be sequences of lines."""
34 All three will typically be sequences of lines."""
35
35
36 def __init__(self, base, a, b):
36 def __init__(self, base, a, b):
37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
37 basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
38 atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
39 btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
40 if (
40 if (
41 stringutil.binary(basetext)
41 stringutil.binary(basetext)
42 or stringutil.binary(atext)
42 or stringutil.binary(atext)
43 or stringutil.binary(btext)
43 or stringutil.binary(btext)
44 ):
44 ):
45 raise error.Abort(b"don't know how to merge binary files")
45 raise error.Abort(b"don't know how to merge binary files")
46 simplemerge.Merge3Text.__init__(
46 simplemerge.Merge3Text.__init__(
47 self, basetext, atext, btext, base, a, b
47 self, basetext, atext, btext, base, a, b
48 )
48 )
49
49
50
50
51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
51 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
52
52
53
53
54 def split_lines(t):
54 def split_lines(t):
55 return util.stringio(t).readlines()
55 return util.stringio(t).readlines()
56
56
57
57
58 ############################################################
58 ############################################################
59 # test case data from the gnu diffutils manual
59 # test case data from the gnu diffutils manual
60 # common base
60 # common base
61 TZU = split_lines(
61 TZU = split_lines(
62 b""" The Nameless is the origin of Heaven and Earth;
62 b""" The Nameless is the origin of Heaven and Earth;
63 The named is the mother of all things.
63 The named is the mother of all things.
64
64
65 Therefore let there always be non-being,
65 Therefore let there always be non-being,
66 so we may see their subtlety,
66 so we may see their subtlety,
67 And let there always be being,
67 And let there always be being,
68 so we may see their outcome.
68 so we may see their outcome.
69 The two are the same,
69 The two are the same,
70 But after they are produced,
70 But after they are produced,
71 they have different names.
71 they have different names.
72 They both may be called deep and profound.
72 They both may be called deep and profound.
73 Deeper and more profound,
73 Deeper and more profound,
74 The door of all subtleties!
74 The door of all subtleties!
75 """
75 """
76 )
76 )
77
77
78 LAO = split_lines(
78 LAO = split_lines(
79 b""" The Way that can be told of is not the eternal Way;
79 b""" The Way that can be told of is not the eternal Way;
80 The name that can be named is not the eternal name.
80 The name that can be named is not the eternal name.
81 The Nameless is the origin of Heaven and Earth;
81 The Nameless is the origin of Heaven and Earth;
82 The Named is the mother of all things.
82 The Named is the mother of all things.
83 Therefore let there always be non-being,
83 Therefore let there always be non-being,
84 so we may see their subtlety,
84 so we may see their subtlety,
85 And let there always be being,
85 And let there always be being,
86 so we may see their outcome.
86 so we may see their outcome.
87 The two are the same,
87 The two are the same,
88 But after they are produced,
88 But after they are produced,
89 they have different names.
89 they have different names.
90 """
90 """
91 )
91 )
92
92
93
93
94 TAO = split_lines(
94 TAO = split_lines(
95 b""" The Way that can be told of is not the eternal Way;
95 b""" The Way that can be told of is not the eternal Way;
96 The name that can be named is not the eternal name.
96 The name that can be named is not the eternal name.
97 The Nameless is the origin of Heaven and Earth;
97 The Nameless is the origin of Heaven and Earth;
98 The named is the mother of all things.
98 The named is the mother of all things.
99
99
100 Therefore let there always be non-being,
100 Therefore let there always be non-being,
101 so we may see their subtlety,
101 so we may see their subtlety,
102 And let there always be being,
102 And let there always be being,
103 so we may see their result.
103 so we may see their result.
104 The two are the same,
104 The two are the same,
105 But after they are produced,
105 But after they are produced,
106 they have different names.
106 they have different names.
107
107
108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
108 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
109
109
110 """
110 """
111 )
111 )
112
112
113 MERGED_RESULT = split_lines(
113 MERGED_RESULT = split_lines(
114 b"""\
114 b"""\
115 The Way that can be told of is not the eternal Way;
115 The Way that can be told of is not the eternal Way;
116 The name that can be named is not the eternal name.
116 The name that can be named is not the eternal name.
117 The Nameless is the origin of Heaven and Earth;
117 The Nameless is the origin of Heaven and Earth;
118 The Named is the mother of all things.
118 The Named is the mother of all things.
119 Therefore let there always be non-being,
119 Therefore let there always be non-being,
120 so we may see their subtlety,
120 so we may see their subtlety,
121 And let there always be being,
121 And let there always be being,
122 so we may see their result.
122 so we may see their result.
123 The two are the same,
123 The two are the same,
124 But after they are produced,
124 But after they are produced,
125 they have different names.\
125 they have different names.\
126 \n<<<<<<< LAO\
126 \n<<<<<<< LAO\
127 \n=======
127 \n=======
128
128
129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
129 -- The Way of Lao-Tzu, tr. Wing-tsit Chan
130 \
130 \
131 \n>>>>>>> TAO
131 \n>>>>>>> TAO
132 """
132 """
133 )
133 )
134
134
135
135
136 class TestMerge3(TestCase):
136 class TestMerge3(TestCase):
137 def log(self, msg):
137 def log(self, msg):
138 pass
138 pass
139
139
140 def test_no_changes(self):
140 def test_no_changes(self):
141 """No conflicts because nothing changed"""
141 """No conflicts because nothing changed"""
142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
142 m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
143
143
144 self.assertEqual(
144 self.assertEqual(
145 list(m3.find_sync_regions()),
145 list(m3.find_sync_regions()),
146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
146 [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
147 )
147 )
148
148
149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
149 self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
150
150
151 self.assertEqual(
151 self.assertEqual(
152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
152 list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
153 )
153 )
154
154
155 def test_front_insert(self):
155 def test_front_insert(self):
156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
156 m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
157
157
158 # todo: should use a sentinel at end as from get_matching_blocks
158 # todo: should use a sentinel at end as from get_matching_blocks
159 # to match without zz
159 # to match without zz
160 self.assertEqual(
160 self.assertEqual(
161 list(m3.find_sync_regions()),
161 list(m3.find_sync_regions()),
162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
162 [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
163 )
163 )
164
164
165 self.assertEqual(
165 self.assertEqual(
166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
166 list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
167 )
167 )
168
168
169 self.assertEqual(
169 self.assertEqual(
170 list(m3.merge_groups()),
170 list(m3.merge_groups()),
171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
171 [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
172 )
172 )
173
173
174 def test_null_insert(self):
174 def test_null_insert(self):
175 m3 = Merge3([], [b'aaa', b'bbb'], [])
175 m3 = Merge3([], [b'aaa', b'bbb'], [])
176 # todo: should use a sentinel at end as from get_matching_blocks
176 # todo: should use a sentinel at end as from get_matching_blocks
177 # to match without zz
177 # to match without zz
178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
178 self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
179
179
180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
180 self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
181
181
182 self.assertEqual(simplemerge.merge_lines(m3), ([b'aaa', b'bbb'], False))
182 self.assertEqual(
183 simplemerge.render_markers(m3), ([b'aaa', b'bbb'], False)
184 )
183
185
184 def test_no_conflicts(self):
186 def test_no_conflicts(self):
185 """No conflicts because only one side changed"""
187 """No conflicts because only one side changed"""
186 m3 = Merge3(
188 m3 = Merge3(
187 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
189 [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
188 )
190 )
189
191
190 self.assertEqual(
192 self.assertEqual(
191 list(m3.find_sync_regions()),
193 list(m3.find_sync_regions()),
192 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
194 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
193 )
195 )
194
196
195 self.assertEqual(
197 self.assertEqual(
196 list(m3.merge_regions()),
198 list(m3.merge_regions()),
197 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
199 [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
198 )
200 )
199
201
200 def test_append_a(self):
202 def test_append_a(self):
201 m3 = Merge3(
203 m3 = Merge3(
202 [b'aaa\n', b'bbb\n'],
204 [b'aaa\n', b'bbb\n'],
203 [b'aaa\n', b'bbb\n', b'222\n'],
205 [b'aaa\n', b'bbb\n', b'222\n'],
204 [b'aaa\n', b'bbb\n'],
206 [b'aaa\n', b'bbb\n'],
205 )
207 )
206
208
207 self.assertEqual(
209 self.assertEqual(
208 b''.join(simplemerge.merge_lines(m3)[0]), b'aaa\nbbb\n222\n'
210 b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
209 )
211 )
210
212
211 def test_append_b(self):
213 def test_append_b(self):
212 m3 = Merge3(
214 m3 = Merge3(
213 [b'aaa\n', b'bbb\n'],
215 [b'aaa\n', b'bbb\n'],
214 [b'aaa\n', b'bbb\n'],
216 [b'aaa\n', b'bbb\n'],
215 [b'aaa\n', b'bbb\n', b'222\n'],
217 [b'aaa\n', b'bbb\n', b'222\n'],
216 )
218 )
217
219
218 self.assertEqual(
220 self.assertEqual(
219 b''.join(simplemerge.merge_lines(m3)[0]), b'aaa\nbbb\n222\n'
221 b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
220 )
222 )
221
223
222 def test_append_agreement(self):
224 def test_append_agreement(self):
223 m3 = Merge3(
225 m3 = Merge3(
224 [b'aaa\n', b'bbb\n'],
226 [b'aaa\n', b'bbb\n'],
225 [b'aaa\n', b'bbb\n', b'222\n'],
227 [b'aaa\n', b'bbb\n', b'222\n'],
226 [b'aaa\n', b'bbb\n', b'222\n'],
228 [b'aaa\n', b'bbb\n', b'222\n'],
227 )
229 )
228
230
229 self.assertEqual(
231 self.assertEqual(
230 b''.join(simplemerge.merge_lines(m3)[0]), b'aaa\nbbb\n222\n'
232 b''.join(simplemerge.render_markers(m3)[0]), b'aaa\nbbb\n222\n'
231 )
233 )
232
234
233 def test_append_clash(self):
235 def test_append_clash(self):
234 m3 = Merge3(
236 m3 = Merge3(
235 [b'aaa\n', b'bbb\n'],
237 [b'aaa\n', b'bbb\n'],
236 [b'aaa\n', b'bbb\n', b'222\n'],
238 [b'aaa\n', b'bbb\n', b'222\n'],
237 [b'aaa\n', b'bbb\n', b'333\n'],
239 [b'aaa\n', b'bbb\n', b'333\n'],
238 )
240 )
239
241
240 ml, conflicts = simplemerge.merge_lines(
242 ml, conflicts = simplemerge.render_markers(
241 m3,
243 m3,
242 name_a=b'a',
244 name_a=b'a',
243 name_b=b'b',
245 name_b=b'b',
244 start_marker=b'<<',
246 start_marker=b'<<',
245 mid_marker=b'--',
247 mid_marker=b'--',
246 end_marker=b'>>',
248 end_marker=b'>>',
247 )
249 )
248 self.assertEqual(
250 self.assertEqual(
249 b''.join(ml),
251 b''.join(ml),
250 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
252 b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
251 )
253 )
252
254
253 def test_insert_agreement(self):
255 def test_insert_agreement(self):
254 m3 = Merge3(
256 m3 = Merge3(
255 [b'aaa\n', b'bbb\n'],
257 [b'aaa\n', b'bbb\n'],
256 [b'aaa\n', b'222\n', b'bbb\n'],
258 [b'aaa\n', b'222\n', b'bbb\n'],
257 [b'aaa\n', b'222\n', b'bbb\n'],
259 [b'aaa\n', b'222\n', b'bbb\n'],
258 )
260 )
259
261
260 ml, conflicts = simplemerge.merge_lines(
262 ml, conflicts = simplemerge.render_markers(
261 m3,
263 m3,
262 name_a=b'a',
264 name_a=b'a',
263 name_b=b'b',
265 name_b=b'b',
264 start_marker=b'<<',
266 start_marker=b'<<',
265 mid_marker=b'--',
267 mid_marker=b'--',
266 end_marker=b'>>',
268 end_marker=b'>>',
267 )
269 )
268 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
270 self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
269
271
270 def test_insert_clash(self):
272 def test_insert_clash(self):
271 """Both try to insert lines in the same place."""
273 """Both try to insert lines in the same place."""
272 m3 = Merge3(
274 m3 = Merge3(
273 [b'aaa\n', b'bbb\n'],
275 [b'aaa\n', b'bbb\n'],
274 [b'aaa\n', b'111\n', b'bbb\n'],
276 [b'aaa\n', b'111\n', b'bbb\n'],
275 [b'aaa\n', b'222\n', b'bbb\n'],
277 [b'aaa\n', b'222\n', b'bbb\n'],
276 )
278 )
277
279
278 self.assertEqual(
280 self.assertEqual(
279 list(m3.find_sync_regions()),
281 list(m3.find_sync_regions()),
280 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
282 [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
281 )
283 )
282
284
283 self.assertEqual(
285 self.assertEqual(
284 list(m3.merge_regions()),
286 list(m3.merge_regions()),
285 [
287 [
286 (b'unchanged', 0, 1),
288 (b'unchanged', 0, 1),
287 (b'conflict', 1, 1, 1, 2, 1, 2),
289 (b'conflict', 1, 1, 1, 2, 1, 2),
288 (b'unchanged', 1, 2),
290 (b'unchanged', 1, 2),
289 ],
291 ],
290 )
292 )
291
293
292 self.assertEqual(
294 self.assertEqual(
293 list(m3.merge_groups()),
295 list(m3.merge_groups()),
294 [
296 [
295 (b'unchanged', [b'aaa\n']),
297 (b'unchanged', [b'aaa\n']),
296 (b'conflict', ([], [b'111\n'], [b'222\n'])),
298 (b'conflict', ([], [b'111\n'], [b'222\n'])),
297 (b'unchanged', [b'bbb\n']),
299 (b'unchanged', [b'bbb\n']),
298 ],
300 ],
299 )
301 )
300
302
301 ml, conflicts = simplemerge.merge_lines(
303 ml, conflicts = simplemerge.render_markers(
302 m3,
304 m3,
303 name_a=b'a',
305 name_a=b'a',
304 name_b=b'b',
306 name_b=b'b',
305 start_marker=b'<<',
307 start_marker=b'<<',
306 mid_marker=b'--',
308 mid_marker=b'--',
307 end_marker=b'>>',
309 end_marker=b'>>',
308 )
310 )
309 self.assertEqual(
311 self.assertEqual(
310 b''.join(ml),
312 b''.join(ml),
311 b'''aaa
313 b'''aaa
312 << a
314 << a
313 111
315 111
314 --
316 --
315 222
317 222
316 >> b
318 >> b
317 bbb
319 bbb
318 ''',
320 ''',
319 )
321 )
320
322
321 def test_replace_clash(self):
323 def test_replace_clash(self):
322 """Both try to insert lines in the same place."""
324 """Both try to insert lines in the same place."""
323 m3 = Merge3(
325 m3 = Merge3(
324 [b'aaa', b'000', b'bbb'],
326 [b'aaa', b'000', b'bbb'],
325 [b'aaa', b'111', b'bbb'],
327 [b'aaa', b'111', b'bbb'],
326 [b'aaa', b'222', b'bbb'],
328 [b'aaa', b'222', b'bbb'],
327 )
329 )
328
330
329 self.assertEqual(
331 self.assertEqual(
330 list(m3.find_sync_regions()),
332 list(m3.find_sync_regions()),
331 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
333 [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
332 )
334 )
333
335
334 def test_replace_multi(self):
336 def test_replace_multi(self):
335 """Replacement with regions of different size."""
337 """Replacement with regions of different size."""
336 m3 = Merge3(
338 m3 = Merge3(
337 [b'aaa', b'000', b'000', b'bbb'],
339 [b'aaa', b'000', b'000', b'bbb'],
338 [b'aaa', b'111', b'111', b'111', b'bbb'],
340 [b'aaa', b'111', b'111', b'111', b'bbb'],
339 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
341 [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
340 )
342 )
341
343
342 self.assertEqual(
344 self.assertEqual(
343 list(m3.find_sync_regions()),
345 list(m3.find_sync_regions()),
344 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
346 [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
345 )
347 )
346
348
347 def test_merge_poem(self):
349 def test_merge_poem(self):
348 """Test case from diff3 manual"""
350 """Test case from diff3 manual"""
349 m3 = Merge3(TZU, LAO, TAO)
351 m3 = Merge3(TZU, LAO, TAO)
350 ml, conflicts = simplemerge.merge_lines(m3, b'LAO', b'TAO')
352 ml, conflicts = simplemerge.render_markers(m3, b'LAO', b'TAO')
351 self.log(b'merge result:')
353 self.log(b'merge result:')
352 self.log(b''.join(ml))
354 self.log(b''.join(ml))
353 self.assertEqual(ml, MERGED_RESULT)
355 self.assertEqual(ml, MERGED_RESULT)
354
356
355 def test_binary(self):
357 def test_binary(self):
356 with self.assertRaises(error.Abort):
358 with self.assertRaises(error.Abort):
357 Merge3([b'\x00'], [b'a'], [b'b'])
359 Merge3([b'\x00'], [b'a'], [b'b'])
358
360
359 def test_dos_text(self):
361 def test_dos_text(self):
360 base_text = b'a\r\n'
362 base_text = b'a\r\n'
361 this_text = b'b\r\n'
363 this_text = b'b\r\n'
362 other_text = b'c\r\n'
364 other_text = b'c\r\n'
363 m3 = Merge3(
365 m3 = Merge3(
364 base_text.splitlines(True),
366 base_text.splitlines(True),
365 other_text.splitlines(True),
367 other_text.splitlines(True),
366 this_text.splitlines(True),
368 this_text.splitlines(True),
367 )
369 )
368 m_lines, conflicts = simplemerge.merge_lines(m3, b'OTHER', b'THIS')
370 m_lines, conflicts = simplemerge.render_markers(m3, b'OTHER', b'THIS')
369 self.assertEqual(
371 self.assertEqual(
370 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
372 b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
371 b'>>>>>>> THIS\r\n'.splitlines(True),
373 b'>>>>>>> THIS\r\n'.splitlines(True),
372 m_lines,
374 m_lines,
373 )
375 )
374
376
375 def test_mac_text(self):
377 def test_mac_text(self):
376 base_text = b'a\r'
378 base_text = b'a\r'
377 this_text = b'b\r'
379 this_text = b'b\r'
378 other_text = b'c\r'
380 other_text = b'c\r'
379 m3 = Merge3(
381 m3 = Merge3(
380 base_text.splitlines(True),
382 base_text.splitlines(True),
381 other_text.splitlines(True),
383 other_text.splitlines(True),
382 this_text.splitlines(True),
384 this_text.splitlines(True),
383 )
385 )
384 m_lines, conflicts = simplemerge.merge_lines(m3, b'OTHER', b'THIS')
386 m_lines, conflicts = simplemerge.render_markers(m3, b'OTHER', b'THIS')
385 self.assertEqual(
387 self.assertEqual(
386 b'<<<<<<< OTHER\rc\r=======\rb\r'
388 b'<<<<<<< OTHER\rc\r=======\rb\r'
387 b'>>>>>>> THIS\r'.splitlines(True),
389 b'>>>>>>> THIS\r'.splitlines(True),
388 m_lines,
390 m_lines,
389 )
391 )
390
392
391
393
392 if __name__ == '__main__':
394 if __name__ == '__main__':
393 import silenttestrunner
395 import silenttestrunner
394
396
395 silenttestrunner.main(__name__)
397 silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now