debugmergestate: show extras for files which are not in mergestate...
Pulkit Goyal
r46016:766797f2 default
@@ -1,4529 +1,4542 b''
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import glob
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    bundlerepo,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    mergestate as mergestatemod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil,
    nodemap,
)

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b' %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
730 opts = pycompat.byteskwargs(opts)
730 opts = pycompat.byteskwargs(opts)
731 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
731 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
732 index = r.index
732 index = r.index
733 start = r.start
733 start = r.start
734 length = r.length
734 length = r.length
735 generaldelta = r.version & revlog.FLAG_GENERALDELTA
735 generaldelta = r.version & revlog.FLAG_GENERALDELTA
736 withsparseread = getattr(r, '_withsparseread', False)
736 withsparseread = getattr(r, '_withsparseread', False)
737
737
738 def revinfo(rev):
738 def revinfo(rev):
739 e = index[rev]
739 e = index[rev]
740 compsize = e[1]
740 compsize = e[1]
741 uncompsize = e[2]
741 uncompsize = e[2]
742 chainsize = 0
742 chainsize = 0
743
743
744 if generaldelta:
744 if generaldelta:
745 if e[3] == e[5]:
745 if e[3] == e[5]:
746 deltatype = b'p1'
746 deltatype = b'p1'
747 elif e[3] == e[6]:
747 elif e[3] == e[6]:
748 deltatype = b'p2'
748 deltatype = b'p2'
749 elif e[3] == rev - 1:
749 elif e[3] == rev - 1:
750 deltatype = b'prev'
750 deltatype = b'prev'
751 elif e[3] == rev:
751 elif e[3] == rev:
752 deltatype = b'base'
752 deltatype = b'base'
753 else:
753 else:
754 deltatype = b'other'
754 deltatype = b'other'
755 else:
755 else:
756 if e[3] == rev:
756 if e[3] == rev:
757 deltatype = b'base'
757 deltatype = b'base'
758 else:
758 else:
759 deltatype = b'prev'
759 deltatype = b'prev'
760
760
761 chain = r._deltachain(rev)[0]
761 chain = r._deltachain(rev)[0]
762 for iterrev in chain:
762 for iterrev in chain:
763 e = index[iterrev]
763 e = index[iterrev]
764 chainsize += e[1]
764 chainsize += e[1]
765
765
766 return compsize, uncompsize, deltatype, chain, chainsize
766 return compsize, uncompsize, deltatype, chain, chainsize
767
767
768 fm = ui.formatter(b'debugdeltachain', opts)
768 fm = ui.formatter(b'debugdeltachain', opts)
769
769
770 fm.plain(
770 fm.plain(
771 b' rev chain# chainlen prev delta '
771 b' rev chain# chainlen prev delta '
772 b'size rawsize chainsize ratio lindist extradist '
772 b'size rawsize chainsize ratio lindist extradist '
773 b'extraratio'
773 b'extraratio'
774 )
774 )
775 if withsparseread:
775 if withsparseread:
776 fm.plain(b' readsize largestblk rddensity srchunks')
776 fm.plain(b' readsize largestblk rddensity srchunks')
777 fm.plain(b'\n')
777 fm.plain(b'\n')
778
778
779 chainbases = {}
779 chainbases = {}
780 for rev in r:
780 for rev in r:
781 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
781 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
782 chainbase = chain[0]
782 chainbase = chain[0]
783 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
783 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
784 basestart = start(chainbase)
784 basestart = start(chainbase)
785 revstart = start(rev)
785 revstart = start(rev)
786 lineardist = revstart + comp - basestart
786 lineardist = revstart + comp - basestart
787 extradist = lineardist - chainsize
787 extradist = lineardist - chainsize
788 try:
788 try:
789 prevrev = chain[-2]
789 prevrev = chain[-2]
790 except IndexError:
790 except IndexError:
791 prevrev = -1
791 prevrev = -1
792
792
793 if uncomp != 0:
793 if uncomp != 0:
794 chainratio = float(chainsize) / float(uncomp)
794 chainratio = float(chainsize) / float(uncomp)
795 else:
795 else:
796 chainratio = chainsize
796 chainratio = chainsize
797
797
798 if chainsize != 0:
798 if chainsize != 0:
799 extraratio = float(extradist) / float(chainsize)
799 extraratio = float(extradist) / float(chainsize)
800 else:
800 else:
801 extraratio = extradist
801 extraratio = extradist
802
802
803 fm.startitem()
803 fm.startitem()
804 fm.write(
804 fm.write(
805 b'rev chainid chainlen prevrev deltatype compsize '
805 b'rev chainid chainlen prevrev deltatype compsize '
806 b'uncompsize chainsize chainratio lindist extradist '
806 b'uncompsize chainsize chainratio lindist extradist '
807 b'extraratio',
807 b'extraratio',
808 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
808 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
809 rev,
809 rev,
810 chainid,
810 chainid,
811 len(chain),
811 len(chain),
812 prevrev,
812 prevrev,
813 deltatype,
813 deltatype,
814 comp,
814 comp,
815 uncomp,
815 uncomp,
816 chainsize,
816 chainsize,
817 chainratio,
817 chainratio,
818 lineardist,
818 lineardist,
819 extradist,
819 extradist,
820 extraratio,
820 extraratio,
821 rev=rev,
821 rev=rev,
822 chainid=chainid,
822 chainid=chainid,
823 chainlen=len(chain),
823 chainlen=len(chain),
824 prevrev=prevrev,
824 prevrev=prevrev,
825 deltatype=deltatype,
825 deltatype=deltatype,
826 compsize=comp,
826 compsize=comp,
827 uncompsize=uncomp,
827 uncompsize=uncomp,
828 chainsize=chainsize,
828 chainsize=chainsize,
829 chainratio=chainratio,
829 chainratio=chainratio,
830 lindist=lineardist,
830 lindist=lineardist,
831 extradist=extradist,
831 extradist=extradist,
832 extraratio=extraratio,
832 extraratio=extraratio,
833 )
833 )
834 if withsparseread:
834 if withsparseread:
835 readsize = 0
835 readsize = 0
836 largestblock = 0
836 largestblock = 0
837 srchunks = 0
837 srchunks = 0
838
838
839 for revschunk in deltautil.slicechunk(r, chain):
839 for revschunk in deltautil.slicechunk(r, chain):
840 srchunks += 1
840 srchunks += 1
841 blkend = start(revschunk[-1]) + length(revschunk[-1])
841 blkend = start(revschunk[-1]) + length(revschunk[-1])
842 blksize = blkend - start(revschunk[0])
842 blksize = blkend - start(revschunk[0])
843
843
844 readsize += blksize
844 readsize += blksize
845 if largestblock < blksize:
845 if largestblock < blksize:
846 largestblock = blksize
846 largestblock = blksize
847
847
848 if readsize:
848 if readsize:
849 readdensity = float(chainsize) / float(readsize)
849 readdensity = float(chainsize) / float(readsize)
850 else:
850 else:
851 readdensity = 1
851 readdensity = 1
852
852
853 fm.write(
853 fm.write(
854 b'readsize largestblock readdensity srchunks',
854 b'readsize largestblock readdensity srchunks',
855 b' %10d %10d %9.5f %8d',
855 b' %10d %10d %9.5f %8d',
856 readsize,
856 readsize,
857 largestblock,
857 largestblock,
858 readdensity,
858 readdensity,
859 srchunks,
859 srchunks,
860 readsize=readsize,
860 readsize=readsize,
861 largestblock=largestblock,
861 largestblock=largestblock,
862 readdensity=readdensity,
862 readdensity=readdensity,
863 srchunks=srchunks,
863 srchunks=srchunks,
864 )
864 )
865
865
866 fm.plain(b'\n')
866 fm.plain(b'\n')
867
867
868 fm.end()
868 fm.end()
869
869
870
870
871 @command(
871 @command(
872 b'debugdirstate|debugstate',
872 b'debugdirstate|debugstate',
873 [
873 [
874 (
874 (
875 b'',
875 b'',
876 b'nodates',
876 b'nodates',
877 None,
877 None,
878 _(b'do not display the saved mtime (DEPRECATED)'),
878 _(b'do not display the saved mtime (DEPRECATED)'),
879 ),
879 ),
880 (b'', b'dates', True, _(b'display the saved mtime')),
880 (b'', b'dates', True, _(b'display the saved mtime')),
881 (b'', b'datesort', None, _(b'sort by saved mtime')),
881 (b'', b'datesort', None, _(b'sort by saved mtime')),
882 ],
882 ],
883 _(b'[OPTION]...'),
883 _(b'[OPTION]...'),
884 )
884 )
885 def debugstate(ui, repo, **opts):
885 def debugstate(ui, repo, **opts):
886 """show the contents of the current dirstate"""
886 """show the contents of the current dirstate"""
887
887
888 nodates = not opts['dates']
888 nodates = not opts['dates']
889 if opts.get('nodates') is not None:
889 if opts.get('nodates') is not None:
890 nodates = True
890 nodates = True
891 datesort = opts.get('datesort')
891 datesort = opts.get('datesort')
892
892
893 if datesort:
893 if datesort:
894 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
894 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
895 else:
895 else:
896 keyfunc = None # sort by filename
896 keyfunc = None # sort by filename
897 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
897 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
898 if ent[3] == -1:
898 if ent[3] == -1:
899 timestr = b'unset '
899 timestr = b'unset '
900 elif nodates:
900 elif nodates:
901 timestr = b'set '
901 timestr = b'set '
902 else:
902 else:
903 timestr = time.strftime(
903 timestr = time.strftime(
904 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
904 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
905 )
905 )
906 timestr = encoding.strtolocal(timestr)
906 timestr = encoding.strtolocal(timestr)
907 if ent[1] & 0o20000:
907 if ent[1] & 0o20000:
908 mode = b'lnk'
908 mode = b'lnk'
909 else:
909 else:
910 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
910 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
911 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
911 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
912 for f in repo.dirstate.copies():
912 for f in repo.dirstate.copies():
913 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
913 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
914
914
915
915
916 @command(
916 @command(
917 b'debugdiscovery',
917 b'debugdiscovery',
918 [
918 [
919 (b'', b'old', None, _(b'use old-style discovery')),
919 (b'', b'old', None, _(b'use old-style discovery')),
920 (
920 (
921 b'',
921 b'',
922 b'nonheads',
922 b'nonheads',
923 None,
923 None,
924 _(b'use old-style discovery with non-heads included'),
924 _(b'use old-style discovery with non-heads included'),
925 ),
925 ),
926 (b'', b'rev', [], b'restrict discovery to this set of revs'),
926 (b'', b'rev', [], b'restrict discovery to this set of revs'),
927 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
927 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
928 ]
928 ]
929 + cmdutil.remoteopts,
929 + cmdutil.remoteopts,
930 _(b'[--rev REV] [OTHER]'),
930 _(b'[--rev REV] [OTHER]'),
931 )
931 )
932 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
932 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
933 """runs the changeset discovery protocol in isolation"""
933 """runs the changeset discovery protocol in isolation"""
934 opts = pycompat.byteskwargs(opts)
934 opts = pycompat.byteskwargs(opts)
935 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
935 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
936 remote = hg.peer(repo, opts, remoteurl)
936 remote = hg.peer(repo, opts, remoteurl)
937 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
937 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
938
938
939 # make sure tests are repeatable
939 # make sure tests are repeatable
940 random.seed(int(opts[b'seed']))
940 random.seed(int(opts[b'seed']))
941
941
942 if opts.get(b'old'):
942 if opts.get(b'old'):
943
943
944 def doit(pushedrevs, remoteheads, remote=remote):
944 def doit(pushedrevs, remoteheads, remote=remote):
945 if not util.safehasattr(remote, b'branches'):
945 if not util.safehasattr(remote, b'branches'):
946 # enable in-client legacy support
946 # enable in-client legacy support
947 remote = localrepo.locallegacypeer(remote.local())
947 remote = localrepo.locallegacypeer(remote.local())
948 common, _in, hds = treediscovery.findcommonincoming(
948 common, _in, hds = treediscovery.findcommonincoming(
949 repo, remote, force=True
949 repo, remote, force=True
950 )
950 )
951 common = set(common)
951 common = set(common)
952 if not opts.get(b'nonheads'):
952 if not opts.get(b'nonheads'):
953 ui.writenoi18n(
953 ui.writenoi18n(
954 b"unpruned common: %s\n"
954 b"unpruned common: %s\n"
955 % b" ".join(sorted(short(n) for n in common))
955 % b" ".join(sorted(short(n) for n in common))
956 )
956 )
957
957
958 clnode = repo.changelog.node
958 clnode = repo.changelog.node
959 common = repo.revs(b'heads(::%ln)', common)
959 common = repo.revs(b'heads(::%ln)', common)
960 common = {clnode(r) for r in common}
960 common = {clnode(r) for r in common}
961 return common, hds
961 return common, hds
962
962
963 else:
963 else:
964
964
965 def doit(pushedrevs, remoteheads, remote=remote):
965 def doit(pushedrevs, remoteheads, remote=remote):
966 nodes = None
966 nodes = None
967 if pushedrevs:
967 if pushedrevs:
968 revs = scmutil.revrange(repo, pushedrevs)
968 revs = scmutil.revrange(repo, pushedrevs)
969 nodes = [repo[r].node() for r in revs]
969 nodes = [repo[r].node() for r in revs]
970 common, any, hds = setdiscovery.findcommonheads(
970 common, any, hds = setdiscovery.findcommonheads(
971 ui, repo, remote, ancestorsof=nodes
971 ui, repo, remote, ancestorsof=nodes
972 )
972 )
973 return common, hds
973 return common, hds
974
974
975 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
975 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
976 localrevs = opts[b'rev']
976 localrevs = opts[b'rev']
977 with util.timedcm('debug-discovery') as t:
977 with util.timedcm('debug-discovery') as t:
978 common, hds = doit(localrevs, remoterevs)
978 common, hds = doit(localrevs, remoterevs)
979
979
980 # compute all statistics
980 # compute all statistics
981 common = set(common)
981 common = set(common)
982 rheads = set(hds)
982 rheads = set(hds)
983 lheads = set(repo.heads())
983 lheads = set(repo.heads())
984
984
985 data = {}
985 data = {}
986 data[b'elapsed'] = t.elapsed
986 data[b'elapsed'] = t.elapsed
987 data[b'nb-common'] = len(common)
987 data[b'nb-common'] = len(common)
988 data[b'nb-common-local'] = len(common & lheads)
988 data[b'nb-common-local'] = len(common & lheads)
989 data[b'nb-common-remote'] = len(common & rheads)
989 data[b'nb-common-remote'] = len(common & rheads)
990 data[b'nb-common-both'] = len(common & rheads & lheads)
990 data[b'nb-common-both'] = len(common & rheads & lheads)
991 data[b'nb-local'] = len(lheads)
991 data[b'nb-local'] = len(lheads)
992 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
992 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
993 data[b'nb-remote'] = len(rheads)
993 data[b'nb-remote'] = len(rheads)
994 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
994 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
995 data[b'nb-revs'] = len(repo.revs(b'all()'))
995 data[b'nb-revs'] = len(repo.revs(b'all()'))
996 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
996 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
997 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
997 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
998
998
999 # display discovery summary
999 # display discovery summary
1000 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1000 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1001 ui.writenoi18n(b"heads summary:\n")
1001 ui.writenoi18n(b"heads summary:\n")
1002 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
1002 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
1003 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
1003 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
1004 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
1004 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
1005 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
1005 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
1006 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
1006 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
1007 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
1007 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
1008 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
1008 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
1009 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
1009 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
1010 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
1010 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
1011 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
1011 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
1012 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1012 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1013 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1013 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1014 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1014 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1015
1015
1016 if ui.verbose:
1016 if ui.verbose:
1017 ui.writenoi18n(
1017 ui.writenoi18n(
1018 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1018 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1019 )
1019 )
1020
1020
1021
1021
1022 _chunksize = 4 << 10
1022 _chunksize = 4 << 10
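# 4 << 10 == 4096, so debugdownload below reads and writes the resource in
# 4 KiB chunks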
1023
1023
1024
1024
1025 @command(
1025 @command(
1026 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1026 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1027 )
1027 )
1028 def debugdownload(ui, repo, url, output=None, **opts):
1028 def debugdownload(ui, repo, url, output=None, **opts):
1029 """download a resource using Mercurial logic and config
1029 """download a resource using Mercurial logic and config
1030 """
1030 """
1031 fh = urlmod.open(ui, url, output)
1031 fh = urlmod.open(ui, url, output)
1032
1032
1033 dest = ui
1033 dest = ui
1034 if output:
1034 if output:
1035 dest = open(output, b"wb", _chunksize)
1035 dest = open(output, b"wb", _chunksize)
1036 try:
1036 try:
1037 data = fh.read(_chunksize)
1037 data = fh.read(_chunksize)
1038 while data:
1038 while data:
1039 dest.write(data)
1039 dest.write(data)
1040 data = fh.read(_chunksize)
1040 data = fh.read(_chunksize)
1041 finally:
1041 finally:
1042 if output:
1042 if output:
1043 dest.close()
1043 dest.close()
1044
1044
1045
1045
1046 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1046 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1047 def debugextensions(ui, repo, **opts):
1047 def debugextensions(ui, repo, **opts):
1048 '''show information about active extensions'''
1048 '''show information about active extensions'''
1049 opts = pycompat.byteskwargs(opts)
1049 opts = pycompat.byteskwargs(opts)
1050 exts = extensions.extensions(ui)
1050 exts = extensions.extensions(ui)
1051 hgver = util.version()
1051 hgver = util.version()
1052 fm = ui.formatter(b'debugextensions', opts)
1052 fm = ui.formatter(b'debugextensions', opts)
1053 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1053 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1054 isinternal = extensions.ismoduleinternal(extmod)
1054 isinternal = extensions.ismoduleinternal(extmod)
1055 extsource = None
1055 extsource = None
1056
1056
1057 if util.safehasattr(extmod, '__file__'):
1057 if util.safehasattr(extmod, '__file__'):
1058 extsource = pycompat.fsencode(extmod.__file__)
1058 extsource = pycompat.fsencode(extmod.__file__)
1059 elif getattr(sys, 'oxidized', False):
1059 elif getattr(sys, 'oxidized', False):
1060 extsource = pycompat.sysexecutable
1060 extsource = pycompat.sysexecutable
1061 if isinternal:
1061 if isinternal:
1062 exttestedwith = [] # never expose magic string to users
1062 exttestedwith = [] # never expose magic string to users
1063 else:
1063 else:
1064 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1064 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1065 extbuglink = getattr(extmod, 'buglink', None)
1065 extbuglink = getattr(extmod, 'buglink', None)
1066
1066
1067 fm.startitem()
1067 fm.startitem()
1068
1068
1069 if ui.quiet or ui.verbose:
1069 if ui.quiet or ui.verbose:
1070 fm.write(b'name', b'%s\n', extname)
1070 fm.write(b'name', b'%s\n', extname)
1071 else:
1071 else:
1072 fm.write(b'name', b'%s', extname)
1072 fm.write(b'name', b'%s', extname)
1073 if isinternal or hgver in exttestedwith:
1073 if isinternal or hgver in exttestedwith:
1074 fm.plain(b'\n')
1074 fm.plain(b'\n')
1075 elif not exttestedwith:
1075 elif not exttestedwith:
1076 fm.plain(_(b' (untested!)\n'))
1076 fm.plain(_(b' (untested!)\n'))
1077 else:
1077 else:
1078 lasttestedversion = exttestedwith[-1]
1078 lasttestedversion = exttestedwith[-1]
1079 fm.plain(b' (%s!)\n' % lasttestedversion)
1079 fm.plain(b' (%s!)\n' % lasttestedversion)
1080
1080
1081 fm.condwrite(
1081 fm.condwrite(
1082 ui.verbose and extsource,
1082 ui.verbose and extsource,
1083 b'source',
1083 b'source',
1084 _(b' location: %s\n'),
1084 _(b' location: %s\n'),
1085 extsource or b"",
1085 extsource or b"",
1086 )
1086 )
1087
1087
1088 if ui.verbose:
1088 if ui.verbose:
1089 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1089 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1090 fm.data(bundled=isinternal)
1090 fm.data(bundled=isinternal)
1091
1091
1092 fm.condwrite(
1092 fm.condwrite(
1093 ui.verbose and exttestedwith,
1093 ui.verbose and exttestedwith,
1094 b'testedwith',
1094 b'testedwith',
1095 _(b' tested with: %s\n'),
1095 _(b' tested with: %s\n'),
1096 fm.formatlist(exttestedwith, name=b'ver'),
1096 fm.formatlist(exttestedwith, name=b'ver'),
1097 )
1097 )
1098
1098
1099 fm.condwrite(
1099 fm.condwrite(
1100 ui.verbose and extbuglink,
1100 ui.verbose and extbuglink,
1101 b'buglink',
1101 b'buglink',
1102 _(b' bug reporting: %s\n'),
1102 _(b' bug reporting: %s\n'),
1103 extbuglink or b"",
1103 extbuglink or b"",
1104 )
1104 )
1105
1105
1106 fm.end()
1106 fm.end()
1107
1107
1108
1108
1109 @command(
1109 @command(
1110 b'debugfileset',
1110 b'debugfileset',
1111 [
1111 [
1112 (
1112 (
1113 b'r',
1113 b'r',
1114 b'rev',
1114 b'rev',
1115 b'',
1115 b'',
1116 _(b'apply the filespec on this revision'),
1116 _(b'apply the filespec on this revision'),
1117 _(b'REV'),
1117 _(b'REV'),
1118 ),
1118 ),
1119 (
1119 (
1120 b'',
1120 b'',
1121 b'all-files',
1121 b'all-files',
1122 False,
1122 False,
1123 _(b'test files from all revisions and working directory'),
1123 _(b'test files from all revisions and working directory'),
1124 ),
1124 ),
1125 (
1125 (
1126 b's',
1126 b's',
1127 b'show-matcher',
1127 b'show-matcher',
1128 None,
1128 None,
1129 _(b'print internal representation of matcher'),
1129 _(b'print internal representation of matcher'),
1130 ),
1130 ),
1131 (
1131 (
1132 b'p',
1132 b'p',
1133 b'show-stage',
1133 b'show-stage',
1134 [],
1134 [],
1135 _(b'print parsed tree at the given stage'),
1135 _(b'print parsed tree at the given stage'),
1136 _(b'NAME'),
1136 _(b'NAME'),
1137 ),
1137 ),
1138 ],
1138 ],
1139 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1139 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1140 )
1140 )
1141 def debugfileset(ui, repo, expr, **opts):
1141 def debugfileset(ui, repo, expr, **opts):
1142 '''parse and apply a fileset specification'''
1142 '''parse and apply a fileset specification'''
1143 from . import fileset
1143 from . import fileset
1144
1144
1145 fileset.symbols # force import of fileset so we have predicates to optimize
1145 fileset.symbols # force import of fileset so we have predicates to optimize
1146 opts = pycompat.byteskwargs(opts)
1146 opts = pycompat.byteskwargs(opts)
1147 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1147 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1148
1148
1149 stages = [
1149 stages = [
1150 (b'parsed', pycompat.identity),
1150 (b'parsed', pycompat.identity),
1151 (b'analyzed', filesetlang.analyze),
1151 (b'analyzed', filesetlang.analyze),
1152 (b'optimized', filesetlang.optimize),
1152 (b'optimized', filesetlang.optimize),
1153 ]
1153 ]
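# the fileset expression is rewritten stage by stage (parsed -> analyzed ->
# optimized); --show-stage prints the tree after any of the named stages,
# and --show-stage=all prints it after every stage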
1154 stagenames = {n for n, f in stages}
1154 stagenames = {n for n, f in stages}
1155
1155
1156 showalways = set()
1156 showalways = set()
1157 if ui.verbose and not opts[b'show_stage']:
1157 if ui.verbose and not opts[b'show_stage']:
1158 # show parsed tree by --verbose (deprecated)
1158 # show parsed tree by --verbose (deprecated)
1159 showalways.add(b'parsed')
1159 showalways.add(b'parsed')
1160 if opts[b'show_stage'] == [b'all']:
1160 if opts[b'show_stage'] == [b'all']:
1161 showalways.update(stagenames)
1161 showalways.update(stagenames)
1162 else:
1162 else:
1163 for n in opts[b'show_stage']:
1163 for n in opts[b'show_stage']:
1164 if n not in stagenames:
1164 if n not in stagenames:
1165 raise error.Abort(_(b'invalid stage name: %s') % n)
1165 raise error.Abort(_(b'invalid stage name: %s') % n)
1166 showalways.update(opts[b'show_stage'])
1166 showalways.update(opts[b'show_stage'])
1167
1167
1168 tree = filesetlang.parse(expr)
1168 tree = filesetlang.parse(expr)
1169 for n, f in stages:
1169 for n, f in stages:
1170 tree = f(tree)
1170 tree = f(tree)
1171 if n in showalways:
1171 if n in showalways:
1172 if opts[b'show_stage'] or n != b'parsed':
1172 if opts[b'show_stage'] or n != b'parsed':
1173 ui.write(b"* %s:\n" % n)
1173 ui.write(b"* %s:\n" % n)
1174 ui.write(filesetlang.prettyformat(tree), b"\n")
1174 ui.write(filesetlang.prettyformat(tree), b"\n")
1175
1175
1176 files = set()
1176 files = set()
1177 if opts[b'all_files']:
1177 if opts[b'all_files']:
1178 for r in repo:
1178 for r in repo:
1179 c = repo[r]
1179 c = repo[r]
1180 files.update(c.files())
1180 files.update(c.files())
1181 files.update(c.substate)
1181 files.update(c.substate)
1182 if opts[b'all_files'] or ctx.rev() is None:
1182 if opts[b'all_files'] or ctx.rev() is None:
1183 wctx = repo[None]
1183 wctx = repo[None]
1184 files.update(
1184 files.update(
1185 repo.dirstate.walk(
1185 repo.dirstate.walk(
1186 scmutil.matchall(repo),
1186 scmutil.matchall(repo),
1187 subrepos=list(wctx.substate),
1187 subrepos=list(wctx.substate),
1188 unknown=True,
1188 unknown=True,
1189 ignored=True,
1189 ignored=True,
1190 )
1190 )
1191 )
1191 )
1192 files.update(wctx.substate)
1192 files.update(wctx.substate)
1193 else:
1193 else:
1194 files.update(ctx.files())
1194 files.update(ctx.files())
1195 files.update(ctx.substate)
1195 files.update(ctx.substate)
1196
1196
1197 m = ctx.matchfileset(repo.getcwd(), expr)
1197 m = ctx.matchfileset(repo.getcwd(), expr)
1198 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1198 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1199 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1199 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1200 for f in sorted(files):
1200 for f in sorted(files):
1201 if not m(f):
1201 if not m(f):
1202 continue
1202 continue
1203 ui.write(b"%s\n" % f)
1203 ui.write(b"%s\n" % f)
1204
1204
1205
1205
1206 @command(b'debugformat', [] + cmdutil.formatteropts)
1206 @command(b'debugformat', [] + cmdutil.formatteropts)
1207 def debugformat(ui, repo, **opts):
1207 def debugformat(ui, repo, **opts):
1208 """display format information about the current repository
1208 """display format information about the current repository
1209
1209
1210 Use --verbose to get extra information about the current config value and
1210 Use --verbose to get extra information about the current config value and
1211 the Mercurial default."""
1211 the Mercurial default."""
1212 opts = pycompat.byteskwargs(opts)
1212 opts = pycompat.byteskwargs(opts)
1213 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1213 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1214 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1214 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1215
1215
1216 def makeformatname(name):
1216 def makeformatname(name):
1217 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1217 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1218
1218
1219 fm = ui.formatter(b'debugformat', opts)
1219 fm = ui.formatter(b'debugformat', opts)
1220 if fm.isplain():
1220 if fm.isplain():
1221
1221
1222 def formatvalue(value):
1222 def formatvalue(value):
1223 if util.safehasattr(value, b'startswith'):
1223 if util.safehasattr(value, b'startswith'):
1224 return value
1224 return value
1225 if value:
1225 if value:
1226 return b'yes'
1226 return b'yes'
1227 else:
1227 else:
1228 return b'no'
1228 return b'no'
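# bytes values (detected via the startswith check) pass through unchanged;
# other values collapse to b'yes'/b'no' in the plain formatter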
1229
1229
1230 else:
1230 else:
1231 formatvalue = pycompat.identity
1231 formatvalue = pycompat.identity
1232
1232
1233 fm.plain(b'format-variant')
1233 fm.plain(b'format-variant')
1234 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1234 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1235 fm.plain(b' repo')
1235 fm.plain(b' repo')
1236 if ui.verbose:
1236 if ui.verbose:
1237 fm.plain(b' config default')
1237 fm.plain(b' config default')
1238 fm.plain(b'\n')
1238 fm.plain(b'\n')
1239 for fv in upgrade.allformatvariant:
1239 for fv in upgrade.allformatvariant:
1240 fm.startitem()
1240 fm.startitem()
1241 repovalue = fv.fromrepo(repo)
1241 repovalue = fv.fromrepo(repo)
1242 configvalue = fv.fromconfig(repo)
1242 configvalue = fv.fromconfig(repo)
1243
1243
1244 if repovalue != configvalue:
1244 if repovalue != configvalue:
1245 namelabel = b'formatvariant.name.mismatchconfig'
1245 namelabel = b'formatvariant.name.mismatchconfig'
1246 repolabel = b'formatvariant.repo.mismatchconfig'
1246 repolabel = b'formatvariant.repo.mismatchconfig'
1247 elif repovalue != fv.default:
1247 elif repovalue != fv.default:
1248 namelabel = b'formatvariant.name.mismatchdefault'
1248 namelabel = b'formatvariant.name.mismatchdefault'
1249 repolabel = b'formatvariant.repo.mismatchdefault'
1249 repolabel = b'formatvariant.repo.mismatchdefault'
1250 else:
1250 else:
1251 namelabel = b'formatvariant.name.uptodate'
1251 namelabel = b'formatvariant.name.uptodate'
1252 repolabel = b'formatvariant.repo.uptodate'
1252 repolabel = b'formatvariant.repo.uptodate'
1253
1253
1254 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1254 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1255 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1255 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1256 if fv.default != configvalue:
1256 if fv.default != configvalue:
1257 configlabel = b'formatvariant.config.special'
1257 configlabel = b'formatvariant.config.special'
1258 else:
1258 else:
1259 configlabel = b'formatvariant.config.default'
1259 configlabel = b'formatvariant.config.default'
1260 fm.condwrite(
1260 fm.condwrite(
1261 ui.verbose,
1261 ui.verbose,
1262 b'config',
1262 b'config',
1263 b' %6s',
1263 b' %6s',
1264 formatvalue(configvalue),
1264 formatvalue(configvalue),
1265 label=configlabel,
1265 label=configlabel,
1266 )
1266 )
1267 fm.condwrite(
1267 fm.condwrite(
1268 ui.verbose,
1268 ui.verbose,
1269 b'default',
1269 b'default',
1270 b' %7s',
1270 b' %7s',
1271 formatvalue(fv.default),
1271 formatvalue(fv.default),
1272 label=b'formatvariant.default',
1272 label=b'formatvariant.default',
1273 )
1273 )
1274 fm.plain(b'\n')
1274 fm.plain(b'\n')
1275 fm.end()
1275 fm.end()
1276
1276
1277
1277
1278 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1278 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1279 def debugfsinfo(ui, path=b"."):
1279 def debugfsinfo(ui, path=b"."):
1280 """show information detected about current filesystem"""
1280 """show information detected about current filesystem"""
1281 ui.writenoi18n(b'path: %s\n' % path)
1281 ui.writenoi18n(b'path: %s\n' % path)
1282 ui.writenoi18n(
1282 ui.writenoi18n(
1283 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1283 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1284 )
1284 )
1285 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1285 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1286 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1286 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1287 ui.writenoi18n(
1287 ui.writenoi18n(
1288 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1288 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1289 )
1289 )
1290 ui.writenoi18n(
1290 ui.writenoi18n(
1291 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1291 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1292 )
1292 )
1293 casesensitive = b'(unknown)'
1293 casesensitive = b'(unknown)'
1294 try:
1294 try:
1295 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1295 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1296 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1296 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1297 except OSError:
1297 except OSError:
1298 pass
1298 pass
1299 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1299 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1300
1300
1301
1301
1302 @command(
1302 @command(
1303 b'debuggetbundle',
1303 b'debuggetbundle',
1304 [
1304 [
1305 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1305 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1306 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1306 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1307 (
1307 (
1308 b't',
1308 b't',
1309 b'type',
1309 b'type',
1310 b'bzip2',
1310 b'bzip2',
1311 _(b'bundle compression type to use'),
1311 _(b'bundle compression type to use'),
1312 _(b'TYPE'),
1312 _(b'TYPE'),
1313 ),
1313 ),
1314 ],
1314 ],
1315 _(b'REPO FILE [-H|-C ID]...'),
1315 _(b'REPO FILE [-H|-C ID]...'),
1316 norepo=True,
1316 norepo=True,
1317 )
1317 )
1318 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1318 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1319 """retrieves a bundle from a repo
1319 """retrieves a bundle from a repo
1320
1320
1321 Every ID must be a full-length hex node id string. Saves the bundle to the
1321 Every ID must be a full-length hex node id string. Saves the bundle to the
1322 given file.
1322 given file.
1323 """
1323 """
1324 opts = pycompat.byteskwargs(opts)
1324 opts = pycompat.byteskwargs(opts)
1325 repo = hg.peer(ui, opts, repopath)
1325 repo = hg.peer(ui, opts, repopath)
1326 if not repo.capable(b'getbundle'):
1326 if not repo.capable(b'getbundle'):
1327 raise error.Abort(b"getbundle() not supported by target repository")
1327 raise error.Abort(b"getbundle() not supported by target repository")
1328 args = {}
1328 args = {}
1329 if common:
1329 if common:
1330 args['common'] = [bin(s) for s in common]
1330 args['common'] = [bin(s) for s in common]
1331 if head:
1331 if head:
1332 args['heads'] = [bin(s) for s in head]
1332 args['heads'] = [bin(s) for s in head]
1333 # TODO: get desired bundlecaps from command line.
1333 # TODO: get desired bundlecaps from command line.
1334 args['bundlecaps'] = None
1334 args['bundlecaps'] = None
1335 bundle = repo.getbundle(b'debug', **args)
1335 bundle = repo.getbundle(b'debug', **args)
1336
1336
1337 bundletype = opts.get(b'type', b'bzip2').lower()
1337 bundletype = opts.get(b'type', b'bzip2').lower()
1338 btypes = {
1338 btypes = {
1339 b'none': b'HG10UN',
1339 b'none': b'HG10UN',
1340 b'bzip2': b'HG10BZ',
1340 b'bzip2': b'HG10BZ',
1341 b'gzip': b'HG10GZ',
1341 b'gzip': b'HG10GZ',
1342 b'bundle2': b'HG20',
1342 b'bundle2': b'HG20',
1343 }
1343 }
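# for example, '--type gzip' selects the b'HG10GZ' bundle format; a name not
# in this table maps to None and is rejected by the check just below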
1344 bundletype = btypes.get(bundletype)
1344 bundletype = btypes.get(bundletype)
1345 if bundletype not in bundle2.bundletypes:
1345 if bundletype not in bundle2.bundletypes:
1346 raise error.Abort(_(b'unknown bundle type specified with --type'))
1346 raise error.Abort(_(b'unknown bundle type specified with --type'))
1347 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1347 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1348
1348
1349
1349
1350 @command(b'debugignore', [], b'[FILE]')
1350 @command(b'debugignore', [], b'[FILE]')
1351 def debugignore(ui, repo, *files, **opts):
1351 def debugignore(ui, repo, *files, **opts):
1352 """display the combined ignore pattern and information about ignored files
1352 """display the combined ignore pattern and information about ignored files
1353
1353
1354 With no argument display the combined ignore pattern.
1354 With no argument display the combined ignore pattern.
1355
1355
1356 Given space-separated file names, shows whether each given file is ignored
1356 Given space-separated file names, shows whether each given file is ignored
1357 and, if so, shows the ignore rule (file and line number) that matched it.
1357 and, if so, shows the ignore rule (file and line number) that matched it.
1358 """
1358 """
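# e.g. 'hg debugignore build/output.o' (hypothetical path) reports either
# "... is not ignored", "... is ignored", or the containing directory that is
# ignored, plus the ignore-file rule (file and line) that matched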
1359 ignore = repo.dirstate._ignore
1359 ignore = repo.dirstate._ignore
1360 if not files:
1360 if not files:
1361 # Show all the patterns
1361 # Show all the patterns
1362 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1362 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1363 else:
1363 else:
1364 m = scmutil.match(repo[None], pats=files)
1364 m = scmutil.match(repo[None], pats=files)
1365 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1365 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1366 for f in m.files():
1366 for f in m.files():
1367 nf = util.normpath(f)
1367 nf = util.normpath(f)
1368 ignored = None
1368 ignored = None
1369 ignoredata = None
1369 ignoredata = None
1370 if nf != b'.':
1370 if nf != b'.':
1371 if ignore(nf):
1371 if ignore(nf):
1372 ignored = nf
1372 ignored = nf
1373 ignoredata = repo.dirstate._ignorefileandline(nf)
1373 ignoredata = repo.dirstate._ignorefileandline(nf)
1374 else:
1374 else:
1375 for p in pathutil.finddirs(nf):
1375 for p in pathutil.finddirs(nf):
1376 if ignore(p):
1376 if ignore(p):
1377 ignored = p
1377 ignored = p
1378 ignoredata = repo.dirstate._ignorefileandline(p)
1378 ignoredata = repo.dirstate._ignorefileandline(p)
1379 break
1379 break
1380 if ignored:
1380 if ignored:
1381 if ignored == nf:
1381 if ignored == nf:
1382 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1382 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1383 else:
1383 else:
1384 ui.write(
1384 ui.write(
1385 _(
1385 _(
1386 b"%s is ignored because of "
1386 b"%s is ignored because of "
1387 b"containing directory %s\n"
1387 b"containing directory %s\n"
1388 )
1388 )
1389 % (uipathfn(f), ignored)
1389 % (uipathfn(f), ignored)
1390 )
1390 )
1391 ignorefile, lineno, line = ignoredata
1391 ignorefile, lineno, line = ignoredata
1392 ui.write(
1392 ui.write(
1393 _(b"(ignore rule in %s, line %d: '%s')\n")
1393 _(b"(ignore rule in %s, line %d: '%s')\n")
1394 % (ignorefile, lineno, line)
1394 % (ignorefile, lineno, line)
1395 )
1395 )
1396 else:
1396 else:
1397 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1397 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1398
1398
1399
1399
1400 @command(
1400 @command(
1401 b'debugindex',
1401 b'debugindex',
1402 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1402 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1403 _(b'-c|-m|FILE'),
1403 _(b'-c|-m|FILE'),
1404 )
1404 )
1405 def debugindex(ui, repo, file_=None, **opts):
1405 def debugindex(ui, repo, file_=None, **opts):
1406 """dump index data for a storage primitive"""
1406 """dump index data for a storage primitive"""
1407 opts = pycompat.byteskwargs(opts)
1407 opts = pycompat.byteskwargs(opts)
1408 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1408 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1409
1409
1410 if ui.debugflag:
1410 if ui.debugflag:
1411 shortfn = hex
1411 shortfn = hex
1412 else:
1412 else:
1413 shortfn = short
1413 shortfn = short
1414
1414
1415 idlen = 12
1415 idlen = 12
1416 for i in store:
1416 for i in store:
1417 idlen = len(shortfn(store.node(i)))
1417 idlen = len(shortfn(store.node(i)))
1418 break
1418 break
1419
1419
1420 fm = ui.formatter(b'debugindex', opts)
1420 fm = ui.formatter(b'debugindex', opts)
1421 fm.plain(
1421 fm.plain(
1422 b' rev linkrev %s %s p2\n'
1422 b' rev linkrev %s %s p2\n'
1423 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1423 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1424 )
1424 )
1425
1425
1426 for rev in store:
1426 for rev in store:
1427 node = store.node(rev)
1427 node = store.node(rev)
1428 parents = store.parents(node)
1428 parents = store.parents(node)
1429
1429
1430 fm.startitem()
1430 fm.startitem()
1431 fm.write(b'rev', b'%6d ', rev)
1431 fm.write(b'rev', b'%6d ', rev)
1432 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1432 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1433 fm.write(b'node', b'%s ', shortfn(node))
1433 fm.write(b'node', b'%s ', shortfn(node))
1434 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1434 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1435 fm.write(b'p2', b'%s', shortfn(parents[1]))
1435 fm.write(b'p2', b'%s', shortfn(parents[1]))
1436 fm.plain(b'\n')
1436 fm.plain(b'\n')
1437
1437
1438 fm.end()
1438 fm.end()
1439
1439
1440
1440
1441 @command(
1441 @command(
1442 b'debugindexdot',
1442 b'debugindexdot',
1443 cmdutil.debugrevlogopts,
1443 cmdutil.debugrevlogopts,
1444 _(b'-c|-m|FILE'),
1444 _(b'-c|-m|FILE'),
1445 optionalrepo=True,
1445 optionalrepo=True,
1446 )
1446 )
1447 def debugindexdot(ui, repo, file_=None, **opts):
1447 def debugindexdot(ui, repo, file_=None, **opts):
1448 """dump an index DAG as a graphviz dot file"""
1448 """dump an index DAG as a graphviz dot file"""
1449 opts = pycompat.byteskwargs(opts)
1449 opts = pycompat.byteskwargs(opts)
1450 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1450 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1451 ui.writenoi18n(b"digraph G {\n")
1451 ui.writenoi18n(b"digraph G {\n")
1452 for i in r:
1452 for i in r:
1453 node = r.node(i)
1453 node = r.node(i)
1454 pp = r.parents(node)
1454 pp = r.parents(node)
1455 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1455 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1456 if pp[1] != nullid:
1456 if pp[1] != nullid:
1457 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1457 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1458 ui.write(b"}\n")
1458 ui.write(b"}\n")
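# rough sketch of the emitted graphviz text for a hypothetical three-revision
# repository where rev 2 merges revs 0 and 1 (the null parent shows as -1):
#   digraph G {
#       -1 -> 0
#       0 -> 1
#       1 -> 2
#       0 -> 2
#   }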
1459
1459
1460
1460
1461 @command(b'debugindexstats', [])
1461 @command(b'debugindexstats', [])
1462 def debugindexstats(ui, repo):
1462 def debugindexstats(ui, repo):
1463 """show stats related to the changelog index"""
1463 """show stats related to the changelog index"""
1464 repo.changelog.shortest(nullid, 1)
1464 repo.changelog.shortest(nullid, 1)
1465 index = repo.changelog.index
1465 index = repo.changelog.index
1466 if not util.safehasattr(index, b'stats'):
1466 if not util.safehasattr(index, b'stats'):
1467 raise error.Abort(_(b'debugindexstats only works with native code'))
1467 raise error.Abort(_(b'debugindexstats only works with native code'))
1468 for k, v in sorted(index.stats().items()):
1468 for k, v in sorted(index.stats().items()):
1469 ui.write(b'%s: %d\n' % (k, v))
1469 ui.write(b'%s: %d\n' % (k, v))
1470
1470
1471
1471
1472 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1472 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1473 def debuginstall(ui, **opts):
1473 def debuginstall(ui, **opts):
1474 '''test Mercurial installation
1474 '''test Mercurial installation
1475
1475
1476 Returns 0 on success.
1476 Returns 0 on success.
1477 '''
1477 '''
1478 opts = pycompat.byteskwargs(opts)
1478 opts = pycompat.byteskwargs(opts)
1479
1479
1480 problems = 0
1480 problems = 0
1481
1481
1482 fm = ui.formatter(b'debuginstall', opts)
1482 fm = ui.formatter(b'debuginstall', opts)
1483 fm.startitem()
1483 fm.startitem()
1484
1484
1485 # encoding might be unknown or wrong. don't translate these messages.
1485 # encoding might be unknown or wrong. don't translate these messages.
1486 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1486 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1487 err = None
1487 err = None
1488 try:
1488 try:
1489 codecs.lookup(pycompat.sysstr(encoding.encoding))
1489 codecs.lookup(pycompat.sysstr(encoding.encoding))
1490 except LookupError as inst:
1490 except LookupError as inst:
1491 err = stringutil.forcebytestr(inst)
1491 err = stringutil.forcebytestr(inst)
1492 problems += 1
1492 problems += 1
1493 fm.condwrite(
1493 fm.condwrite(
1494 err,
1494 err,
1495 b'encodingerror',
1495 b'encodingerror',
1496 b" %s\n (check that your locale is properly set)\n",
1496 b" %s\n (check that your locale is properly set)\n",
1497 err,
1497 err,
1498 )
1498 )
1499
1499
1500 # Python
1500 # Python
1501 pythonlib = None
1501 pythonlib = None
1502 if util.safehasattr(os, '__file__'):
1502 if util.safehasattr(os, '__file__'):
1503 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1503 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1504 elif getattr(sys, 'oxidized', False):
1504 elif getattr(sys, 'oxidized', False):
1505 pythonlib = pycompat.sysexecutable
1505 pythonlib = pycompat.sysexecutable
1506
1506
1507 fm.write(
1507 fm.write(
1508 b'pythonexe',
1508 b'pythonexe',
1509 _(b"checking Python executable (%s)\n"),
1509 _(b"checking Python executable (%s)\n"),
1510 pycompat.sysexecutable or _(b"unknown"),
1510 pycompat.sysexecutable or _(b"unknown"),
1511 )
1511 )
1512 fm.write(
1512 fm.write(
1513 b'pythonimplementation',
1513 b'pythonimplementation',
1514 _(b"checking Python implementation (%s)\n"),
1514 _(b"checking Python implementation (%s)\n"),
1515 pycompat.sysbytes(platform.python_implementation()),
1515 pycompat.sysbytes(platform.python_implementation()),
1516 )
1516 )
1517 fm.write(
1517 fm.write(
1518 b'pythonver',
1518 b'pythonver',
1519 _(b"checking Python version (%s)\n"),
1519 _(b"checking Python version (%s)\n"),
1520 (b"%d.%d.%d" % sys.version_info[:3]),
1520 (b"%d.%d.%d" % sys.version_info[:3]),
1521 )
1521 )
1522 fm.write(
1522 fm.write(
1523 b'pythonlib',
1523 b'pythonlib',
1524 _(b"checking Python lib (%s)...\n"),
1524 _(b"checking Python lib (%s)...\n"),
1525 pythonlib or _(b"unknown"),
1525 pythonlib or _(b"unknown"),
1526 )
1526 )
1527
1527
1528 try:
1528 try:
1529 from . import rustext
1529 from . import rustext
1530
1530
1531 rustext.__doc__ # trigger lazy import
1531 rustext.__doc__ # trigger lazy import
1532 except ImportError:
1532 except ImportError:
1533 rustext = None
1533 rustext = None
1534
1534
1535 security = set(sslutil.supportedprotocols)
1535 security = set(sslutil.supportedprotocols)
1536 if sslutil.hassni:
1536 if sslutil.hassni:
1537 security.add(b'sni')
1537 security.add(b'sni')
1538
1538
1539 fm.write(
1539 fm.write(
1540 b'pythonsecurity',
1540 b'pythonsecurity',
1541 _(b"checking Python security support (%s)\n"),
1541 _(b"checking Python security support (%s)\n"),
1542 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1542 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1543 )
1543 )
1544
1544
1545 # These are warnings, not errors. So don't increment problem count. This
1545 # These are warnings, not errors. So don't increment problem count. This
1546 # may change in the future.
1546 # may change in the future.
1547 if b'tls1.2' not in security:
1547 if b'tls1.2' not in security:
1548 fm.plain(
1548 fm.plain(
1549 _(
1549 _(
1550 b' TLS 1.2 not supported by Python install; '
1550 b' TLS 1.2 not supported by Python install; '
1551 b'network connections lack modern security\n'
1551 b'network connections lack modern security\n'
1552 )
1552 )
1553 )
1553 )
1554 if b'sni' not in security:
1554 if b'sni' not in security:
1555 fm.plain(
1555 fm.plain(
1556 _(
1556 _(
1557 b' SNI not supported by Python install; may have '
1557 b' SNI not supported by Python install; may have '
1558 b'connectivity issues with some servers\n'
1558 b'connectivity issues with some servers\n'
1559 )
1559 )
1560 )
1560 )
1561
1561
1562 fm.plain(
1562 fm.plain(
1563 _(
1563 _(
1564 b"checking Rust extensions (%s)\n"
1564 b"checking Rust extensions (%s)\n"
1565 % (b'missing' if rustext is None else b'installed')
1565 % (b'missing' if rustext is None else b'installed')
1566 ),
1566 ),
1567 )
1567 )
1568
1568
1569 # TODO print CA cert info
1569 # TODO print CA cert info
1570
1570
1571 # hg version
1571 # hg version
1572 hgver = util.version()
1572 hgver = util.version()
1573 fm.write(
1573 fm.write(
1574 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1574 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1575 )
1575 )
1576 fm.write(
1576 fm.write(
1577 b'hgverextra',
1577 b'hgverextra',
1578 _(b"checking Mercurial custom build (%s)\n"),
1578 _(b"checking Mercurial custom build (%s)\n"),
1579 b'+'.join(hgver.split(b'+')[1:]),
1579 b'+'.join(hgver.split(b'+')[1:]),
1580 )
1580 )
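# e.g. a hypothetical version string b"5.5.1+hg12.0123456789ab" is reported as
# "5.5.1" on the version line and "hg12.0123456789ab" on the custom build
# line; a plain release has no '+' part, so the second value is empty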
1581
1581
1582 # compiled modules
1582 # compiled modules
1583 hgmodules = None
1583 hgmodules = None
1584 if util.safehasattr(sys.modules[__name__], '__file__'):
1584 if util.safehasattr(sys.modules[__name__], '__file__'):
1585 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1585 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1586 elif getattr(sys, 'oxidized', False):
1586 elif getattr(sys, 'oxidized', False):
1587 hgmodules = pycompat.sysexecutable
1587 hgmodules = pycompat.sysexecutable
1588
1588
1589 fm.write(
1589 fm.write(
1590 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1590 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1591 )
1591 )
1592 fm.write(
1592 fm.write(
1593 b'hgmodules',
1593 b'hgmodules',
1594 _(b"checking installed modules (%s)...\n"),
1594 _(b"checking installed modules (%s)...\n"),
1595 hgmodules or _(b"unknown"),
1595 hgmodules or _(b"unknown"),
1596 )
1596 )
1597
1597
1598 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1598 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1599 rustext = rustandc # for now, that's the only case
1599 rustext = rustandc # for now, that's the only case
1600 cext = policy.policy in (b'c', b'allow') or rustandc
1600 cext = policy.policy in (b'c', b'allow') or rustandc
1601 nopure = cext or rustext
1601 nopure = cext or rustext
1602 if nopure:
1602 if nopure:
1603 err = None
1603 err = None
1604 try:
1604 try:
1605 if cext:
1605 if cext:
1606 from .cext import ( # pytype: disable=import-error
1606 from .cext import ( # pytype: disable=import-error
1607 base85,
1607 base85,
1608 bdiff,
1608 bdiff,
1609 mpatch,
1609 mpatch,
1610 osutil,
1610 osutil,
1611 )
1611 )
1612
1612
1613 # quiet pyflakes
1613 # quiet pyflakes
1614 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1614 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1615 if rustext:
1615 if rustext:
1616 from .rustext import ( # pytype: disable=import-error
1616 from .rustext import ( # pytype: disable=import-error
1617 ancestor,
1617 ancestor,
1618 dirstate,
1618 dirstate,
1619 )
1619 )
1620
1620
1621 dir(ancestor), dir(dirstate) # quiet pyflakes
1621 dir(ancestor), dir(dirstate) # quiet pyflakes
1622 except Exception as inst:
1622 except Exception as inst:
1623 err = stringutil.forcebytestr(inst)
1623 err = stringutil.forcebytestr(inst)
1624 problems += 1
1624 problems += 1
1625 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1625 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1626
1626
1627 compengines = util.compengines._engines.values()
1627 compengines = util.compengines._engines.values()
1628 fm.write(
1628 fm.write(
1629 b'compengines',
1629 b'compengines',
1630 _(b'checking registered compression engines (%s)\n'),
1630 _(b'checking registered compression engines (%s)\n'),
1631 fm.formatlist(
1631 fm.formatlist(
1632 sorted(e.name() for e in compengines),
1632 sorted(e.name() for e in compengines),
1633 name=b'compengine',
1633 name=b'compengine',
1634 fmt=b'%s',
1634 fmt=b'%s',
1635 sep=b', ',
1635 sep=b', ',
1636 ),
1636 ),
1637 )
1637 )
1638 fm.write(
1638 fm.write(
1639 b'compenginesavail',
1639 b'compenginesavail',
1640 _(b'checking available compression engines (%s)\n'),
1640 _(b'checking available compression engines (%s)\n'),
1641 fm.formatlist(
1641 fm.formatlist(
1642 sorted(e.name() for e in compengines if e.available()),
1642 sorted(e.name() for e in compengines if e.available()),
1643 name=b'compengine',
1643 name=b'compengine',
1644 fmt=b'%s',
1644 fmt=b'%s',
1645 sep=b', ',
1645 sep=b', ',
1646 ),
1646 ),
1647 )
1647 )
1648 wirecompengines = compression.compengines.supportedwireengines(
1648 wirecompengines = compression.compengines.supportedwireengines(
1649 compression.SERVERROLE
1649 compression.SERVERROLE
1650 )
1650 )
1651 fm.write(
1651 fm.write(
1652 b'compenginesserver',
1652 b'compenginesserver',
1653 _(
1653 _(
1654 b'checking available compression engines '
1654 b'checking available compression engines '
1655 b'for wire protocol (%s)\n'
1655 b'for wire protocol (%s)\n'
1656 ),
1656 ),
1657 fm.formatlist(
1657 fm.formatlist(
1658 [e.name() for e in wirecompengines if e.wireprotosupport()],
1658 [e.name() for e in wirecompengines if e.wireprotosupport()],
1659 name=b'compengine',
1659 name=b'compengine',
1660 fmt=b'%s',
1660 fmt=b'%s',
1661 sep=b', ',
1661 sep=b', ',
1662 ),
1662 ),
1663 )
1663 )
1664 re2 = b'missing'
1664 re2 = b'missing'
1665 if util._re2:
1665 if util._re2:
1666 re2 = b'available'
1666 re2 = b'available'
1667 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1667 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1668 fm.data(re2=bool(util._re2))
1668 fm.data(re2=bool(util._re2))
1669
1669
1670 # templates
1670 # templates
1671 p = templater.templatedir()
1671 p = templater.templatedir()
1672 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1672 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1673 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1673 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1674 if p:
1674 if p:
1675 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1675 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1676 if m:
1676 if m:
1677 # template found, check if it is working
1677 # template found, check if it is working
1678 err = None
1678 err = None
1679 try:
1679 try:
1680 templater.templater.frommapfile(m)
1680 templater.templater.frommapfile(m)
1681 except Exception as inst:
1681 except Exception as inst:
1682 err = stringutil.forcebytestr(inst)
1682 err = stringutil.forcebytestr(inst)
1683 p = None
1683 p = None
1684 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1684 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1685 else:
1685 else:
1686 p = None
1686 p = None
1687 fm.condwrite(
1687 fm.condwrite(
1688 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1688 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1689 )
1689 )
1690 fm.condwrite(
1690 fm.condwrite(
1691 not m,
1691 not m,
1692 b'defaulttemplatenotfound',
1692 b'defaulttemplatenotfound',
1693 _(b" template '%s' not found\n"),
1693 _(b" template '%s' not found\n"),
1694 b"default",
1694 b"default",
1695 )
1695 )
1696 if not p:
1696 if not p:
1697 problems += 1
1697 problems += 1
1698 fm.condwrite(
1698 fm.condwrite(
1699 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1699 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1700 )
1700 )
1701
1701
1702 # editor
1702 # editor
1703 editor = ui.geteditor()
1703 editor = ui.geteditor()
1704 editor = util.expandpath(editor)
1704 editor = util.expandpath(editor)
1705 editorbin = procutil.shellsplit(editor)[0]
1705 editorbin = procutil.shellsplit(editor)[0]
1706 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1706 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1707 cmdpath = procutil.findexe(editorbin)
1707 cmdpath = procutil.findexe(editorbin)
1708 fm.condwrite(
1708 fm.condwrite(
1709 not cmdpath and editor == b'vi',
1709 not cmdpath and editor == b'vi',
1710 b'vinotfound',
1710 b'vinotfound',
1711 _(
1711 _(
1712 b" No commit editor set and can't find %s in PATH\n"
1712 b" No commit editor set and can't find %s in PATH\n"
1713 b" (specify a commit editor in your configuration"
1713 b" (specify a commit editor in your configuration"
1714 b" file)\n"
1714 b" file)\n"
1715 ),
1715 ),
1716 not cmdpath and editor == b'vi' and editorbin,
1716 not cmdpath and editor == b'vi' and editorbin,
1717 )
1717 )
1718 fm.condwrite(
1718 fm.condwrite(
1719 not cmdpath and editor != b'vi',
1719 not cmdpath and editor != b'vi',
1720 b'editornotfound',
1720 b'editornotfound',
1721 _(
1721 _(
1722 b" Can't find editor '%s' in PATH\n"
1722 b" Can't find editor '%s' in PATH\n"
1723 b" (specify a commit editor in your configuration"
1723 b" (specify a commit editor in your configuration"
1724 b" file)\n"
1724 b" file)\n"
1725 ),
1725 ),
1726 not cmdpath and editorbin,
1726 not cmdpath and editorbin,
1727 )
1727 )
1728 if not cmdpath and editor != b'vi':
1728 if not cmdpath and editor != b'vi':
1729 problems += 1
1729 problems += 1
1730
1730
1731 # check username
1731 # check username
1732 username = None
1732 username = None
1733 err = None
1733 err = None
1734 try:
1734 try:
1735 username = ui.username()
1735 username = ui.username()
1736 except error.Abort as e:
1736 except error.Abort as e:
1737 err = stringutil.forcebytestr(e)
1737 err = stringutil.forcebytestr(e)
1738 problems += 1
1738 problems += 1
1739
1739
1740 fm.condwrite(
1740 fm.condwrite(
1741 username, b'username', _(b"checking username (%s)\n"), username
1741 username, b'username', _(b"checking username (%s)\n"), username
1742 )
1742 )
1743 fm.condwrite(
1743 fm.condwrite(
1744 err,
1744 err,
1745 b'usernameerror',
1745 b'usernameerror',
1746 _(
1746 _(
1747 b"checking username...\n %s\n"
1747 b"checking username...\n %s\n"
1748 b" (specify a username in your configuration file)\n"
1748 b" (specify a username in your configuration file)\n"
1749 ),
1749 ),
1750 err,
1750 err,
1751 )
1751 )
1752
1752
1753 for name, mod in extensions.extensions():
1753 for name, mod in extensions.extensions():
1754 handler = getattr(mod, 'debuginstall', None)
1754 handler = getattr(mod, 'debuginstall', None)
1755 if handler is not None:
1755 if handler is not None:
1756 problems += handler(ui, fm)
1756 problems += handler(ui, fm)
1757
1757
1758 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1758 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1759 if not problems:
1759 if not problems:
1760 fm.data(problems=problems)
1760 fm.data(problems=problems)
1761 fm.condwrite(
1761 fm.condwrite(
1762 problems,
1762 problems,
1763 b'problems',
1763 b'problems',
1764 _(b"%d problems detected, please check your install!\n"),
1764 _(b"%d problems detected, please check your install!\n"),
1765 problems,
1765 problems,
1766 )
1766 )
1767 fm.end()
1767 fm.end()
1768
1768
1769 return problems
1769 return problems
1770
1770
1771
1771
1772 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1772 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1773 def debugknown(ui, repopath, *ids, **opts):
1773 def debugknown(ui, repopath, *ids, **opts):
1774 """test whether node ids are known to a repo
1774 """test whether node ids are known to a repo
1775
1775
1776 Every ID must be a full-length hex node id string. Returns a list of 0s
1776 Every ID must be a full-length hex node id string. Returns a list of 0s
1777 and 1s indicating unknown/known.
1777 and 1s indicating unknown/known.
1778 """
1778 """
1779 opts = pycompat.byteskwargs(opts)
1779 opts = pycompat.byteskwargs(opts)
1780 repo = hg.peer(ui, opts, repopath)
1780 repo = hg.peer(ui, opts, repopath)
1781 if not repo.capable(b'known'):
1781 if not repo.capable(b'known'):
1782 raise error.Abort(b"known() not supported by target repository")
1782 raise error.Abort(b"known() not supported by target repository")
1783 flags = repo.known([bin(s) for s in ids])
1783 flags = repo.known([bin(s) for s in ids])
1784 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1784 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
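# for instance, querying three ids where only the second is unknown prints
# "101" -- one character per queried node, in the order given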
1785
1785
1786
1786
1787 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1787 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1788 def debuglabelcomplete(ui, repo, *args):
1788 def debuglabelcomplete(ui, repo, *args):
1789 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1789 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1790 debugnamecomplete(ui, repo, *args)
1790 debugnamecomplete(ui, repo, *args)
1791
1791
1792
1792
1793 @command(
1793 @command(
1794 b'debuglocks',
1794 b'debuglocks',
1795 [
1795 [
1796 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1796 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1797 (
1797 (
1798 b'W',
1798 b'W',
1799 b'force-wlock',
1799 b'force-wlock',
1800 None,
1800 None,
1801 _(b'free the working state lock (DANGEROUS)'),
1801 _(b'free the working state lock (DANGEROUS)'),
1802 ),
1802 ),
1803 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1803 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1804 (
1804 (
1805 b'S',
1805 b'S',
1806 b'set-wlock',
1806 b'set-wlock',
1807 None,
1807 None,
1808 _(b'set the working state lock until stopped'),
1808 _(b'set the working state lock until stopped'),
1809 ),
1809 ),
1810 ],
1810 ],
1811 _(b'[OPTION]...'),
1811 _(b'[OPTION]...'),
1812 )
1812 )
1813 def debuglocks(ui, repo, **opts):
1813 def debuglocks(ui, repo, **opts):
1814 """show or modify state of locks
1814 """show or modify state of locks
1815
1815
1816 By default, this command will show which locks are held. This
1816 By default, this command will show which locks are held. This
1817 includes the user and process holding the lock, the amount of time
1817 includes the user and process holding the lock, the amount of time
1818 the lock has been held, and the machine name where the process is
1818 the lock has been held, and the machine name where the process is
1819 running if it's not local.
1819 running if it's not local.
1820
1820
1821 Locks protect the integrity of Mercurial's data, so should be
1821 Locks protect the integrity of Mercurial's data, so should be
1822 treated with care. System crashes or other interruptions may cause
1822 treated with care. System crashes or other interruptions may cause
1823 locks to not be properly released, though Mercurial will usually
1823 locks to not be properly released, though Mercurial will usually
1824 detect and remove such stale locks automatically.
1824 detect and remove such stale locks automatically.
1825
1825
1826 However, detecting stale locks may not always be possible (for
1826 However, detecting stale locks may not always be possible (for
1827 instance, on a shared filesystem). Removing locks may also be
1827 instance, on a shared filesystem). Removing locks may also be
1828 blocked by filesystem permissions.
1828 blocked by filesystem permissions.
1829
1829
1830 Setting a lock will prevent other commands from changing the data.
1830 Setting a lock will prevent other commands from changing the data.
1831 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1831 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1832 The set locks are removed when the command exits.
1832 The set locks are removed when the command exits.
1833
1833
1834 Returns 0 if no locks are held.
1834 Returns 0 if no locks are held.
1835
1835
1836 """
1836 """
1837
1837
1838 if opts.get('force_lock'):
1838 if opts.get('force_lock'):
1839 repo.svfs.unlink(b'lock')
1839 repo.svfs.unlink(b'lock')
1840 if opts.get('force_wlock'):
1840 if opts.get('force_wlock'):
1841 repo.vfs.unlink(b'wlock')
1841 repo.vfs.unlink(b'wlock')
1842 if opts.get('force_lock') or opts.get('force_wlock'):
1842 if opts.get('force_lock') or opts.get('force_wlock'):
1843 return 0
1843 return 0
1844
1844
1845 locks = []
1845 locks = []
1846 try:
1846 try:
1847 if opts.get('set_wlock'):
1847 if opts.get('set_wlock'):
1848 try:
1848 try:
1849 locks.append(repo.wlock(False))
1849 locks.append(repo.wlock(False))
1850 except error.LockHeld:
1850 except error.LockHeld:
1851 raise error.Abort(_(b'wlock is already held'))
1851 raise error.Abort(_(b'wlock is already held'))
1852 if opts.get('set_lock'):
1852 if opts.get('set_lock'):
1853 try:
1853 try:
1854 locks.append(repo.lock(False))
1854 locks.append(repo.lock(False))
1855 except error.LockHeld:
1855 except error.LockHeld:
1856 raise error.Abort(_(b'lock is already held'))
1856 raise error.Abort(_(b'lock is already held'))
1857 if len(locks):
1857 if len(locks):
1858 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1858 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1859 return 0
1859 return 0
1860 finally:
1860 finally:
1861 release(*locks)
1861 release(*locks)
1862
1862
1863 now = time.time()
1863 now = time.time()
1864 held = 0
1864 held = 0
1865
1865
1866 def report(vfs, name, method):
1866 def report(vfs, name, method):
1867 # this causes stale locks to get reaped for more accurate reporting
1867 # this causes stale locks to get reaped for more accurate reporting
1868 try:
1868 try:
1869 l = method(False)
1869 l = method(False)
1870 except error.LockHeld:
1870 except error.LockHeld:
1871 l = None
1871 l = None
1872
1872
1873 if l:
1873 if l:
1874 l.release()
1874 l.release()
1875 else:
1875 else:
1876 try:
1876 try:
1877 st = vfs.lstat(name)
1877 st = vfs.lstat(name)
1878 age = now - st[stat.ST_MTIME]
1878 age = now - st[stat.ST_MTIME]
1879 user = util.username(st.st_uid)
1879 user = util.username(st.st_uid)
1880 locker = vfs.readlock(name)
1880 locker = vfs.readlock(name)
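# a lock file normally records b"<hostname>:<pid>"; contents without a colon
# are shown verbatim below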
1881 if b":" in locker:
1881 if b":" in locker:
1882 host, pid = locker.split(b':')
1882 host, pid = locker.split(b':')
1883 if host == socket.gethostname():
1883 if host == socket.gethostname():
1884 locker = b'user %s, process %s' % (user or b'None', pid)
1884 locker = b'user %s, process %s' % (user or b'None', pid)
1885 else:
1885 else:
1886 locker = b'user %s, process %s, host %s' % (
1886 locker = b'user %s, process %s, host %s' % (
1887 user or b'None',
1887 user or b'None',
1888 pid,
1888 pid,
1889 host,
1889 host,
1890 )
1890 )
1891 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1891 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1892 return 1
1892 return 1
1893 except OSError as e:
1893 except OSError as e:
1894 if e.errno != errno.ENOENT:
1894 if e.errno != errno.ENOENT:
1895 raise
1895 raise
1896
1896
1897 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1897 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1898 return 0
1898 return 0
1899
1899
1900 held += report(repo.svfs, b"lock", repo.lock)
1900 held += report(repo.svfs, b"lock", repo.lock)
1901 held += report(repo.vfs, b"wlock", repo.wlock)
1901 held += report(repo.vfs, b"wlock", repo.wlock)
1902
1902
1903 return held
1903 return held
1904
1904
1905
1905
1906 @command(
1906 @command(
1907 b'debugmanifestfulltextcache',
1907 b'debugmanifestfulltextcache',
1908 [
1908 [
1909 (b'', b'clear', False, _(b'clear the cache')),
1909 (b'', b'clear', False, _(b'clear the cache')),
1910 (
1910 (
1911 b'a',
1911 b'a',
1912 b'add',
1912 b'add',
1913 [],
1913 [],
1914 _(b'add the given manifest nodes to the cache'),
1914 _(b'add the given manifest nodes to the cache'),
1915 _(b'NODE'),
1915 _(b'NODE'),
1916 ),
1916 ),
1917 ],
1917 ],
1918 b'',
1918 b'',
1919 )
1919 )
1920 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1920 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1921 """show, clear or amend the contents of the manifest fulltext cache"""
1921 """show, clear or amend the contents of the manifest fulltext cache"""
1922
1922
1923 def getcache():
1923 def getcache():
1924 r = repo.manifestlog.getstorage(b'')
1924 r = repo.manifestlog.getstorage(b'')
1925 try:
1925 try:
1926 return r._fulltextcache
1926 return r._fulltextcache
1927 except AttributeError:
1927 except AttributeError:
1928 msg = _(
1928 msg = _(
1929 b"Current revlog implementation doesn't appear to have a "
1929 b"Current revlog implementation doesn't appear to have a "
1930 b"manifest fulltext cache\n"
1930 b"manifest fulltext cache\n"
1931 )
1931 )
1932 raise error.Abort(msg)
1932 raise error.Abort(msg)
1933
1933
1934 if opts.get('clear'):
1934 if opts.get('clear'):
1935 with repo.wlock():
1935 with repo.wlock():
1936 cache = getcache()
1936 cache = getcache()
1937 cache.clear(clear_persisted_data=True)
1937 cache.clear(clear_persisted_data=True)
1938 return
1938 return
1939
1939
1940 if add:
1940 if add:
1941 with repo.wlock():
1941 with repo.wlock():
1942 m = repo.manifestlog
1942 m = repo.manifestlog
1943 store = m.getstorage(b'')
1943 store = m.getstorage(b'')
1944 for n in add:
1944 for n in add:
1945 try:
1945 try:
1946 manifest = m[store.lookup(n)]
1946 manifest = m[store.lookup(n)]
1947 except error.LookupError as e:
1947 except error.LookupError as e:
1948 raise error.Abort(e, hint=b"Check your manifest node id")
1948 raise error.Abort(e, hint=b"Check your manifest node id")
1949 manifest.read() # stores revision in cache too
1949 manifest.read() # stores revision in cache too
1950 return
1950 return
1951
1951
1952 cache = getcache()
1952 cache = getcache()
1953 if not len(cache):
1953 if not len(cache):
1954 ui.write(_(b'cache empty\n'))
1954 ui.write(_(b'cache empty\n'))
1955 else:
1955 else:
1956 ui.write(
1956 ui.write(
1957 _(
1957 _(
1958 b'cache contains %d manifest entries, in order of most to '
1958 b'cache contains %d manifest entries, in order of most to '
1959 b'least recent:\n'
1959 b'least recent:\n'
1960 )
1960 )
1961 % (len(cache),)
1961 % (len(cache),)
1962 )
1962 )
1963 totalsize = 0
1963 totalsize = 0
1964 for nodeid in cache:
1964 for nodeid in cache:
1965 # Use cache.peek to not update the LRU order
1965 # Use cache.peek to not update the LRU order
1966 data = cache.peek(nodeid)
1966 data = cache.peek(nodeid)
1967 size = len(data)
1967 size = len(data)
1968 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1968 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1969 ui.write(
1969 ui.write(
1970 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1970 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1971 )
1971 )
1972 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1972 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1973 ui.write(
1973 ui.write(
1974 _(b'total cache data size %s, on-disk %s\n')
1974 _(b'total cache data size %s, on-disk %s\n')
1975 % (util.bytecount(totalsize), util.bytecount(ondisk))
1975 % (util.bytecount(totalsize), util.bytecount(ondisk))
1976 )
1976 )
1977
1977
1978
1978
1979 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
1979 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
1980 def debugmergestate(ui, repo, *args, **opts):
1980 def debugmergestate(ui, repo, *args, **opts):
1981 """print merge state
1981 """print merge state
1982
1982
1983 Use --verbose to print out information about whether v1 or v2 merge state
1983 Use --verbose to print out information about whether v1 or v2 merge state
1984 was chosen."""
1984 was chosen."""
1985
1985
1986 if ui.verbose:
1986 if ui.verbose:
1987 ms = mergestatemod.mergestate(repo)
1987 ms = mergestatemod.mergestate(repo)
1988
1988
1989 # sort so that reasonable information is on top
1989 # sort so that reasonable information is on top
1990 v1records = ms._readrecordsv1()
1990 v1records = ms._readrecordsv1()
1991 v2records = ms._readrecordsv2()
1991 v2records = ms._readrecordsv2()
1992
1992
1993 if not v1records and not v2records:
1993 if not v1records and not v2records:
1994 pass
1994 pass
1995 elif not v2records:
1995 elif not v2records:
1996 ui.writenoi18n(b'no version 2 merge state\n')
1996 ui.writenoi18n(b'no version 2 merge state\n')
1997 elif ms._v1v2match(v1records, v2records):
1997 elif ms._v1v2match(v1records, v2records):
1998 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
1998 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
1999 else:
1999 else:
2000 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2000 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2001
2001
2002 opts = pycompat.byteskwargs(opts)
2002 opts = pycompat.byteskwargs(opts)
2003 if not opts[b'template']:
2003 if not opts[b'template']:
2004 opts[b'template'] = (
2004 opts[b'template'] = (
2005 b'{if(commits, "", "no merge state found\n")}'
2005 b'{if(commits, "", "no merge state found\n")}'
2006 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2006 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2007 b'{files % "file: {path} (state \\"{state}\\")\n'
2007 b'{files % "file: {path} (state \\"{state}\\")\n'
2008 b'{if(local_path, "'
2008 b'{if(local_path, "'
2009 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2009 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2010 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2010 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2011 b' other path: {other_path} (node {other_node})\n'
2011 b' other path: {other_path} (node {other_node})\n'
2012 b'")}'
2012 b'")}'
2013 b'{if(rename_side, "'
2013 b'{if(rename_side, "'
2014 b' rename side: {rename_side}\n'
2014 b' rename side: {rename_side}\n'
2015 b' renamed path: {renamed_path}\n'
2015 b' renamed path: {renamed_path}\n'
2016 b'")}'
2016 b'")}'
2017 b'{extras % " extra: {key} = {value}\n"}'
2017 b'{extras % " extra: {key} = {value}\n"}'
2018 b'"}'
2018 b'"}'
2019 b'{extras % "extra: {file} ({key} = {value})\n"}'
2019 )
2020 )
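# Editorial sketch (not part of the source): rendered with this default
# template, the output looks roughly like the lines below; the file names
# 'foo' and 'bar', the extras key/value, the labels and the node hashes are
# placeholders only.
#   local (working copy): <node>
#   other (merge rev): <node>
#   file: foo (state "u")
#    local path: foo (hash <hash>, flags "")
#    ancestor path: foo (node <node>)
#    other path: foo (node <node>)
#    extra: <key> = <value>
#   extra: bar (<key> = <value>)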
2020
2021
2021 ms = mergestatemod.mergestate.read(repo)
2022 ms = mergestatemod.mergestate.read(repo)
2022
2023
2023 fm = ui.formatter(b'debugmergestate', opts)
2024 fm = ui.formatter(b'debugmergestate', opts)
2024 fm.startitem()
2025 fm.startitem()
2025
2026
2026 fm_commits = fm.nested(b'commits')
2027 fm_commits = fm.nested(b'commits')
2027 if ms.active():
2028 if ms.active():
2028 for name, node, label_index in (
2029 for name, node, label_index in (
2029 (b'local', ms.local, 0),
2030 (b'local', ms.local, 0),
2030 (b'other', ms.other, 1),
2031 (b'other', ms.other, 1),
2031 ):
2032 ):
2032 fm_commits.startitem()
2033 fm_commits.startitem()
2033 fm_commits.data(name=name)
2034 fm_commits.data(name=name)
2034 fm_commits.data(node=hex(node))
2035 fm_commits.data(node=hex(node))
2035 if ms._labels and len(ms._labels) > label_index:
2036 if ms._labels and len(ms._labels) > label_index:
2036 fm_commits.data(label=ms._labels[label_index])
2037 fm_commits.data(label=ms._labels[label_index])
2037 fm_commits.end()
2038 fm_commits.end()
2038
2039
2039 fm_files = fm.nested(b'files')
2040 fm_files = fm.nested(b'files')
2040 if ms.active():
2041 if ms.active():
2041 for f in ms:
2042 for f in ms:
2042 fm_files.startitem()
2043 fm_files.startitem()
2043 fm_files.data(path=f)
2044 fm_files.data(path=f)
2044 state = ms._state[f]
2045 state = ms._state[f]
2045 fm_files.data(state=state[0])
2046 fm_files.data(state=state[0])
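# for regular merge records, the state tuple unpacked below is
# (state, local hash, local path, ancestor path, ancestor node,
#  other path, other node, local flags)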
2046 if state[0] in (
2047 if state[0] in (
2047 mergestatemod.MERGE_RECORD_UNRESOLVED,
2048 mergestatemod.MERGE_RECORD_UNRESOLVED,
2048 mergestatemod.MERGE_RECORD_RESOLVED,
2049 mergestatemod.MERGE_RECORD_RESOLVED,
2049 ):
2050 ):
2050 fm_files.data(local_key=state[1])
2051 fm_files.data(local_key=state[1])
2051 fm_files.data(local_path=state[2])
2052 fm_files.data(local_path=state[2])
2052 fm_files.data(ancestor_path=state[3])
2053 fm_files.data(ancestor_path=state[3])
2053 fm_files.data(ancestor_node=state[4])
2054 fm_files.data(ancestor_node=state[4])
2054 fm_files.data(other_path=state[5])
2055 fm_files.data(other_path=state[5])
2055 fm_files.data(other_node=state[6])
2056 fm_files.data(other_node=state[6])
2056 fm_files.data(local_flags=state[7])
2057 fm_files.data(local_flags=state[7])
2057 elif state[0] in (
2058 elif state[0] in (
2058 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2059 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2059 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2060 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2060 ):
2061 ):
2061 fm_files.data(renamed_path=state[1])
2062 fm_files.data(renamed_path=state[1])
2062 fm_files.data(rename_side=state[2])
2063 fm_files.data(rename_side=state[2])
2063 fm_extras = fm_files.nested(b'extras')
2064 fm_extras = fm_files.nested(b'extras')
2064 for k, v in ms.extras(f).items():
2065 for k, v in ms.extras(f).items():
2065 fm_extras.startitem()
2066 fm_extras.startitem()
2066 fm_extras.data(key=k)
2067 fm_extras.data(key=k)
2067 fm_extras.data(value=v)
2068 fm_extras.data(value=v)
2068 fm_extras.end()
2069 fm_extras.end()
2069
2070
2070 fm_files.end()
2071 fm_files.end()
2071
2072
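# also show extras recorded for files which are not in the merge state itself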
2073 fm_extras = fm.nested(b'extras')
2074 for f, d in sorted(pycompat.iteritems(ms._stateextras)):
2075 if f in ms:
2076 # If the file is in the merge state, we have already processed its extras
2077 continue
2078 for k, v in pycompat.iteritems(d):
2079 fm_extras.startitem()
2080 fm_extras.data(file=f)
2081 fm_extras.data(key=k)
2082 fm_extras.data(value=v)
2083 fm_extras.end()
2084
2072 fm.end()
2085 fm.end()
2073
2086
2074
2087
2075 @command(b'debugnamecomplete', [], _(b'NAME...'))
2088 @command(b'debugnamecomplete', [], _(b'NAME...'))
2076 def debugnamecomplete(ui, repo, *args):
2089 def debugnamecomplete(ui, repo, *args):
2077 '''complete "names" - tags, open branch names, bookmark names'''
2090 '''complete "names" - tags, open branch names, bookmark names'''
2078
2091
2079 names = set()
2092 names = set()
2080 # since we previously only listed open branches, we will handle that
2093 # since we previously only listed open branches, we will handle that
2081 # specially (after this for loop)
2094 # specially (after this for loop)
2082 for name, ns in pycompat.iteritems(repo.names):
2095 for name, ns in pycompat.iteritems(repo.names):
2083 if name != b'branches':
2096 if name != b'branches':
2084 names.update(ns.listnames(repo))
2097 names.update(ns.listnames(repo))
2085 names.update(
2098 names.update(
2086 tag
2099 tag
2087 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2100 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2088 if not closed
2101 if not closed
2089 )
2102 )
2090 completions = set()
2103 completions = set()
2091 if not args:
2104 if not args:
2092 args = [b'']
2105 args = [b'']
2093 for a in args:
2106 for a in args:
2094 completions.update(n for n in names if n.startswith(a))
2107 completions.update(n for n in names if n.startswith(a))
2095 ui.write(b'\n'.join(sorted(completions)))
2108 ui.write(b'\n'.join(sorted(completions)))
2096 ui.write(b'\n')
2109 ui.write(b'\n')
2097
2110
2098
2111
2099 @command(
2112 @command(
2100 b'debugnodemap',
2113 b'debugnodemap',
2101 [
2114 [
2102 (
2115 (
2103 b'',
2116 b'',
2104 b'dump-new',
2117 b'dump-new',
2105 False,
2118 False,
2106 _(b'write a (new) persistent binary nodemap to stdout'),
2119 _(b'write a (new) persistent binary nodemap to stdout'),
2107 ),
2120 ),
2108 (b'', b'dump-disk', False, _(b'dump the on-disk data to stdout')),
2121 (b'', b'dump-disk', False, _(b'dump the on-disk data to stdout')),
2109 (
2122 (
2110 b'',
2123 b'',
2111 b'check',
2124 b'check',
2112 False,
2125 False,
2113 _(b'check that the on-disk data are correct.'),
2126 _(b'check that the on-disk data are correct.'),
2114 ),
2127 ),
2115 (
2128 (
2116 b'',
2129 b'',
2117 b'metadata',
2130 b'metadata',
2118 False,
2131 False,
2119 _(b'display the on-disk metadata for the nodemap'),
2132 _(b'display the on-disk metadata for the nodemap'),
2120 ),
2133 ),
2121 ],
2134 ],
2122 )
2135 )
2123 def debugnodemap(ui, repo, **opts):
2136 def debugnodemap(ui, repo, **opts):
2124 """write and inspect the on-disk nodemap
2137 """write and inspect the on-disk nodemap
2125 """
2138 """
2126 if opts['dump_new']:
2139 if opts['dump_new']:
2127 unfi = repo.unfiltered()
2140 unfi = repo.unfiltered()
2128 cl = unfi.changelog
2141 cl = unfi.changelog
2129 if util.safehasattr(cl.index, "nodemap_data_all"):
2142 if util.safehasattr(cl.index, "nodemap_data_all"):
2130 data = cl.index.nodemap_data_all()
2143 data = cl.index.nodemap_data_all()
2131 else:
2144 else:
2132 data = nodemap.persistent_data(cl.index)
2145 data = nodemap.persistent_data(cl.index)
2133 ui.write(data)
2146 ui.write(data)
2134 elif opts['dump_disk']:
2147 elif opts['dump_disk']:
2135 unfi = repo.unfiltered()
2148 unfi = repo.unfiltered()
2136 cl = unfi.changelog
2149 cl = unfi.changelog
2137 nm_data = nodemap.persisted_data(cl)
2150 nm_data = nodemap.persisted_data(cl)
2138 if nm_data is not None:
2151 if nm_data is not None:
2139 docket, data = nm_data
2152 docket, data = nm_data
2140 ui.write(data[:])
2153 ui.write(data[:])
2141 elif opts['check']:
2154 elif opts['check']:
2142 unfi = repo.unfiltered()
2155 unfi = repo.unfiltered()
2143 cl = unfi.changelog
2156 cl = unfi.changelog
2144 nm_data = nodemap.persisted_data(cl)
2157 nm_data = nodemap.persisted_data(cl)
2145 if nm_data is not None:
2158 if nm_data is not None:
2146 docket, data = nm_data
2159 docket, data = nm_data
2147 return nodemap.check_data(ui, cl.index, data)
2160 return nodemap.check_data(ui, cl.index, data)
2148 elif opts['metadata']:
2161 elif opts['metadata']:
2149 unfi = repo.unfiltered()
2162 unfi = repo.unfiltered()
2150 cl = unfi.changelog
2163 cl = unfi.changelog
2151 nm_data = nodemap.persisted_data(cl)
2164 nm_data = nodemap.persisted_data(cl)
2152 if nm_data is not None:
2165 if nm_data is not None:
2153 docket, data = nm_data
2166 docket, data = nm_data
2154 ui.write((b"uid: %s\n") % docket.uid)
2167 ui.write((b"uid: %s\n") % docket.uid)
2155 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2168 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2156 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2169 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2157 ui.write((b"data-length: %d\n") % docket.data_length)
2170 ui.write((b"data-length: %d\n") % docket.data_length)
2158 ui.write((b"data-unused: %d\n") % docket.data_unused)
2171 ui.write((b"data-unused: %d\n") % docket.data_unused)
2159 unused_perc = docket.data_unused * 100.0 / docket.data_length
2172 unused_perc = docket.data_unused * 100.0 / docket.data_length
2160 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2173 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
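# Editorial sketch of the --metadata output produced by the writes above
# (all values are placeholders):
#   uid: <uid>
#   tip-rev: <rev>
#   tip-node: <node>
#   data-length: <bytes>
#   data-unused: <bytes>
#   data-unused: <percent>%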
2161
2174
2162
2175
2163 @command(
2176 @command(
2164 b'debugobsolete',
2177 b'debugobsolete',
2165 [
2178 [
2166 (b'', b'flags', 0, _(b'markers flag')),
2179 (b'', b'flags', 0, _(b'markers flag')),
2167 (
2180 (
2168 b'',
2181 b'',
2169 b'record-parents',
2182 b'record-parents',
2170 False,
2183 False,
2171 _(b'record parent information for the precursor'),
2184 _(b'record parent information for the precursor'),
2172 ),
2185 ),
2173 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2186 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2174 (
2187 (
2175 b'',
2188 b'',
2176 b'exclusive',
2189 b'exclusive',
2177 False,
2190 False,
2178 _(b'restrict display to markers only relevant to REV'),
2191 _(b'restrict display to markers only relevant to REV'),
2179 ),
2192 ),
2180 (b'', b'index', False, _(b'display index of the marker')),
2193 (b'', b'index', False, _(b'display index of the marker')),
2181 (b'', b'delete', [], _(b'delete markers specified by indices')),
2194 (b'', b'delete', [], _(b'delete markers specified by indices')),
2182 ]
2195 ]
2183 + cmdutil.commitopts2
2196 + cmdutil.commitopts2
2184 + cmdutil.formatteropts,
2197 + cmdutil.formatteropts,
2185 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2198 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2186 )
2199 )
2187 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2200 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2188 """create an arbitrary obsolete marker
2201 """create an arbitrary obsolete marker
2189
2202
2190 With no arguments, displays the list of obsolescence markers."""
2203 With no arguments, displays the list of obsolescence markers."""
2191
2204
2192 opts = pycompat.byteskwargs(opts)
2205 opts = pycompat.byteskwargs(opts)
2193
2206
2194 def parsenodeid(s):
2207 def parsenodeid(s):
2195 try:
2208 try:
2196 # We do not use revsingle/revrange functions here to accept
2209 # We do not use revsingle/revrange functions here to accept
2197 # arbitrary node identifiers, possibly not present in the
2210 # arbitrary node identifiers, possibly not present in the
2198 # local repository.
2211 # local repository.
2199 n = bin(s)
2212 n = bin(s)
2200 if len(n) != len(nullid):
2213 if len(n) != len(nullid):
2201 raise TypeError()
2214 raise TypeError()
2202 return n
2215 return n
2203 except TypeError:
2216 except TypeError:
2204 raise error.Abort(
2217 raise error.Abort(
2205 b'changeset references must be full hexadecimal '
2218 b'changeset references must be full hexadecimal '
2206 b'node identifiers'
2219 b'node identifiers'
2207 )
2220 )
2208
2221
2209 if opts.get(b'delete'):
2222 if opts.get(b'delete'):
2210 indices = []
2223 indices = []
2211 for v in opts.get(b'delete'):
2224 for v in opts.get(b'delete'):
2212 try:
2225 try:
2213 indices.append(int(v))
2226 indices.append(int(v))
2214 except ValueError:
2227 except ValueError:
2215 raise error.Abort(
2228 raise error.Abort(
2216 _(b'invalid index value: %r') % v,
2229 _(b'invalid index value: %r') % v,
2217 hint=_(b'use integers for indices'),
2230 hint=_(b'use integers for indices'),
2218 )
2231 )
2219
2232
2220 if repo.currenttransaction():
2233 if repo.currenttransaction():
2221 raise error.Abort(
2234 raise error.Abort(
2222 _(b'cannot delete obsmarkers in the middle of a transaction.')
2235 _(b'cannot delete obsmarkers in the middle of a transaction.')
2223 )
2236 )
2224
2237
2225 with repo.lock():
2238 with repo.lock():
2226 n = repair.deleteobsmarkers(repo.obsstore, indices)
2239 n = repair.deleteobsmarkers(repo.obsstore, indices)
2227 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2240 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2228
2241
2229 return
2242 return
2230
2243
2231 if precursor is not None:
2244 if precursor is not None:
2232 if opts[b'rev']:
2245 if opts[b'rev']:
2233 raise error.Abort(b'cannot select revision when creating marker')
2246 raise error.Abort(b'cannot select revision when creating marker')
2234 metadata = {}
2247 metadata = {}
2235 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2248 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2236 succs = tuple(parsenodeid(succ) for succ in successors)
2249 succs = tuple(parsenodeid(succ) for succ in successors)
2237 l = repo.lock()
2250 l = repo.lock()
2238 try:
2251 try:
2239 tr = repo.transaction(b'debugobsolete')
2252 tr = repo.transaction(b'debugobsolete')
2240 try:
2253 try:
2241 date = opts.get(b'date')
2254 date = opts.get(b'date')
2242 if date:
2255 if date:
2243 date = dateutil.parsedate(date)
2256 date = dateutil.parsedate(date)
2244 else:
2257 else:
2245 date = None
2258 date = None
2246 prec = parsenodeid(precursor)
2259 prec = parsenodeid(precursor)
2247 parents = None
2260 parents = None
2248 if opts[b'record_parents']:
2261 if opts[b'record_parents']:
2249 if prec not in repo.unfiltered():
2262 if prec not in repo.unfiltered():
2250 raise error.Abort(
2263 raise error.Abort(
2251 b'cannot use --record-parents on '
2264 b'cannot use --record-parents on '
2252 b'unknown changesets'
2265 b'unknown changesets'
2253 )
2266 )
2254 parents = repo.unfiltered()[prec].parents()
2267 parents = repo.unfiltered()[prec].parents()
2255 parents = tuple(p.node() for p in parents)
2268 parents = tuple(p.node() for p in parents)
2256 repo.obsstore.create(
2269 repo.obsstore.create(
2257 tr,
2270 tr,
2258 prec,
2271 prec,
2259 succs,
2272 succs,
2260 opts[b'flags'],
2273 opts[b'flags'],
2261 parents=parents,
2274 parents=parents,
2262 date=date,
2275 date=date,
2263 metadata=metadata,
2276 metadata=metadata,
2264 ui=ui,
2277 ui=ui,
2265 )
2278 )
2266 tr.close()
2279 tr.close()
2267 except ValueError as exc:
2280 except ValueError as exc:
2268 raise error.Abort(
2281 raise error.Abort(
2269 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2282 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2270 )
2283 )
2271 finally:
2284 finally:
2272 tr.release()
2285 tr.release()
2273 finally:
2286 finally:
2274 l.release()
2287 l.release()
2275 else:
2288 else:
2276 if opts[b'rev']:
2289 if opts[b'rev']:
2277 revs = scmutil.revrange(repo, opts[b'rev'])
2290 revs = scmutil.revrange(repo, opts[b'rev'])
2278 nodes = [repo[r].node() for r in revs]
2291 nodes = [repo[r].node() for r in revs]
2279 markers = list(
2292 markers = list(
2280 obsutil.getmarkers(
2293 obsutil.getmarkers(
2281 repo, nodes=nodes, exclusive=opts[b'exclusive']
2294 repo, nodes=nodes, exclusive=opts[b'exclusive']
2282 )
2295 )
2283 )
2296 )
2284 markers.sort(key=lambda x: x._data)
2297 markers.sort(key=lambda x: x._data)
2285 else:
2298 else:
2286 markers = obsutil.getmarkers(repo)
2299 markers = obsutil.getmarkers(repo)
2287
2300
2288 markerstoiter = markers
2301 markerstoiter = markers
2289 isrelevant = lambda m: True
2302 isrelevant = lambda m: True
2290 if opts.get(b'rev') and opts.get(b'index'):
2303 if opts.get(b'rev') and opts.get(b'index'):
2291 markerstoiter = obsutil.getmarkers(repo)
2304 markerstoiter = obsutil.getmarkers(repo)
2292 markerset = set(markers)
2305 markerset = set(markers)
2293 isrelevant = lambda m: m in markerset
2306 isrelevant = lambda m: m in markerset
2294
2307
2295 fm = ui.formatter(b'debugobsolete', opts)
2308 fm = ui.formatter(b'debugobsolete', opts)
2296 for i, m in enumerate(markerstoiter):
2309 for i, m in enumerate(markerstoiter):
2297 if not isrelevant(m):
2310 if not isrelevant(m):
2298 # marker can be irrelevant when we're iterating over a set
2311 # marker can be irrelevant when we're iterating over a set
2299 # of markers (markerstoiter) which is bigger than the set
2312 # of markers (markerstoiter) which is bigger than the set
2300 # of markers we want to display (markers)
2313 # of markers we want to display (markers)
2301 # this can happen if both --index and --rev options are
2314 # this can happen if both --index and --rev options are
2302 # provided and thus we need to iterate over all of the markers
2315 # provided and thus we need to iterate over all of the markers
2303 # to get the correct indices, but only display the ones that
2316 # to get the correct indices, but only display the ones that
2304 # are relevant to --rev value
2317 # are relevant to --rev value
2305 continue
2318 continue
2306 fm.startitem()
2319 fm.startitem()
2307 ind = i if opts.get(b'index') else None
2320 ind = i if opts.get(b'index') else None
2308 cmdutil.showmarker(fm, m, index=ind)
2321 cmdutil.showmarker(fm, m, index=ind)
2309 fm.end()
2322 fm.end()
2310
2323
2311
2324
2312 @command(
2325 @command(
2313 b'debugp1copies',
2326 b'debugp1copies',
2314 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2327 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2315 _(b'[-r REV]'),
2328 _(b'[-r REV]'),
2316 )
2329 )
2317 def debugp1copies(ui, repo, **opts):
2330 def debugp1copies(ui, repo, **opts):
2318 """dump copy information compared to p1"""
2331 """dump copy information compared to p1"""
2319
2332
2320 opts = pycompat.byteskwargs(opts)
2333 opts = pycompat.byteskwargs(opts)
2321 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2334 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2322 for dst, src in ctx.p1copies().items():
2335 for dst, src in ctx.p1copies().items():
2323 ui.write(b'%s -> %s\n' % (src, dst))
2336 ui.write(b'%s -> %s\n' % (src, dst))
2324
2337
2325
2338
2326 @command(
2339 @command(
2327 b'debugp2copies',
2340 b'debugp2copies',
2328 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2341 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2329 _(b'[-r REV]'),
2342 _(b'[-r REV]'),
2330 )
2343 )
2331 def debugp2copies(ui, repo, **opts):
2344 def debugp2copies(ui, repo, **opts):
2332 """dump copy information compared to p2"""
2345 """dump copy information compared to p2"""
2333
2346
2334 opts = pycompat.byteskwargs(opts)
2347 opts = pycompat.byteskwargs(opts)
2335 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2348 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2336 for dst, src in ctx.p2copies().items():
2349 for dst, src in ctx.p2copies().items():
2337 ui.write(b'%s -> %s\n' % (src, dst))
2350 ui.write(b'%s -> %s\n' % (src, dst))
2338
2351
2339
2352
2340 @command(
2353 @command(
2341 b'debugpathcomplete',
2354 b'debugpathcomplete',
2342 [
2355 [
2343 (b'f', b'full', None, _(b'complete an entire path')),
2356 (b'f', b'full', None, _(b'complete an entire path')),
2344 (b'n', b'normal', None, _(b'show only normal files')),
2357 (b'n', b'normal', None, _(b'show only normal files')),
2345 (b'a', b'added', None, _(b'show only added files')),
2358 (b'a', b'added', None, _(b'show only added files')),
2346 (b'r', b'removed', None, _(b'show only removed files')),
2359 (b'r', b'removed', None, _(b'show only removed files')),
2347 ],
2360 ],
2348 _(b'FILESPEC...'),
2361 _(b'FILESPEC...'),
2349 )
2362 )
2350 def debugpathcomplete(ui, repo, *specs, **opts):
2363 def debugpathcomplete(ui, repo, *specs, **opts):
2351 '''complete part or all of a tracked path
2364 '''complete part or all of a tracked path
2352
2365
2353 This command supports shells that offer path name completion. It
2366 This command supports shells that offer path name completion. It
2354 currently completes only files already known to the dirstate.
2367 currently completes only files already known to the dirstate.
2355
2368
2356 Completion extends only to the next path segment unless
2369 Completion extends only to the next path segment unless
2357 --full is specified, in which case entire paths are used.'''
2370 --full is specified, in which case entire paths are used.'''
2358
2371
2359 def complete(path, acceptable):
2372 def complete(path, acceptable):
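# return the sets of tracked files and directories from the dirstate that
# extend 'path' and whose dirstate status character is in 'acceptable'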
2360 dirstate = repo.dirstate
2373 dirstate = repo.dirstate
2361 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2374 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2362 rootdir = repo.root + pycompat.ossep
2375 rootdir = repo.root + pycompat.ossep
2363 if spec != repo.root and not spec.startswith(rootdir):
2376 if spec != repo.root and not spec.startswith(rootdir):
2364 return [], []
2377 return [], []
2365 if os.path.isdir(spec):
2378 if os.path.isdir(spec):
2366 spec += b'/'
2379 spec += b'/'
2367 spec = spec[len(rootdir) :]
2380 spec = spec[len(rootdir) :]
2368 fixpaths = pycompat.ossep != b'/'
2381 fixpaths = pycompat.ossep != b'/'
2369 if fixpaths:
2382 if fixpaths:
2370 spec = spec.replace(pycompat.ossep, b'/')
2383 spec = spec.replace(pycompat.ossep, b'/')
2371 speclen = len(spec)
2384 speclen = len(spec)
2372 fullpaths = opts['full']
2385 fullpaths = opts['full']
2373 files, dirs = set(), set()
2386 files, dirs = set(), set()
2374 adddir, addfile = dirs.add, files.add
2387 adddir, addfile = dirs.add, files.add
2375 for f, st in pycompat.iteritems(dirstate):
2388 for f, st in pycompat.iteritems(dirstate):
2376 if f.startswith(spec) and st[0] in acceptable:
2389 if f.startswith(spec) and st[0] in acceptable:
2377 if fixpaths:
2390 if fixpaths:
2378 f = f.replace(b'/', pycompat.ossep)
2391 f = f.replace(b'/', pycompat.ossep)
2379 if fullpaths:
2392 if fullpaths:
2380 addfile(f)
2393 addfile(f)
2381 continue
2394 continue
2382 s = f.find(pycompat.ossep, speclen)
2395 s = f.find(pycompat.ossep, speclen)
2383 if s >= 0:
2396 if s >= 0:
2384 adddir(f[:s])
2397 adddir(f[:s])
2385 else:
2398 else:
2386 addfile(f)
2399 addfile(f)
2387 return files, dirs
2400 return files, dirs
2388
2401
2389 acceptable = b''
2402 acceptable = b''
2390 if opts['normal']:
2403 if opts['normal']:
2391 acceptable += b'nm'
2404 acceptable += b'nm'
2392 if opts['added']:
2405 if opts['added']:
2393 acceptable += b'a'
2406 acceptable += b'a'
2394 if opts['removed']:
2407 if opts['removed']:
2395 acceptable += b'r'
2408 acceptable += b'r'
2396 cwd = repo.getcwd()
2409 cwd = repo.getcwd()
2397 if not specs:
2410 if not specs:
2398 specs = [b'.']
2411 specs = [b'.']
2399
2412
2400 files, dirs = set(), set()
2413 files, dirs = set(), set()
2401 for spec in specs:
2414 for spec in specs:
2402 f, d = complete(spec, acceptable or b'nmar')
2415 f, d = complete(spec, acceptable or b'nmar')
2403 files.update(f)
2416 files.update(f)
2404 dirs.update(d)
2417 dirs.update(d)
2405 files.update(dirs)
2418 files.update(dirs)
2406 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2419 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2407 ui.write(b'\n')
2420 ui.write(b'\n')
2408
2421
2409
2422
2410 @command(
2423 @command(
2411 b'debugpathcopies',
2424 b'debugpathcopies',
2412 cmdutil.walkopts,
2425 cmdutil.walkopts,
2413 b'hg debugpathcopies REV1 REV2 [FILE]',
2426 b'hg debugpathcopies REV1 REV2 [FILE]',
2414 inferrepo=True,
2427 inferrepo=True,
2415 )
2428 )
2416 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2429 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2417 """show copies between two revisions"""
2430 """show copies between two revisions"""
2418 ctx1 = scmutil.revsingle(repo, rev1)
2431 ctx1 = scmutil.revsingle(repo, rev1)
2419 ctx2 = scmutil.revsingle(repo, rev2)
2432 ctx2 = scmutil.revsingle(repo, rev2)
2420 m = scmutil.match(ctx1, pats, opts)
2433 m = scmutil.match(ctx1, pats, opts)
2421 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2434 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2422 ui.write(b'%s -> %s\n' % (src, dst))
2435 ui.write(b'%s -> %s\n' % (src, dst))
2423
2436
2424
2437
2425 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2438 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2426 def debugpeer(ui, path):
2439 def debugpeer(ui, path):
2427 """establish a connection to a peer repository"""
2440 """establish a connection to a peer repository"""
2428 # Always enable peer request logging. Requires --debug to display
2441 # Always enable peer request logging. Requires --debug to display
2429 # though.
2442 # though.
2430 overrides = {
2443 overrides = {
2431 (b'devel', b'debug.peer-request'): True,
2444 (b'devel', b'debug.peer-request'): True,
2432 }
2445 }
2433
2446
2434 with ui.configoverride(overrides):
2447 with ui.configoverride(overrides):
2435 peer = hg.peer(ui, {}, path)
2448 peer = hg.peer(ui, {}, path)
2436
2449
2437 local = peer.local() is not None
2450 local = peer.local() is not None
2438 canpush = peer.canpush()
2451 canpush = peer.canpush()
2439
2452
2440 ui.write(_(b'url: %s\n') % peer.url())
2453 ui.write(_(b'url: %s\n') % peer.url())
2441 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2454 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2442 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2455 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2443
2456
2444
2457
2445 @command(
2458 @command(
2446 b'debugpickmergetool',
2459 b'debugpickmergetool',
2447 [
2460 [
2448 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2461 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2449 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2462 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2450 ]
2463 ]
2451 + cmdutil.walkopts
2464 + cmdutil.walkopts
2452 + cmdutil.mergetoolopts,
2465 + cmdutil.mergetoolopts,
2453 _(b'[PATTERN]...'),
2466 _(b'[PATTERN]...'),
2454 inferrepo=True,
2467 inferrepo=True,
2455 )
2468 )
2456 def debugpickmergetool(ui, repo, *pats, **opts):
2469 def debugpickmergetool(ui, repo, *pats, **opts):
2457 """examine which merge tool is chosen for specified file
2470 """examine which merge tool is chosen for specified file
2458
2471
2459 As described in :hg:`help merge-tools`, Mercurial examines the
2472 As described in :hg:`help merge-tools`, Mercurial examines the
2460 configurations below, in this order, to decide which merge tool is
2473 configurations below, in this order, to decide which merge tool is
2461 chosen for the specified file.
2474 chosen for the specified file.
2462
2475
2463 1. ``--tool`` option
2476 1. ``--tool`` option
2464 2. ``HGMERGE`` environment variable
2477 2. ``HGMERGE`` environment variable
2465 3. configurations in ``merge-patterns`` section
2478 3. configurations in ``merge-patterns`` section
2466 4. configuration of ``ui.merge``
2479 4. configuration of ``ui.merge``
2467 5. configurations in ``merge-tools`` section
2480 5. configurations in ``merge-tools`` section
2468 6. ``hgmerge`` tool (for historical reasons only)
2481 6. ``hgmerge`` tool (for historical reasons only)
2469 7. default tool for fallback (``:merge`` or ``:prompt``)
2482 7. default tool for fallback (``:merge`` or ``:prompt``)
2470
2483
2471 This command writes out the examination result in the style below::
2484 This command writes out the examination result in the style below::
2472
2485
2473 FILE = MERGETOOL
2486 FILE = MERGETOOL
2474
2487
2475 By default, all files known in the first parent context of the
2488 By default, all files known in the first parent context of the
2476 working directory are examined. Use file patterns and/or -I/-X
2489 working directory are examined. Use file patterns and/or -I/-X
2477 options to limit target files. -r/--rev is also useful to examine
2490 options to limit target files. -r/--rev is also useful to examine
2478 files in another context without actually updating to it.
2491 files in another context without actually updating to it.
2479
2492
2480 With --debug, this command shows warning messages while matching
2493 With --debug, this command shows warning messages while matching
2481 against ``merge-patterns`` and so on, too. It is recommended to
2494 against ``merge-patterns`` and so on, too. It is recommended to
2482 use this option with explicit file patterns and/or -I/-X options,
2495 use this option with explicit file patterns and/or -I/-X options,
2483 because this option increases the amount of output per file according
2496 because this option increases the amount of output per file according
2484 to the configurations in hgrc.
2497 to the configurations in hgrc.
2485
2498
2486 With -v/--verbose, this command first shows the configurations below
2499 With -v/--verbose, this command first shows the configurations below
2487 (only if they are specified).
2500 (only if they are specified).
2488
2501
2489 - ``--tool`` option
2502 - ``--tool`` option
2490 - ``HGMERGE`` environment variable
2503 - ``HGMERGE`` environment variable
2491 - configuration of ``ui.merge``
2504 - configuration of ``ui.merge``
2492
2505
2493 If the merge tool is chosen before matching against
2506 If the merge tool is chosen before matching against
2494 ``merge-patterns``, this command can't show any helpful
2507 ``merge-patterns``, this command can't show any helpful
2495 information, even with --debug. In such a case, the information
2508 information, even with --debug. In such a case, the information
2496 above is useful for understanding why a merge tool was chosen.
2509 above is useful for understanding why a merge tool was chosen.
2497 """
2510 """
2498 opts = pycompat.byteskwargs(opts)
2511 opts = pycompat.byteskwargs(opts)
2499 overrides = {}
2512 overrides = {}
2500 if opts[b'tool']:
2513 if opts[b'tool']:
2501 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2514 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2502 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2515 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2503
2516
2504 with ui.configoverride(overrides, b'debugmergepatterns'):
2517 with ui.configoverride(overrides, b'debugmergepatterns'):
2505 hgmerge = encoding.environ.get(b"HGMERGE")
2518 hgmerge = encoding.environ.get(b"HGMERGE")
2506 if hgmerge is not None:
2519 if hgmerge is not None:
2507 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2520 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2508 uimerge = ui.config(b"ui", b"merge")
2521 uimerge = ui.config(b"ui", b"merge")
2509 if uimerge:
2522 if uimerge:
2510 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2523 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2511
2524
2512 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2525 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2513 m = scmutil.match(ctx, pats, opts)
2526 m = scmutil.match(ctx, pats, opts)
2514 changedelete = opts[b'changedelete']
2527 changedelete = opts[b'changedelete']
2515 for path in ctx.walk(m):
2528 for path in ctx.walk(m):
2516 fctx = ctx[path]
2529 fctx = ctx[path]
2517 try:
2530 try:
2518 if not ui.debugflag:
2531 if not ui.debugflag:
2519 ui.pushbuffer(error=True)
2532 ui.pushbuffer(error=True)
2520 tool, toolpath = filemerge._picktool(
2533 tool, toolpath = filemerge._picktool(
2521 repo,
2534 repo,
2522 ui,
2535 ui,
2523 path,
2536 path,
2524 fctx.isbinary(),
2537 fctx.isbinary(),
2525 b'l' in fctx.flags(),
2538 b'l' in fctx.flags(),
2526 changedelete,
2539 changedelete,
2527 )
2540 )
2528 finally:
2541 finally:
2529 if not ui.debugflag:
2542 if not ui.debugflag:
2530 ui.popbuffer()
2543 ui.popbuffer()
2531 ui.write(b'%s = %s\n' % (path, tool))
2544 ui.write(b'%s = %s\n' % (path, tool))
2532
2545
2533
2546
2534 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2547 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2535 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2548 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2536 '''access the pushkey key/value protocol
2549 '''access the pushkey key/value protocol
2537
2550
2538 With two args, list the keys in the given namespace.
2551 With two args, list the keys in the given namespace.
2539
2552
2540 With five args, set a key to new if it currently is set to old.
2553 With five args, set a key to new if it currently is set to old.
2541 Reports success or failure.
2554 Reports success or failure.
2542 '''
2555 '''
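# Usage sketch (editorial illustration; the repository path and bookmark name
# are placeholders, and 'bookmarks' is just one example pushkey namespace):
#   hg debugpushkey /path/to/repo bookmarks
#   hg debugpushkey /path/to/repo bookmarks mybook <oldnode> <newnode>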
2543
2556
2544 target = hg.peer(ui, {}, repopath)
2557 target = hg.peer(ui, {}, repopath)
2545 if keyinfo:
2558 if keyinfo:
2546 key, old, new = keyinfo
2559 key, old, new = keyinfo
2547 with target.commandexecutor() as e:
2560 with target.commandexecutor() as e:
2548 r = e.callcommand(
2561 r = e.callcommand(
2549 b'pushkey',
2562 b'pushkey',
2550 {
2563 {
2551 b'namespace': namespace,
2564 b'namespace': namespace,
2552 b'key': key,
2565 b'key': key,
2553 b'old': old,
2566 b'old': old,
2554 b'new': new,
2567 b'new': new,
2555 },
2568 },
2556 ).result()
2569 ).result()
2557
2570
2558 ui.status(pycompat.bytestr(r) + b'\n')
2571 ui.status(pycompat.bytestr(r) + b'\n')
2559 return not r
2572 return not r
2560 else:
2573 else:
2561 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2574 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2562 ui.write(
2575 ui.write(
2563 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2576 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2564 )
2577 )
2565
2578
2566
2579
2567 @command(b'debugpvec', [], _(b'A B'))
2580 @command(b'debugpvec', [], _(b'A B'))
2568 def debugpvec(ui, repo, a, b=None):
2581 def debugpvec(ui, repo, a, b=None):
2569 ca = scmutil.revsingle(repo, a)
2582 ca = scmutil.revsingle(repo, a)
2570 cb = scmutil.revsingle(repo, b)
2583 cb = scmutil.revsingle(repo, b)
2571 pa = pvec.ctxpvec(ca)
2584 pa = pvec.ctxpvec(ca)
2572 pb = pvec.ctxpvec(cb)
2585 pb = pvec.ctxpvec(cb)
2573 if pa == pb:
2586 if pa == pb:
2574 rel = b"="
2587 rel = b"="
2575 elif pa > pb:
2588 elif pa > pb:
2576 rel = b">"
2589 rel = b">"
2577 elif pa < pb:
2590 elif pa < pb:
2578 rel = b"<"
2591 rel = b"<"
2579 elif pa | pb:
2592 elif pa | pb:
2580 rel = b"|"
2593 rel = b"|"
2581 ui.write(_(b"a: %s\n") % pa)
2594 ui.write(_(b"a: %s\n") % pa)
2582 ui.write(_(b"b: %s\n") % pb)
2595 ui.write(_(b"b: %s\n") % pb)
2583 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2596 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2584 ui.write(
2597 ui.write(
2585 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2598 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2586 % (
2599 % (
2587 abs(pa._depth - pb._depth),
2600 abs(pa._depth - pb._depth),
2588 pvec._hamming(pa._vec, pb._vec),
2601 pvec._hamming(pa._vec, pb._vec),
2589 pa.distance(pb),
2602 pa.distance(pb),
2590 rel,
2603 rel,
2591 )
2604 )
2592 )
2605 )
2593
2606
2594
2607
2595 @command(
2608 @command(
2596 b'debugrebuilddirstate|debugrebuildstate',
2609 b'debugrebuilddirstate|debugrebuildstate',
2597 [
2610 [
2598 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2611 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2599 (
2612 (
2600 b'',
2613 b'',
2601 b'minimal',
2614 b'minimal',
2602 None,
2615 None,
2603 _(
2616 _(
2604 b'only rebuild files that are inconsistent with '
2617 b'only rebuild files that are inconsistent with '
2605 b'the working copy parent'
2618 b'the working copy parent'
2606 ),
2619 ),
2607 ),
2620 ),
2608 ],
2621 ],
2609 _(b'[-r REV]'),
2622 _(b'[-r REV]'),
2610 )
2623 )
2611 def debugrebuilddirstate(ui, repo, rev, **opts):
2624 def debugrebuilddirstate(ui, repo, rev, **opts):
2612 """rebuild the dirstate as it would look for the given revision
2625 """rebuild the dirstate as it would look for the given revision
2613
2626
2614 If no revision is specified, the first current parent will be used.
2627 If no revision is specified, the first current parent will be used.
2615
2628
2616 The dirstate will be set to the files of the given revision.
2629 The dirstate will be set to the files of the given revision.
2617 The actual working directory content or existing dirstate
2630 The actual working directory content or existing dirstate
2618 information such as adds or removes is not considered.
2631 information such as adds or removes is not considered.
2619
2632
2620 ``minimal`` will only rebuild the dirstate status for files that claim to be
2633 ``minimal`` will only rebuild the dirstate status for files that claim to be
2621 tracked but are not in the parent manifest, or that exist in the parent
2634 tracked but are not in the parent manifest, or that exist in the parent
2622 manifest but are not in the dirstate. It will not change adds, removes, or
2635 manifest but are not in the dirstate. It will not change adds, removes, or
2623 modified files that are in the working copy parent.
2636 modified files that are in the working copy parent.
2624
2637
2625 One use of this command is to make the next :hg:`status` invocation
2638 One use of this command is to make the next :hg:`status` invocation
2626 check the actual file content.
2639 check the actual file content.
2627 """
2640 """
2628 ctx = scmutil.revsingle(repo, rev)
2641 ctx = scmutil.revsingle(repo, rev)
2629 with repo.wlock():
2642 with repo.wlock():
2630 dirstate = repo.dirstate
2643 dirstate = repo.dirstate
2631 changedfiles = None
2644 changedfiles = None
2632 # See command doc for what minimal does.
2645 # See command doc for what minimal does.
2633 if opts.get('minimal'):
2646 if opts.get('minimal'):
2634 manifestfiles = set(ctx.manifest().keys())
2647 manifestfiles = set(ctx.manifest().keys())
2635 dirstatefiles = set(dirstate)
2648 dirstatefiles = set(dirstate)
2636 manifestonly = manifestfiles - dirstatefiles
2649 manifestonly = manifestfiles - dirstatefiles
2637 dsonly = dirstatefiles - manifestfiles
2650 dsonly = dirstatefiles - manifestfiles
2638 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2651 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2639 changedfiles = manifestonly | dsnotadded
2652 changedfiles = manifestonly | dsnotadded
2640
2653
2641 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2654 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2642
2655
2643
2656
2644 @command(b'debugrebuildfncache', [], b'')
2657 @command(b'debugrebuildfncache', [], b'')
2645 def debugrebuildfncache(ui, repo):
2658 def debugrebuildfncache(ui, repo):
2646 """rebuild the fncache file"""
2659 """rebuild the fncache file"""
2647 repair.rebuildfncache(ui, repo)
2660 repair.rebuildfncache(ui, repo)
2648
2661
2649
2662
2650 @command(
2663 @command(
2651 b'debugrename',
2664 b'debugrename',
2652 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2665 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2653 _(b'[-r REV] [FILE]...'),
2666 _(b'[-r REV] [FILE]...'),
2654 )
2667 )
2655 def debugrename(ui, repo, *pats, **opts):
2668 def debugrename(ui, repo, *pats, **opts):
2656 """dump rename information"""
2669 """dump rename information"""
2657
2670
2658 opts = pycompat.byteskwargs(opts)
2671 opts = pycompat.byteskwargs(opts)
2659 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2672 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2660 m = scmutil.match(ctx, pats, opts)
2673 m = scmutil.match(ctx, pats, opts)
2661 for abs in ctx.walk(m):
2674 for abs in ctx.walk(m):
2662 fctx = ctx[abs]
2675 fctx = ctx[abs]
2663 o = fctx.filelog().renamed(fctx.filenode())
2676 o = fctx.filelog().renamed(fctx.filenode())
2664 rel = repo.pathto(abs)
2677 rel = repo.pathto(abs)
2665 if o:
2678 if o:
2666 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2679 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2667 else:
2680 else:
2668 ui.write(_(b"%s not renamed\n") % rel)
2681 ui.write(_(b"%s not renamed\n") % rel)
2669
2682
2670
2683
2671 @command(b'debugrequires|debugrequirements', [], b'')
2684 @command(b'debugrequires|debugrequirements', [], b'')
2672 def debugrequirements(ui, repo):
2685 def debugrequirements(ui, repo):
2673 """ print the current repo requirements """
2686 """ print the current repo requirements """
2674 for r in sorted(repo.requirements):
2687 for r in sorted(repo.requirements):
2675 ui.write(b"%s\n" % r)
2688 ui.write(b"%s\n" % r)
2676
2689
2677
2690
2678 @command(
2691 @command(
2679 b'debugrevlog',
2692 b'debugrevlog',
2680 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2693 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2681 _(b'-c|-m|FILE'),
2694 _(b'-c|-m|FILE'),
2682 optionalrepo=True,
2695 optionalrepo=True,
2683 )
2696 )
2684 def debugrevlog(ui, repo, file_=None, **opts):
2697 def debugrevlog(ui, repo, file_=None, **opts):
2685 """show data and statistics about a revlog"""
2698 """show data and statistics about a revlog"""
2686 opts = pycompat.byteskwargs(opts)
2699 opts = pycompat.byteskwargs(opts)
2687 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2700 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2688
2701
2689 if opts.get(b"dump"):
2702 if opts.get(b"dump"):
2690 numrevs = len(r)
2703 numrevs = len(r)
2691 ui.write(
2704 ui.write(
2692 (
2705 (
2693 b"# rev p1rev p2rev start end deltastart base p1 p2"
2706 b"# rev p1rev p2rev start end deltastart base p1 p2"
2694 b" rawsize totalsize compression heads chainlen\n"
2707 b" rawsize totalsize compression heads chainlen\n"
2695 )
2708 )
2696 )
2709 )
2697 ts = 0
2710 ts = 0
2698 heads = set()
2711 heads = set()
2699
2712
2700 for rev in pycompat.xrange(numrevs):
2713 for rev in pycompat.xrange(numrevs):
2701 dbase = r.deltaparent(rev)
2714 dbase = r.deltaparent(rev)
2702 if dbase == -1:
2715 if dbase == -1:
2703 dbase = rev
2716 dbase = rev
2704 cbase = r.chainbase(rev)
2717 cbase = r.chainbase(rev)
2705 clen = r.chainlen(rev)
2718 clen = r.chainlen(rev)
2706 p1, p2 = r.parentrevs(rev)
2719 p1, p2 = r.parentrevs(rev)
2707 rs = r.rawsize(rev)
2720 rs = r.rawsize(rev)
2708 ts = ts + rs
2721 ts = ts + rs
2709 heads -= set(r.parentrevs(rev))
2722 heads -= set(r.parentrevs(rev))
2710 heads.add(rev)
2723 heads.add(rev)
2711 try:
2724 try:
2712 compression = ts / r.end(rev)
2725 compression = ts / r.end(rev)
2713 except ZeroDivisionError:
2726 except ZeroDivisionError:
2714 compression = 0
2727 compression = 0
2715 ui.write(
2728 ui.write(
2716 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2729 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2717 b"%11d %5d %8d\n"
2730 b"%11d %5d %8d\n"
2718 % (
2731 % (
2719 rev,
2732 rev,
2720 p1,
2733 p1,
2721 p2,
2734 p2,
2722 r.start(rev),
2735 r.start(rev),
2723 r.end(rev),
2736 r.end(rev),
2724 r.start(dbase),
2737 r.start(dbase),
2725 r.start(cbase),
2738 r.start(cbase),
2726 r.start(p1),
2739 r.start(p1),
2727 r.start(p2),
2740 r.start(p2),
2728 rs,
2741 rs,
2729 ts,
2742 ts,
2730 compression,
2743 compression,
2731 len(heads),
2744 len(heads),
2732 clen,
2745 clen,
2733 )
2746 )
2734 )
2747 )
2735 return 0
2748 return 0
2736
2749
2737 v = r.version
2750 v = r.version
2738 format = v & 0xFFFF
2751 format = v & 0xFFFF
2739 flags = []
2752 flags = []
2740 gdelta = False
2753 gdelta = False
2741 if v & revlog.FLAG_INLINE_DATA:
2754 if v & revlog.FLAG_INLINE_DATA:
2742 flags.append(b'inline')
2755 flags.append(b'inline')
2743 if v & revlog.FLAG_GENERALDELTA:
2756 if v & revlog.FLAG_GENERALDELTA:
2744 gdelta = True
2757 gdelta = True
2745 flags.append(b'generaldelta')
2758 flags.append(b'generaldelta')
2746 if not flags:
2759 if not flags:
2747 flags = [b'(none)']
2760 flags = [b'(none)']
2748
2761
2749 ### tracks merge vs single parent
2762 ### tracks merge vs single parent
2750 nummerges = 0
2763 nummerges = 0
2751
2764
2752 ### tracks the ways the "delta" is built
2765 ### tracks the ways the "delta" is built
2753 # nodelta
2766 # nodelta
2754 numempty = 0
2767 numempty = 0
2755 numemptytext = 0
2768 numemptytext = 0
2756 numemptydelta = 0
2769 numemptydelta = 0
2757 # full file content
2770 # full file content
2758 numfull = 0
2771 numfull = 0
2759 # intermediate snapshot against a prior snapshot
2772 # intermediate snapshot against a prior snapshot
2760 numsemi = 0
2773 numsemi = 0
2761 # snapshot count per depth
2774 # snapshot count per depth
2762 numsnapdepth = collections.defaultdict(lambda: 0)
2775 numsnapdepth = collections.defaultdict(lambda: 0)
2763 # delta against previous revision
2776 # delta against previous revision
2764 numprev = 0
2777 numprev = 0
2765 # delta against first or second parent (not prev)
2778 # delta against first or second parent (not prev)
2766 nump1 = 0
2779 nump1 = 0
2767 nump2 = 0
2780 nump2 = 0
2768 # delta against neither prev nor parents
2781 # delta against neither prev nor parents
2769 numother = 0
2782 numother = 0
2770 # delta against prev that are also first or second parent
2783 # delta against prev that are also first or second parent
2771 # (details of `numprev`)
2784 # (details of `numprev`)
2772 nump1prev = 0
2785 nump1prev = 0
2773 nump2prev = 0
2786 nump2prev = 0
2774
2787
2775 # data about the delta chain of each rev
2788 # data about the delta chain of each rev
2776 chainlengths = []
2789 chainlengths = []
2777 chainbases = []
2790 chainbases = []
2778 chainspans = []
2791 chainspans = []
2779
2792
2780 # data about each revision
2793 # data about each revision
2781 datasize = [None, 0, 0]
2794 datasize = [None, 0, 0]
2782 fullsize = [None, 0, 0]
2795 fullsize = [None, 0, 0]
2783 semisize = [None, 0, 0]
2796 semisize = [None, 0, 0]
2784 # snapshot count per depth
2797 # snapshot count per depth
2785 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2798 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2786 deltasize = [None, 0, 0]
2799 deltasize = [None, 0, 0]
2787 chunktypecounts = {}
2800 chunktypecounts = {}
2788 chunktypesizes = {}
2801 chunktypesizes = {}
2789
2802
2790 def addsize(size, l):
2803 def addsize(size, l):
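# track the minimum (l[0]), maximum (l[1]) and running total (l[2]) of sizes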
2791 if l[0] is None or size < l[0]:
2804 if l[0] is None or size < l[0]:
2792 l[0] = size
2805 l[0] = size
2793 if size > l[1]:
2806 if size > l[1]:
2794 l[1] = size
2807 l[1] = size
2795 l[2] += size
2808 l[2] += size
2796
2809
2797 numrevs = len(r)
2810 numrevs = len(r)
2798 for rev in pycompat.xrange(numrevs):
2811 for rev in pycompat.xrange(numrevs):
2799 p1, p2 = r.parentrevs(rev)
2812 p1, p2 = r.parentrevs(rev)
2800 delta = r.deltaparent(rev)
2813 delta = r.deltaparent(rev)
2801 if format > 0:
2814 if format > 0:
2802 addsize(r.rawsize(rev), datasize)
2815 addsize(r.rawsize(rev), datasize)
2803 if p2 != nullrev:
2816 if p2 != nullrev:
2804 nummerges += 1
2817 nummerges += 1
2805 size = r.length(rev)
2818 size = r.length(rev)
2806 if delta == nullrev:
2819 if delta == nullrev:
2807 chainlengths.append(0)
2820 chainlengths.append(0)
2808 chainbases.append(r.start(rev))
2821 chainbases.append(r.start(rev))
2809 chainspans.append(size)
2822 chainspans.append(size)
2810 if size == 0:
2823 if size == 0:
2811 numempty += 1
2824 numempty += 1
2812 numemptytext += 1
2825 numemptytext += 1
2813 else:
2826 else:
2814 numfull += 1
2827 numfull += 1
2815 numsnapdepth[0] += 1
2828 numsnapdepth[0] += 1
2816 addsize(size, fullsize)
2829 addsize(size, fullsize)
2817 addsize(size, snapsizedepth[0])
2830 addsize(size, snapsizedepth[0])
2818 else:
2831 else:
2819 chainlengths.append(chainlengths[delta] + 1)
2832 chainlengths.append(chainlengths[delta] + 1)
2820 baseaddr = chainbases[delta]
2833 baseaddr = chainbases[delta]
2821 revaddr = r.start(rev)
2834 revaddr = r.start(rev)
2822 chainbases.append(baseaddr)
2835 chainbases.append(baseaddr)
2823 chainspans.append((revaddr - baseaddr) + size)
2836 chainspans.append((revaddr - baseaddr) + size)
2824 if size == 0:
2837 if size == 0:
2825 numempty += 1
2838 numempty += 1
2826 numemptydelta += 1
2839 numemptydelta += 1
2827 elif r.issnapshot(rev):
2840 elif r.issnapshot(rev):
2828 addsize(size, semisize)
2841 addsize(size, semisize)
2829 numsemi += 1
2842 numsemi += 1
2830 depth = r.snapshotdepth(rev)
2843 depth = r.snapshotdepth(rev)
2831 numsnapdepth[depth] += 1
2844 numsnapdepth[depth] += 1
2832 addsize(size, snapsizedepth[depth])
2845 addsize(size, snapsizedepth[depth])
2833 else:
2846 else:
2834 addsize(size, deltasize)
2847 addsize(size, deltasize)
2835 if delta == rev - 1:
2848 if delta == rev - 1:
2836 numprev += 1
2849 numprev += 1
2837 if delta == p1:
2850 if delta == p1:
2838 nump1prev += 1
2851 nump1prev += 1
2839 elif delta == p2:
2852 elif delta == p2:
2840 nump2prev += 1
2853 nump2prev += 1
2841 elif delta == p1:
2854 elif delta == p1:
2842 nump1 += 1
2855 nump1 += 1
2843 elif delta == p2:
2856 elif delta == p2:
2844 nump2 += 1
2857 nump2 += 1
2845 elif delta != nullrev:
2858 elif delta != nullrev:
2846 numother += 1
2859 numother += 1
2847
2860
2848 # Obtain data on the raw chunks in the revlog.
2861 # Obtain data on the raw chunks in the revlog.
2849 if util.safehasattr(r, b'_getsegmentforrevs'):
2862 if util.safehasattr(r, b'_getsegmentforrevs'):
2850 segment = r._getsegmentforrevs(rev, rev)[1]
2863 segment = r._getsegmentforrevs(rev, rev)[1]
2851 else:
2864 else:
2852 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2865 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2853 if segment:
2866 if segment:
2854 chunktype = bytes(segment[0:1])
2867 chunktype = bytes(segment[0:1])
2855 else:
2868 else:
2856 chunktype = b'empty'
2869 chunktype = b'empty'
2857
2870
2858 if chunktype not in chunktypecounts:
2871 if chunktype not in chunktypecounts:
2859 chunktypecounts[chunktype] = 0
2872 chunktypecounts[chunktype] = 0
2860 chunktypesizes[chunktype] = 0
2873 chunktypesizes[chunktype] = 0
2861
2874
2862 chunktypecounts[chunktype] += 1
2875 chunktypecounts[chunktype] += 1
2863 chunktypesizes[chunktype] += size
2876 chunktypesizes[chunktype] += size
2864
2877
2865 # Adjust size min value for empty cases
2878 # Adjust size min value for empty cases
2866 for size in (datasize, fullsize, semisize, deltasize):
2879 for size in (datasize, fullsize, semisize, deltasize):
2867 if size[0] is None:
2880 if size[0] is None:
2868 size[0] = 0
2881 size[0] = 0
2869
2882
2870 numdeltas = numrevs - numfull - numempty - numsemi
2883 numdeltas = numrevs - numfull - numempty - numsemi
2871 numoprev = numprev - nump1prev - nump2prev
2884 numoprev = numprev - nump1prev - nump2prev
2872 totalrawsize = datasize[2]
2885 totalrawsize = datasize[2]
2873 datasize[2] /= numrevs
2886 datasize[2] /= numrevs
2874 fulltotal = fullsize[2]
2887 fulltotal = fullsize[2]
2875 if numfull == 0:
2888 if numfull == 0:
2876 fullsize[2] = 0
2889 fullsize[2] = 0
2877 else:
2890 else:
2878 fullsize[2] /= numfull
2891 fullsize[2] /= numfull
2879 semitotal = semisize[2]
2892 semitotal = semisize[2]
2880 snaptotal = {}
2893 snaptotal = {}
2881 if numsemi > 0:
2894 if numsemi > 0:
2882 semisize[2] /= numsemi
2895 semisize[2] /= numsemi
2883 for depth in snapsizedepth:
2896 for depth in snapsizedepth:
2884 snaptotal[depth] = snapsizedepth[depth][2]
2897 snaptotal[depth] = snapsizedepth[depth][2]
2885 snapsizedepth[depth][2] /= numsnapdepth[depth]
2898 snapsizedepth[depth][2] /= numsnapdepth[depth]
2886
2899
2887 deltatotal = deltasize[2]
2900 deltatotal = deltasize[2]
2888 if numdeltas > 0:
2901 if numdeltas > 0:
2889 deltasize[2] /= numdeltas
2902 deltasize[2] /= numdeltas
2890 totalsize = fulltotal + semitotal + deltatotal
2903 totalsize = fulltotal + semitotal + deltatotal
2891 avgchainlen = sum(chainlengths) / numrevs
2904 avgchainlen = sum(chainlengths) / numrevs
2892 maxchainlen = max(chainlengths)
2905 maxchainlen = max(chainlengths)
2893 maxchainspan = max(chainspans)
2906 maxchainspan = max(chainspans)
2894 compratio = 1
2907 compratio = 1
2895 if totalsize:
2908 if totalsize:
2896 compratio = totalrawsize / totalsize
2909 compratio = totalrawsize / totalsize
2897
2910
2898 basedfmtstr = b'%%%dd\n'
2911 basedfmtstr = b'%%%dd\n'
2899 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2912 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2900
2913
2901 def dfmtstr(max):
2914 def dfmtstr(max):
2902 return basedfmtstr % len(str(max))
2915 return basedfmtstr % len(str(max))
2903
2916
2904 def pcfmtstr(max, padding=0):
2917 def pcfmtstr(max, padding=0):
2905 return basepcfmtstr % (len(str(max)), b' ' * padding)
2918 return basepcfmtstr % (len(str(max)), b' ' * padding)
2906
2919
2907 def pcfmt(value, total):
2920 def pcfmt(value, total):
2908 if total:
2921 if total:
2909 return (value, 100 * float(value) / total)
2922 return (value, 100 * float(value) / total)
2910 else:
2923 else:
2911 return value, 100.0
2924 return value, 100.0
2912
2925
2913 ui.writenoi18n(b'format : %d\n' % format)
2926 ui.writenoi18n(b'format : %d\n' % format)
2914 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2927 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2915
2928
2916 ui.write(b'\n')
2929 ui.write(b'\n')
2917 fmt = pcfmtstr(totalsize)
2930 fmt = pcfmtstr(totalsize)
2918 fmt2 = dfmtstr(totalsize)
2931 fmt2 = dfmtstr(totalsize)
2919 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2932 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2920 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2933 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2921 ui.writenoi18n(
2934 ui.writenoi18n(
2922 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2935 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2923 )
2936 )
2924 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2937 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2925 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2938 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2926 ui.writenoi18n(
2939 ui.writenoi18n(
2927 b' text : '
2940 b' text : '
2928 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2941 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2929 )
2942 )
2930 ui.writenoi18n(
2943 ui.writenoi18n(
2931 b' delta : '
2944 b' delta : '
2932 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2945 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2933 )
2946 )
2934 ui.writenoi18n(
2947 ui.writenoi18n(
2935 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2948 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2936 )
2949 )
2937 for depth in sorted(numsnapdepth):
2950 for depth in sorted(numsnapdepth):
2938 ui.write(
2951 ui.write(
2939 (b' lvl-%-3d : ' % depth)
2952 (b' lvl-%-3d : ' % depth)
2940 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2953 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2941 )
2954 )
2942 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2955 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2943 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2956 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2944 ui.writenoi18n(
2957 ui.writenoi18n(
2945 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2958 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2946 )
2959 )
2947 for depth in sorted(numsnapdepth):
2960 for depth in sorted(numsnapdepth):
2948 ui.write(
2961 ui.write(
2949 (b' lvl-%-3d : ' % depth)
2962 (b' lvl-%-3d : ' % depth)
2950 + fmt % pcfmt(snaptotal[depth], totalsize)
2963 + fmt % pcfmt(snaptotal[depth], totalsize)
2951 )
2964 )
2952 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2965 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2953
2966
2954 def fmtchunktype(chunktype):
2967 def fmtchunktype(chunktype):
2955 if chunktype == b'empty':
2968 if chunktype == b'empty':
2956 return b' %s : ' % chunktype
2969 return b' %s : ' % chunktype
2957 elif chunktype in pycompat.bytestr(string.ascii_letters):
2970 elif chunktype in pycompat.bytestr(string.ascii_letters):
2958 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2971 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2959 else:
2972 else:
2960 return b' 0x%s : ' % hex(chunktype)
2973 return b' 0x%s : ' % hex(chunktype)
2961
2974
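# Added note: `hex` above is mercurial.node.hex (hexlify), not the builtin, so
# for a chunk whose first byte is e.g. b'u' (an uncompressed chunk) this
# renders roughly as b'0x75 (u) : '; the exact column padding follows the
# string literals above.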
2962 ui.write(b'\n')
2975 ui.write(b'\n')
2963 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2976 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2964 for chunktype in sorted(chunktypecounts):
2977 for chunktype in sorted(chunktypecounts):
2965 ui.write(fmtchunktype(chunktype))
2978 ui.write(fmtchunktype(chunktype))
2966 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2979 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2967 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2980 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2968 for chunktype in sorted(chunktypecounts):
2981 for chunktype in sorted(chunktypecounts):
2969 ui.write(fmtchunktype(chunktype))
2982 ui.write(fmtchunktype(chunktype))
2970 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2983 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2971
2984
2972 ui.write(b'\n')
2985 ui.write(b'\n')
2973 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2986 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2974 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2987 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2975 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2988 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2976 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2989 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2977 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2990 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2978
2991
2979 if format > 0:
2992 if format > 0:
2980 ui.write(b'\n')
2993 ui.write(b'\n')
2981 ui.writenoi18n(
2994 ui.writenoi18n(
2982 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2995 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2983 % tuple(datasize)
2996 % tuple(datasize)
2984 )
2997 )
2985 ui.writenoi18n(
2998 ui.writenoi18n(
2986 b'full revision size (min/max/avg) : %d / %d / %d\n'
2999 b'full revision size (min/max/avg) : %d / %d / %d\n'
2987 % tuple(fullsize)
3000 % tuple(fullsize)
2988 )
3001 )
2989 ui.writenoi18n(
3002 ui.writenoi18n(
2990 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3003 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2991 % tuple(semisize)
3004 % tuple(semisize)
2992 )
3005 )
2993 for depth in sorted(snapsizedepth):
3006 for depth in sorted(snapsizedepth):
2994 if depth == 0:
3007 if depth == 0:
2995 continue
3008 continue
2996 ui.writenoi18n(
3009 ui.writenoi18n(
2997 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3010 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2998 % ((depth,) + tuple(snapsizedepth[depth]))
3011 % ((depth,) + tuple(snapsizedepth[depth]))
2999 )
3012 )
3000 ui.writenoi18n(
3013 ui.writenoi18n(
3001 b'delta size (min/max/avg) : %d / %d / %d\n'
3014 b'delta size (min/max/avg) : %d / %d / %d\n'
3002 % tuple(deltasize)
3015 % tuple(deltasize)
3003 )
3016 )
3004
3017
3005 if numdeltas > 0:
3018 if numdeltas > 0:
3006 ui.write(b'\n')
3019 ui.write(b'\n')
3007 fmt = pcfmtstr(numdeltas)
3020 fmt = pcfmtstr(numdeltas)
3008 fmt2 = pcfmtstr(numdeltas, 4)
3021 fmt2 = pcfmtstr(numdeltas, 4)
3009 ui.writenoi18n(
3022 ui.writenoi18n(
3010 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3023 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3011 )
3024 )
3012 if numprev > 0:
3025 if numprev > 0:
3013 ui.writenoi18n(
3026 ui.writenoi18n(
3014 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3027 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3015 )
3028 )
3016 ui.writenoi18n(
3029 ui.writenoi18n(
3017 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3030 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3018 )
3031 )
3019 ui.writenoi18n(
3032 ui.writenoi18n(
3020 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3033 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3021 )
3034 )
3022 if gdelta:
3035 if gdelta:
3023 ui.writenoi18n(
3036 ui.writenoi18n(
3024 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3037 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3025 )
3038 )
3026 ui.writenoi18n(
3039 ui.writenoi18n(
3027 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3040 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3028 )
3041 )
3029 ui.writenoi18n(
3042 ui.writenoi18n(
3030 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3043 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3031 )
3044 )
3032
3045
3033
3046
3034 @command(
3047 @command(
3035 b'debugrevlogindex',
3048 b'debugrevlogindex',
3036 cmdutil.debugrevlogopts
3049 cmdutil.debugrevlogopts
3037 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3050 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3038 _(b'[-f FORMAT] -c|-m|FILE'),
3051 _(b'[-f FORMAT] -c|-m|FILE'),
3039 optionalrepo=True,
3052 optionalrepo=True,
3040 )
3053 )
3041 def debugrevlogindex(ui, repo, file_=None, **opts):
3054 def debugrevlogindex(ui, repo, file_=None, **opts):
3042 """dump the contents of a revlog index"""
3055 """dump the contents of a revlog index"""
3043 opts = pycompat.byteskwargs(opts)
3056 opts = pycompat.byteskwargs(opts)
3044 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3057 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3045 format = opts.get(b'format', 0)
3058 format = opts.get(b'format', 0)
3046 if format not in (0, 1):
3059 if format not in (0, 1):
3047 raise error.Abort(_(b"unknown format %d") % format)
3060 raise error.Abort(_(b"unknown format %d") % format)
3048
3061
3049 if ui.debugflag:
3062 if ui.debugflag:
3050 shortfn = hex
3063 shortfn = hex
3051 else:
3064 else:
3052 shortfn = short
3065 shortfn = short
3053
3066
3054 # There might not be anything in r, so have a sane default
3067 # There might not be anything in r, so have a sane default
3055 idlen = 12
3068 idlen = 12
3056 for i in r:
3069 for i in r:
3057 idlen = len(shortfn(r.node(i)))
3070 idlen = len(shortfn(r.node(i)))
3058 break
3071 break
3059
3072
3060 if format == 0:
3073 if format == 0:
3061 if ui.verbose:
3074 if ui.verbose:
3062 ui.writenoi18n(
3075 ui.writenoi18n(
3063 b" rev offset length linkrev %s %s p2\n"
3076 b" rev offset length linkrev %s %s p2\n"
3064 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3077 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3065 )
3078 )
3066 else:
3079 else:
3067 ui.writenoi18n(
3080 ui.writenoi18n(
3068 b" rev linkrev %s %s p2\n"
3081 b" rev linkrev %s %s p2\n"
3069 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3082 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3070 )
3083 )
3071 elif format == 1:
3084 elif format == 1:
3072 if ui.verbose:
3085 if ui.verbose:
3073 ui.writenoi18n(
3086 ui.writenoi18n(
3074 (
3087 (
3075 b" rev flag offset length size link p1"
3088 b" rev flag offset length size link p1"
3076 b" p2 %s\n"
3089 b" p2 %s\n"
3077 )
3090 )
3078 % b"nodeid".rjust(idlen)
3091 % b"nodeid".rjust(idlen)
3079 )
3092 )
3080 else:
3093 else:
3081 ui.writenoi18n(
3094 ui.writenoi18n(
3082 b" rev flag size link p1 p2 %s\n"
3095 b" rev flag size link p1 p2 %s\n"
3083 % b"nodeid".rjust(idlen)
3096 % b"nodeid".rjust(idlen)
3084 )
3097 )
3085
3098
3086 for i in r:
3099 for i in r:
3087 node = r.node(i)
3100 node = r.node(i)
3088 if format == 0:
3101 if format == 0:
3089 try:
3102 try:
3090 pp = r.parents(node)
3103 pp = r.parents(node)
3091 except Exception:
3104 except Exception:
3092 pp = [nullid, nullid]
3105 pp = [nullid, nullid]
3093 if ui.verbose:
3106 if ui.verbose:
3094 ui.write(
3107 ui.write(
3095 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3108 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3096 % (
3109 % (
3097 i,
3110 i,
3098 r.start(i),
3111 r.start(i),
3099 r.length(i),
3112 r.length(i),
3100 r.linkrev(i),
3113 r.linkrev(i),
3101 shortfn(node),
3114 shortfn(node),
3102 shortfn(pp[0]),
3115 shortfn(pp[0]),
3103 shortfn(pp[1]),
3116 shortfn(pp[1]),
3104 )
3117 )
3105 )
3118 )
3106 else:
3119 else:
3107 ui.write(
3120 ui.write(
3108 b"% 6d % 7d %s %s %s\n"
3121 b"% 6d % 7d %s %s %s\n"
3109 % (
3122 % (
3110 i,
3123 i,
3111 r.linkrev(i),
3124 r.linkrev(i),
3112 shortfn(node),
3125 shortfn(node),
3113 shortfn(pp[0]),
3126 shortfn(pp[0]),
3114 shortfn(pp[1]),
3127 shortfn(pp[1]),
3115 )
3128 )
3116 )
3129 )
3117 elif format == 1:
3130 elif format == 1:
3118 pr = r.parentrevs(i)
3131 pr = r.parentrevs(i)
3119 if ui.verbose:
3132 if ui.verbose:
3120 ui.write(
3133 ui.write(
3121 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3134 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3122 % (
3135 % (
3123 i,
3136 i,
3124 r.flags(i),
3137 r.flags(i),
3125 r.start(i),
3138 r.start(i),
3126 r.length(i),
3139 r.length(i),
3127 r.rawsize(i),
3140 r.rawsize(i),
3128 r.linkrev(i),
3141 r.linkrev(i),
3129 pr[0],
3142 pr[0],
3130 pr[1],
3143 pr[1],
3131 shortfn(node),
3144 shortfn(node),
3132 )
3145 )
3133 )
3146 )
3134 else:
3147 else:
3135 ui.write(
3148 ui.write(
3136 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3149 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3137 % (
3150 % (
3138 i,
3151 i,
3139 r.flags(i),
3152 r.flags(i),
3140 r.rawsize(i),
3153 r.rawsize(i),
3141 r.linkrev(i),
3154 r.linkrev(i),
3142 pr[0],
3155 pr[0],
3143 pr[1],
3156 pr[1],
3144 shortfn(node),
3157 shortfn(node),
3145 )
3158 )
3146 )
3159 )
3147
3160
3148
3161
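# Added usage note: this prints one line per revision of the selected revlog,
# e.g. `hg debugrevlogindex -m` for the manifest.  With the default --format 0
# the columns are rev/linkrev/nodeid/p1/p2 (offset and length are added under
# --verbose); --format 1 additionally reports flags and sizes, as the header
# strings above spell out.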
3149 @command(
3162 @command(
3150 b'debugrevspec',
3163 b'debugrevspec',
3151 [
3164 [
3152 (
3165 (
3153 b'',
3166 b'',
3154 b'optimize',
3167 b'optimize',
3155 None,
3168 None,
3156 _(b'print parsed tree after optimizing (DEPRECATED)'),
3169 _(b'print parsed tree after optimizing (DEPRECATED)'),
3157 ),
3170 ),
3158 (
3171 (
3159 b'',
3172 b'',
3160 b'show-revs',
3173 b'show-revs',
3161 True,
3174 True,
3162 _(b'print list of result revisions (default)'),
3175 _(b'print list of result revisions (default)'),
3163 ),
3176 ),
3164 (
3177 (
3165 b's',
3178 b's',
3166 b'show-set',
3179 b'show-set',
3167 None,
3180 None,
3168 _(b'print internal representation of result set'),
3181 _(b'print internal representation of result set'),
3169 ),
3182 ),
3170 (
3183 (
3171 b'p',
3184 b'p',
3172 b'show-stage',
3185 b'show-stage',
3173 [],
3186 [],
3174 _(b'print parsed tree at the given stage'),
3187 _(b'print parsed tree at the given stage'),
3175 _(b'NAME'),
3188 _(b'NAME'),
3176 ),
3189 ),
3177 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3190 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3178 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3191 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3179 ],
3192 ],
3180 b'REVSPEC',
3193 b'REVSPEC',
3181 )
3194 )
3182 def debugrevspec(ui, repo, expr, **opts):
3195 def debugrevspec(ui, repo, expr, **opts):
3183 """parse and apply a revision specification
3196 """parse and apply a revision specification
3184
3197
3185 Use -p/--show-stage option to print the parsed tree at the given stages.
3198 Use -p/--show-stage option to print the parsed tree at the given stages.
3186 Use -p all to print tree at every stage.
3199 Use -p all to print tree at every stage.
3187
3200
3188 Use --no-show-revs option with -s or -p to print only the set
3201 Use --no-show-revs option with -s or -p to print only the set
3189 representation or the parsed tree respectively.
3202 representation or the parsed tree respectively.
3190
3203
3191 Use --verify-optimized to compare the optimized result with the unoptimized
3204 Use --verify-optimized to compare the optimized result with the unoptimized
3192 one. Returns 1 if the optimized result differs.
3205 one. Returns 1 if the optimized result differs.
3193 """
3206 """
3194 opts = pycompat.byteskwargs(opts)
3207 opts = pycompat.byteskwargs(opts)
3195 aliases = ui.configitems(b'revsetalias')
3208 aliases = ui.configitems(b'revsetalias')
3196 stages = [
3209 stages = [
3197 (b'parsed', lambda tree: tree),
3210 (b'parsed', lambda tree: tree),
3198 (
3211 (
3199 b'expanded',
3212 b'expanded',
3200 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3213 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3201 ),
3214 ),
3202 (b'concatenated', revsetlang.foldconcat),
3215 (b'concatenated', revsetlang.foldconcat),
3203 (b'analyzed', revsetlang.analyze),
3216 (b'analyzed', revsetlang.analyze),
3204 (b'optimized', revsetlang.optimize),
3217 (b'optimized', revsetlang.optimize),
3205 ]
3218 ]
3206 if opts[b'no_optimized']:
3219 if opts[b'no_optimized']:
3207 stages = stages[:-1]
3220 stages = stages[:-1]
3208 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3221 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3209 raise error.Abort(
3222 raise error.Abort(
3210 _(b'cannot use --verify-optimized with --no-optimized')
3223 _(b'cannot use --verify-optimized with --no-optimized')
3211 )
3224 )
3212 stagenames = {n for n, f in stages}
3225 stagenames = {n for n, f in stages}
3213
3226
3214 showalways = set()
3227 showalways = set()
3215 showchanged = set()
3228 showchanged = set()
3216 if ui.verbose and not opts[b'show_stage']:
3229 if ui.verbose and not opts[b'show_stage']:
3217 # show parsed tree by --verbose (deprecated)
3230 # show parsed tree by --verbose (deprecated)
3218 showalways.add(b'parsed')
3231 showalways.add(b'parsed')
3219 showchanged.update([b'expanded', b'concatenated'])
3232 showchanged.update([b'expanded', b'concatenated'])
3220 if opts[b'optimize']:
3233 if opts[b'optimize']:
3221 showalways.add(b'optimized')
3234 showalways.add(b'optimized')
3222 if opts[b'show_stage'] and opts[b'optimize']:
3235 if opts[b'show_stage'] and opts[b'optimize']:
3223 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3236 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3224 if opts[b'show_stage'] == [b'all']:
3237 if opts[b'show_stage'] == [b'all']:
3225 showalways.update(stagenames)
3238 showalways.update(stagenames)
3226 else:
3239 else:
3227 for n in opts[b'show_stage']:
3240 for n in opts[b'show_stage']:
3228 if n not in stagenames:
3241 if n not in stagenames:
3229 raise error.Abort(_(b'invalid stage name: %s') % n)
3242 raise error.Abort(_(b'invalid stage name: %s') % n)
3230 showalways.update(opts[b'show_stage'])
3243 showalways.update(opts[b'show_stage'])
3231
3244
3232 treebystage = {}
3245 treebystage = {}
3233 printedtree = None
3246 printedtree = None
3234 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3247 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3235 for n, f in stages:
3248 for n, f in stages:
3236 treebystage[n] = tree = f(tree)
3249 treebystage[n] = tree = f(tree)
3237 if n in showalways or (n in showchanged and tree != printedtree):
3250 if n in showalways or (n in showchanged and tree != printedtree):
3238 if opts[b'show_stage'] or n != b'parsed':
3251 if opts[b'show_stage'] or n != b'parsed':
3239 ui.write(b"* %s:\n" % n)
3252 ui.write(b"* %s:\n" % n)
3240 ui.write(revsetlang.prettyformat(tree), b"\n")
3253 ui.write(revsetlang.prettyformat(tree), b"\n")
3241 printedtree = tree
3254 printedtree = tree
3242
3255
3243 if opts[b'verify_optimized']:
3256 if opts[b'verify_optimized']:
3244 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3257 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3245 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3258 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3246 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3259 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3247 ui.writenoi18n(
3260 ui.writenoi18n(
3248 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3261 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3249 )
3262 )
3250 ui.writenoi18n(
3263 ui.writenoi18n(
3251 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3264 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3252 )
3265 )
3253 arevs = list(arevs)
3266 arevs = list(arevs)
3254 brevs = list(brevs)
3267 brevs = list(brevs)
3255 if arevs == brevs:
3268 if arevs == brevs:
3256 return 0
3269 return 0
3257 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3270 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3258 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3271 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3259 sm = difflib.SequenceMatcher(None, arevs, brevs)
3272 sm = difflib.SequenceMatcher(None, arevs, brevs)
3260 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3273 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3261 if tag in ('delete', 'replace'):
3274 if tag in ('delete', 'replace'):
3262 for c in arevs[alo:ahi]:
3275 for c in arevs[alo:ahi]:
3263 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3276 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3264 if tag in ('insert', 'replace'):
3277 if tag in ('insert', 'replace'):
3265 for c in brevs[blo:bhi]:
3278 for c in brevs[blo:bhi]:
3266 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3279 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3267 if tag == 'equal':
3280 if tag == 'equal':
3268 for c in arevs[alo:ahi]:
3281 for c in arevs[alo:ahi]:
3269 ui.write(b' %d\n' % c)
3282 ui.write(b' %d\n' % c)
3270 return 1
3283 return 1
3271
3284
3272 func = revset.makematcher(tree)
3285 func = revset.makematcher(tree)
3273 revs = func(repo)
3286 revs = func(repo)
3274 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3287 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3275 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3288 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3276 if not opts[b'show_revs']:
3289 if not opts[b'show_revs']:
3277 return
3290 return
3278 for c in revs:
3291 for c in revs:
3279 ui.write(b"%d\n" % c)
3292 ui.write(b"%d\n" % c)
3280
3293
3281
3294
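# Added example (the revset is hypothetical; stage names come from the
# `stages` list defined above):
#
#   $ hg debugrevspec -p analyzed -p optimized 'not public()'
#
# prints the parsed tree after the 'analyzed' and 'optimized' stages and then
# the matching revision numbers; using --verify-optimized instead compares the
# two evaluations and exits with status 1 if they differ.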
3282 @command(
3295 @command(
3283 b'debugserve',
3296 b'debugserve',
3284 [
3297 [
3285 (
3298 (
3286 b'',
3299 b'',
3287 b'sshstdio',
3300 b'sshstdio',
3288 False,
3301 False,
3289 _(b'run an SSH server bound to process handles'),
3302 _(b'run an SSH server bound to process handles'),
3290 ),
3303 ),
3291 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3304 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3292 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3305 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3293 ],
3306 ],
3294 b'',
3307 b'',
3295 )
3308 )
3296 def debugserve(ui, repo, **opts):
3309 def debugserve(ui, repo, **opts):
3297 """run a server with advanced settings
3310 """run a server with advanced settings
3298
3311
This command is similar to :hg:`serve`. It exists partially as a
workaround for the fact that ``hg serve --stdio`` must have specific
arguments for security reasons.
3302 """
3315 """
3303 opts = pycompat.byteskwargs(opts)
3316 opts = pycompat.byteskwargs(opts)
3304
3317
3305 if not opts[b'sshstdio']:
3318 if not opts[b'sshstdio']:
3306 raise error.Abort(_(b'only --sshstdio is currently supported'))
3319 raise error.Abort(_(b'only --sshstdio is currently supported'))
3307
3320
3308 logfh = None
3321 logfh = None
3309
3322
3310 if opts[b'logiofd'] and opts[b'logiofile']:
3323 if opts[b'logiofd'] and opts[b'logiofile']:
3311 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3324 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3312
3325
3313 if opts[b'logiofd']:
3326 if opts[b'logiofd']:
3314 # Ideally we would be line buffered. But line buffering in binary
3327 # Ideally we would be line buffered. But line buffering in binary
3315 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3328 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3316 # buffering could have performance impacts. But since this isn't
3329 # buffering could have performance impacts. But since this isn't
3317 # performance critical code, it should be fine.
3330 # performance critical code, it should be fine.
3318 try:
3331 try:
3319 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3332 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3320 except OSError as e:
3333 except OSError as e:
3321 if e.errno != errno.ESPIPE:
3334 if e.errno != errno.ESPIPE:
3322 raise
3335 raise
3323 # can't seek a pipe, so `ab` mode fails on py3
3336 # can't seek a pipe, so `ab` mode fails on py3
3324 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3337 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3325 elif opts[b'logiofile']:
3338 elif opts[b'logiofile']:
3326 logfh = open(opts[b'logiofile'], b'ab', 0)
3339 logfh = open(opts[b'logiofile'], b'ab', 0)
3327
3340
3328 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3341 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3329 s.serve_forever()
3342 s.serve_forever()
3330
3343
3331
3344
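# Added usage sketch (the log path is illustrative):
#
#   $ hg debugserve --sshstdio --logiofile /tmp/hg-server-io.log
#
# serves the current repository over stdin/stdout and appends the raw server
# I/O to the given file.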
3332 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3345 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3333 def debugsetparents(ui, repo, rev1, rev2=None):
3346 def debugsetparents(ui, repo, rev1, rev2=None):
3334 """manually set the parents of the current working directory
3347 """manually set the parents of the current working directory
3335
3348
3336 This is useful for writing repository conversion tools, but should
3349 This is useful for writing repository conversion tools, but should
3337 be used with care. For example, neither the working directory nor the
3350 be used with care. For example, neither the working directory nor the
3338 dirstate is updated, so file status may be incorrect after running this
3351 dirstate is updated, so file status may be incorrect after running this
3339 command.
3352 command.
3340
3353
3341 Returns 0 on success.
3354 Returns 0 on success.
3342 """
3355 """
3343
3356
3344 node1 = scmutil.revsingle(repo, rev1).node()
3357 node1 = scmutil.revsingle(repo, rev1).node()
3345 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3358 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3346
3359
3347 with repo.wlock():
3360 with repo.wlock():
3348 repo.setparents(node1, node2)
3361 repo.setparents(node1, node2)
3349
3362
3350
3363
3351 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3364 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3352 def debugsidedata(ui, repo, file_, rev=None, **opts):
3365 def debugsidedata(ui, repo, file_, rev=None, **opts):
3353 """dump the side data for a cl/manifest/file revision
3366 """dump the side data for a cl/manifest/file revision
3354
3367
3355 Use --verbose to dump the sidedata content."""
3368 Use --verbose to dump the sidedata content."""
3356 opts = pycompat.byteskwargs(opts)
3369 opts = pycompat.byteskwargs(opts)
3357 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3370 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3358 if rev is not None:
3371 if rev is not None:
3359 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3372 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3360 file_, rev = None, file_
3373 file_, rev = None, file_
3361 elif rev is None:
3374 elif rev is None:
3362 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3375 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3363 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3376 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3364 r = getattr(r, '_revlog', r)
3377 r = getattr(r, '_revlog', r)
3365 try:
3378 try:
3366 sidedata = r.sidedata(r.lookup(rev))
3379 sidedata = r.sidedata(r.lookup(rev))
3367 except KeyError:
3380 except KeyError:
3368 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3381 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3369 if sidedata:
3382 if sidedata:
3370 sidedata = list(sidedata.items())
3383 sidedata = list(sidedata.items())
3371 sidedata.sort()
3384 sidedata.sort()
3372 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3385 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3373 for key, value in sidedata:
3386 for key, value in sidedata:
3374 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3387 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3375 if ui.verbose:
3388 if ui.verbose:
3376 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3389 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3377
3390
3378
3391
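# Added example (entry keys and sizes are illustrative; they depend on what
# sidedata the revlog actually stores):
#
#   $ hg debugsidedata -c 4
#   2 sidedata entries
#    entry-0001 size 14
#    entry-0002 size 32
#
# --verbose additionally pretty-prints each entry's value.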
3379 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3392 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3380 def debugssl(ui, repo, source=None, **opts):
3393 def debugssl(ui, repo, source=None, **opts):
3381 '''test a secure connection to a server
3394 '''test a secure connection to a server
3382
3395
3383 This builds the certificate chain for the server on Windows, installing the
3396 This builds the certificate chain for the server on Windows, installing the
3384 missing intermediates and trusted root via Windows Update if necessary. It
3397 missing intermediates and trusted root via Windows Update if necessary. It
3385 does nothing on other platforms.
3398 does nothing on other platforms.
3386
3399
3387 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3400 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3388 that server is used. See :hg:`help urls` for more information.
3401 that server is used. See :hg:`help urls` for more information.
3389
3402
3390 If the update succeeds, retry the original operation. Otherwise, the cause
3403 If the update succeeds, retry the original operation. Otherwise, the cause
3391 of the SSL error is likely another issue.
3404 of the SSL error is likely another issue.
3392 '''
3405 '''
3393 if not pycompat.iswindows:
3406 if not pycompat.iswindows:
3394 raise error.Abort(
3407 raise error.Abort(
3395 _(b'certificate chain building is only possible on Windows')
3408 _(b'certificate chain building is only possible on Windows')
3396 )
3409 )
3397
3410
3398 if not source:
3411 if not source:
3399 if not repo:
3412 if not repo:
3400 raise error.Abort(
3413 raise error.Abort(
3401 _(
3414 _(
3402 b"there is no Mercurial repository here, and no "
3415 b"there is no Mercurial repository here, and no "
3403 b"server specified"
3416 b"server specified"
3404 )
3417 )
3405 )
3418 )
3406 source = b"default"
3419 source = b"default"
3407
3420
3408 source, branches = hg.parseurl(ui.expandpath(source))
3421 source, branches = hg.parseurl(ui.expandpath(source))
3409 url = util.url(source)
3422 url = util.url(source)
3410
3423
3411 defaultport = {b'https': 443, b'ssh': 22}
3424 defaultport = {b'https': 443, b'ssh': 22}
3412 if url.scheme in defaultport:
3425 if url.scheme in defaultport:
3413 try:
3426 try:
3414 addr = (url.host, int(url.port or defaultport[url.scheme]))
3427 addr = (url.host, int(url.port or defaultport[url.scheme]))
3415 except ValueError:
3428 except ValueError:
3416 raise error.Abort(_(b"malformed port number in URL"))
3429 raise error.Abort(_(b"malformed port number in URL"))
3417 else:
3430 else:
3418 raise error.Abort(_(b"only https and ssh connections are supported"))
3431 raise error.Abort(_(b"only https and ssh connections are supported"))
3419
3432
3420 from . import win32
3433 from . import win32
3421
3434
3422 s = ssl.wrap_socket(
3435 s = ssl.wrap_socket(
3423 socket.socket(),
3436 socket.socket(),
3424 ssl_version=ssl.PROTOCOL_TLS,
3437 ssl_version=ssl.PROTOCOL_TLS,
3425 cert_reqs=ssl.CERT_NONE,
3438 cert_reqs=ssl.CERT_NONE,
3426 ca_certs=None,
3439 ca_certs=None,
3427 )
3440 )
3428
3441
3429 try:
3442 try:
3430 s.connect(addr)
3443 s.connect(addr)
3431 cert = s.getpeercert(True)
3444 cert = s.getpeercert(True)
3432
3445
3433 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3446 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3434
3447
3435 complete = win32.checkcertificatechain(cert, build=False)
3448 complete = win32.checkcertificatechain(cert, build=False)
3436
3449
3437 if not complete:
3450 if not complete:
3438 ui.status(_(b'certificate chain is incomplete, updating... '))
3451 ui.status(_(b'certificate chain is incomplete, updating... '))
3439
3452
3440 if not win32.checkcertificatechain(cert):
3453 if not win32.checkcertificatechain(cert):
3441 ui.status(_(b'failed.\n'))
3454 ui.status(_(b'failed.\n'))
3442 else:
3455 else:
3443 ui.status(_(b'done.\n'))
3456 ui.status(_(b'done.\n'))
3444 else:
3457 else:
3445 ui.status(_(b'full certificate chain is available\n'))
3458 ui.status(_(b'full certificate chain is available\n'))
3446 finally:
3459 finally:
3447 s.close()
3460 s.close()
3448
3461
3449
3462
3450 @command(
3463 @command(
3451 b"debugbackupbundle",
3464 b"debugbackupbundle",
3452 [
3465 [
3453 (
3466 (
3454 b"",
3467 b"",
3455 b"recover",
3468 b"recover",
3456 b"",
3469 b"",
3457 b"brings the specified changeset back into the repository",
3470 b"brings the specified changeset back into the repository",
3458 )
3471 )
3459 ]
3472 ]
3460 + cmdutil.logopts,
3473 + cmdutil.logopts,
3461 _(b"hg debugbackupbundle [--recover HASH]"),
3474 _(b"hg debugbackupbundle [--recover HASH]"),
3462 )
3475 )
3463 def debugbackupbundle(ui, repo, *pats, **opts):
3476 def debugbackupbundle(ui, repo, *pats, **opts):
3464 """lists the changesets available in backup bundles
3477 """lists the changesets available in backup bundles
3465
3478
3466 Without any arguments, this command prints a list of the changesets in each
3479 Without any arguments, this command prints a list of the changesets in each
3467 backup bundle.
3480 backup bundle.
3468
3481
3469 --recover takes a changeset hash and unbundles the first bundle that
3482 --recover takes a changeset hash and unbundles the first bundle that
3470 contains that hash, which puts that changeset back in your repository.
3483 contains that hash, which puts that changeset back in your repository.
3471
3484
3472 --verbose will print the entire commit message and the bundle path for that
3485 --verbose will print the entire commit message and the bundle path for that
3473 backup.
3486 backup.
3474 """
3487 """
3475 backups = list(
3488 backups = list(
3476 filter(
3489 filter(
3477 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3490 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3478 )
3491 )
3479 )
3492 )
3480 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3493 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3481
3494
3482 opts = pycompat.byteskwargs(opts)
3495 opts = pycompat.byteskwargs(opts)
3483 opts[b"bundle"] = b""
3496 opts[b"bundle"] = b""
3484 opts[b"force"] = None
3497 opts[b"force"] = None
3485 limit = logcmdutil.getlimit(opts)
3498 limit = logcmdutil.getlimit(opts)
3486
3499
3487 def display(other, chlist, displayer):
3500 def display(other, chlist, displayer):
3488 if opts.get(b"newest_first"):
3501 if opts.get(b"newest_first"):
3489 chlist.reverse()
3502 chlist.reverse()
3490 count = 0
3503 count = 0
3491 for n in chlist:
3504 for n in chlist:
3492 if limit is not None and count >= limit:
3505 if limit is not None and count >= limit:
3493 break
3506 break
3494 parents = [True for p in other.changelog.parents(n) if p != nullid]
3507 parents = [True for p in other.changelog.parents(n) if p != nullid]
3495 if opts.get(b"no_merges") and len(parents) == 2:
3508 if opts.get(b"no_merges") and len(parents) == 2:
3496 continue
3509 continue
3497 count += 1
3510 count += 1
3498 displayer.show(other[n])
3511 displayer.show(other[n])
3499
3512
3500 recovernode = opts.get(b"recover")
3513 recovernode = opts.get(b"recover")
3501 if recovernode:
3514 if recovernode:
3502 if scmutil.isrevsymbol(repo, recovernode):
3515 if scmutil.isrevsymbol(repo, recovernode):
3503 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3516 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3504 return
3517 return
3505 elif backups:
3518 elif backups:
3506 msg = _(
3519 msg = _(
3507 b"Recover changesets using: hg debugbackupbundle --recover "
3520 b"Recover changesets using: hg debugbackupbundle --recover "
3508 b"<changeset hash>\n\nAvailable backup changesets:"
3521 b"<changeset hash>\n\nAvailable backup changesets:"
3509 )
3522 )
3510 ui.status(msg, label=b"status.removed")
3523 ui.status(msg, label=b"status.removed")
3511 else:
3524 else:
3512 ui.status(_(b"no backup changesets found\n"))
3525 ui.status(_(b"no backup changesets found\n"))
3513 return
3526 return
3514
3527
3515 for backup in backups:
3528 for backup in backups:
3516 # Much of this is copied from the hg incoming logic
3529 # Much of this is copied from the hg incoming logic
3517 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3530 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3518 source, branches = hg.parseurl(source, opts.get(b"branch"))
3531 source, branches = hg.parseurl(source, opts.get(b"branch"))
3519 try:
3532 try:
3520 other = hg.peer(repo, opts, source)
3533 other = hg.peer(repo, opts, source)
3521 except error.LookupError as ex:
3534 except error.LookupError as ex:
3522 msg = _(b"\nwarning: unable to open bundle %s") % source
3535 msg = _(b"\nwarning: unable to open bundle %s") % source
3523 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3536 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3524 ui.warn(msg, hint=hint)
3537 ui.warn(msg, hint=hint)
3525 continue
3538 continue
3526 revs, checkout = hg.addbranchrevs(
3539 revs, checkout = hg.addbranchrevs(
3527 repo, other, branches, opts.get(b"rev")
3540 repo, other, branches, opts.get(b"rev")
3528 )
3541 )
3529
3542
3530 if revs:
3543 if revs:
3531 revs = [other.lookup(rev) for rev in revs]
3544 revs = [other.lookup(rev) for rev in revs]
3532
3545
3533 quiet = ui.quiet
3546 quiet = ui.quiet
3534 try:
3547 try:
3535 ui.quiet = True
3548 ui.quiet = True
3536 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3549 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3537 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3550 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3538 )
3551 )
3539 except error.LookupError:
3552 except error.LookupError:
3540 continue
3553 continue
3541 finally:
3554 finally:
3542 ui.quiet = quiet
3555 ui.quiet = quiet
3543
3556
3544 try:
3557 try:
3545 if not chlist:
3558 if not chlist:
3546 continue
3559 continue
3547 if recovernode:
3560 if recovernode:
3548 with repo.lock(), repo.transaction(b"unbundle") as tr:
3561 with repo.lock(), repo.transaction(b"unbundle") as tr:
3549 if scmutil.isrevsymbol(other, recovernode):
3562 if scmutil.isrevsymbol(other, recovernode):
3550 ui.status(_(b"Unbundling %s\n") % (recovernode))
3563 ui.status(_(b"Unbundling %s\n") % (recovernode))
3551 f = hg.openpath(ui, source)
3564 f = hg.openpath(ui, source)
3552 gen = exchange.readbundle(ui, f, source)
3565 gen = exchange.readbundle(ui, f, source)
3553 if isinstance(gen, bundle2.unbundle20):
3566 if isinstance(gen, bundle2.unbundle20):
3554 bundle2.applybundle(
3567 bundle2.applybundle(
3555 repo,
3568 repo,
3556 gen,
3569 gen,
3557 tr,
3570 tr,
3558 source=b"unbundle",
3571 source=b"unbundle",
3559 url=b"bundle:" + source,
3572 url=b"bundle:" + source,
3560 )
3573 )
3561 else:
3574 else:
3562 gen.apply(repo, b"unbundle", b"bundle:" + source)
3575 gen.apply(repo, b"unbundle", b"bundle:" + source)
3563 break
3576 break
3564 else:
3577 else:
3565 backupdate = encoding.strtolocal(
3578 backupdate = encoding.strtolocal(
3566 time.strftime(
3579 time.strftime(
3567 "%a %H:%M, %Y-%m-%d",
3580 "%a %H:%M, %Y-%m-%d",
3568 time.localtime(os.path.getmtime(source)),
3581 time.localtime(os.path.getmtime(source)),
3569 )
3582 )
3570 )
3583 )
3571 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3584 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3572 if ui.verbose:
3585 if ui.verbose:
3573 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3586 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3574 else:
3587 else:
3575 opts[
3588 opts[
3576 b"template"
3589 b"template"
3577 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3590 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3578 displayer = logcmdutil.changesetdisplayer(
3591 displayer = logcmdutil.changesetdisplayer(
3579 ui, other, opts, False
3592 ui, other, opts, False
3580 )
3593 )
3581 display(other, chlist, displayer)
3594 display(other, chlist, displayer)
3582 displayer.close()
3595 displayer.close()
3583 finally:
3596 finally:
3584 cleanupfn()
3597 cleanupfn()
3585
3598
3586
3599
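# Added usage sketch (the hash is a placeholder): with no arguments the
# command walks .hg/strip-backup/*.hg newest-first and lists the changesets in
# each backup bundle; to put one of them back:
#
#   $ hg debugbackupbundle --recover 1a2b3c4d5e6f
#
# which unbundles the first backup bundle that contains that node.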
3587 @command(
3600 @command(
3588 b'debugsub',
3601 b'debugsub',
3589 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3602 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3590 _(b'[-r REV] [REV]'),
3603 _(b'[-r REV] [REV]'),
3591 )
3604 )
3592 def debugsub(ui, repo, rev=None):
3605 def debugsub(ui, repo, rev=None):
3593 ctx = scmutil.revsingle(repo, rev, None)
3606 ctx = scmutil.revsingle(repo, rev, None)
3594 for k, v in sorted(ctx.substate.items()):
3607 for k, v in sorted(ctx.substate.items()):
3595 ui.writenoi18n(b'path %s\n' % k)
3608 ui.writenoi18n(b'path %s\n' % k)
3596 ui.writenoi18n(b' source %s\n' % v[0])
3609 ui.writenoi18n(b' source %s\n' % v[0])
3597 ui.writenoi18n(b' revision %s\n' % v[1])
3610 ui.writenoi18n(b' revision %s\n' % v[1])
3598
3611
3599
3612
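# Added example (subrepository name and hash are illustrative): for each
# subrepo entry of the chosen changeset the loop above prints
#
#   path sub1
#    source   ../sub1
#    revision fc627a69481f
#
# i.e. the subrepo source and the pinned revision from the substate.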
3600 @command(
3613 @command(
3601 b'debugsuccessorssets',
3614 b'debugsuccessorssets',
3602 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3615 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3603 _(b'[REV]'),
3616 _(b'[REV]'),
3604 )
3617 )
3605 def debugsuccessorssets(ui, repo, *revs, **opts):
3618 def debugsuccessorssets(ui, repo, *revs, **opts):
3606 """show set of successors for revision
3619 """show set of successors for revision
3607
3620
A successors set of changeset A is a consistent group of revisions that
succeed A. It contains only non-obsolete changesets unless the --closest
option is used.
3611
3624
3612 In most cases a changeset A has a single successors set containing a single
3625 In most cases a changeset A has a single successors set containing a single
3613 successor (changeset A replaced by A').
3626 successor (changeset A replaced by A').
3614
3627
A changeset that is made obsolete with no successors is called "pruned".
Such changesets have no successors sets at all.
3617
3630
3618 A changeset that has been "split" will have a successors set containing
3631 A changeset that has been "split" will have a successors set containing
3619 more than one successor.
3632 more than one successor.
3620
3633
3621 A changeset that has been rewritten in multiple different ways is called
3634 A changeset that has been rewritten in multiple different ways is called
3622 "divergent". Such changesets have multiple successor sets (each of which
3635 "divergent". Such changesets have multiple successor sets (each of which
3623 may also be split, i.e. have multiple successors).
3636 may also be split, i.e. have multiple successors).
3624
3637
3625 Results are displayed as follows::
3638 Results are displayed as follows::
3626
3639
    <rev1>
        <successors-1A>
    <rev2>
        <successors-2A>
        <successors-2B1> <successors-2B2> <successors-2B3>
3632
3645
3633 Here rev2 has two possible (i.e. divergent) successors sets. The first
3646 Here rev2 has two possible (i.e. divergent) successors sets. The first
3634 holds one element, whereas the second holds three (i.e. the changeset has
3647 holds one element, whereas the second holds three (i.e. the changeset has
3635 been split).
3648 been split).
3636 """
3649 """
3637 # passed to successorssets caching computation from one call to another
3650 # passed to successorssets caching computation from one call to another
3638 cache = {}
3651 cache = {}
3639 ctx2str = bytes
3652 ctx2str = bytes
3640 node2str = short
3653 node2str = short
3641 for rev in scmutil.revrange(repo, revs):
3654 for rev in scmutil.revrange(repo, revs):
3642 ctx = repo[rev]
3655 ctx = repo[rev]
3643 ui.write(b'%s\n' % ctx2str(ctx))
3656 ui.write(b'%s\n' % ctx2str(ctx))
3644 for succsset in obsutil.successorssets(
3657 for succsset in obsutil.successorssets(
3645 repo, ctx.node(), closest=opts['closest'], cache=cache
3658 repo, ctx.node(), closest=opts['closest'], cache=cache
3646 ):
3659 ):
3647 if succsset:
3660 if succsset:
3648 ui.write(b' ')
3661 ui.write(b' ')
3649 ui.write(node2str(succsset[0]))
3662 ui.write(node2str(succsset[0]))
3650 for node in succsset[1:]:
3663 for node in succsset[1:]:
3651 ui.write(b' ')
3664 ui.write(b' ')
3652 ui.write(node2str(node))
3665 ui.write(node2str(node))
3653 ui.write(b'\n')
3666 ui.write(b'\n')
3654
3667
3655
3668
3656 @command(b'debugtagscache', [])
3669 @command(b'debugtagscache', [])
3657 def debugtagscache(ui, repo):
3670 def debugtagscache(ui, repo):
3658 """display the contents of .hg/cache/hgtagsfnodes1"""
3671 """display the contents of .hg/cache/hgtagsfnodes1"""
3659 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3672 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3660 for r in repo:
3673 for r in repo:
3661 node = repo[r].node()
3674 node = repo[r].node()
3662 tagsnode = cache.getfnode(node, computemissing=False)
3675 tagsnode = cache.getfnode(node, computemissing=False)
3663 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3676 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3664 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3677 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3665
3678
3666
3679
3667 @command(
3680 @command(
3668 b'debugtemplate',
3681 b'debugtemplate',
3669 [
3682 [
3670 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3683 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3671 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3684 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3672 ],
3685 ],
3673 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3686 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3674 optionalrepo=True,
3687 optionalrepo=True,
3675 )
3688 )
3676 def debugtemplate(ui, repo, tmpl, **opts):
3689 def debugtemplate(ui, repo, tmpl, **opts):
3677 """parse and apply a template
3690 """parse and apply a template
3678
3691
3679 If -r/--rev is given, the template is processed as a log template and
3692 If -r/--rev is given, the template is processed as a log template and
3680 applied to the given changesets. Otherwise, it is processed as a generic
3693 applied to the given changesets. Otherwise, it is processed as a generic
3681 template.
3694 template.
3682
3695
3683 Use --verbose to print the parsed tree.
3696 Use --verbose to print the parsed tree.
3684 """
3697 """
3685 revs = None
3698 revs = None
3686 if opts['rev']:
3699 if opts['rev']:
3687 if repo is None:
3700 if repo is None:
3688 raise error.RepoError(
3701 raise error.RepoError(
3689 _(b'there is no Mercurial repository here (.hg not found)')
3702 _(b'there is no Mercurial repository here (.hg not found)')
3690 )
3703 )
3691 revs = scmutil.revrange(repo, opts['rev'])
3704 revs = scmutil.revrange(repo, opts['rev'])
3692
3705
3693 props = {}
3706 props = {}
3694 for d in opts['define']:
3707 for d in opts['define']:
3695 try:
3708 try:
3696 k, v = (e.strip() for e in d.split(b'=', 1))
3709 k, v = (e.strip() for e in d.split(b'=', 1))
3697 if not k or k == b'ui':
3710 if not k or k == b'ui':
3698 raise ValueError
3711 raise ValueError
3699 props[k] = v
3712 props[k] = v
3700 except ValueError:
3713 except ValueError:
3701 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3714 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3702
3715
3703 if ui.verbose:
3716 if ui.verbose:
3704 aliases = ui.configitems(b'templatealias')
3717 aliases = ui.configitems(b'templatealias')
3705 tree = templater.parse(tmpl)
3718 tree = templater.parse(tmpl)
3706 ui.note(templater.prettyformat(tree), b'\n')
3719 ui.note(templater.prettyformat(tree), b'\n')
3707 newtree = templater.expandaliases(tree, aliases)
3720 newtree = templater.expandaliases(tree, aliases)
3708 if newtree != tree:
3721 if newtree != tree:
3709 ui.notenoi18n(
3722 ui.notenoi18n(
3710 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3723 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3711 )
3724 )
3712
3725
3713 if revs is None:
3726 if revs is None:
3714 tres = formatter.templateresources(ui, repo)
3727 tres = formatter.templateresources(ui, repo)
3715 t = formatter.maketemplater(ui, tmpl, resources=tres)
3728 t = formatter.maketemplater(ui, tmpl, resources=tres)
3716 if ui.verbose:
3729 if ui.verbose:
3717 kwds, funcs = t.symbolsuseddefault()
3730 kwds, funcs = t.symbolsuseddefault()
3718 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3731 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3719 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3732 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3720 ui.write(t.renderdefault(props))
3733 ui.write(t.renderdefault(props))
3721 else:
3734 else:
3722 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3735 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3723 if ui.verbose:
3736 if ui.verbose:
3724 kwds, funcs = displayer.t.symbolsuseddefault()
3737 kwds, funcs = displayer.t.symbolsuseddefault()
3725 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3738 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3726 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3739 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3727 for r in revs:
3740 for r in revs:
3728 displayer.show(repo[r], **pycompat.strkwargs(props))
3741 displayer.show(repo[r], **pycompat.strkwargs(props))
3729 displayer.close()
3742 displayer.close()
3730
3743
3731
3744
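# Added example (template and keyword are illustrative):
#
#   $ hg debugtemplate -r . -D greeting=hello '{greeting}: {node|short}\n'
#
# -D defines extra keywords (the `props` dict above), -r switches to the
# log-template code path, and --verbose also dumps the parsed and expanded
# template trees.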
3732 @command(
3745 @command(
3733 b'debuguigetpass',
3746 b'debuguigetpass',
3734 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3747 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3735 _(b'[-p TEXT]'),
3748 _(b'[-p TEXT]'),
3736 norepo=True,
3749 norepo=True,
3737 )
3750 )
3738 def debuguigetpass(ui, prompt=b''):
3751 def debuguigetpass(ui, prompt=b''):
3739 """show prompt to type password"""
3752 """show prompt to type password"""
3740 r = ui.getpass(prompt)
3753 r = ui.getpass(prompt)
3741 ui.writenoi18n(b'response: %s\n' % r)
3754 ui.writenoi18n(b'response: %s\n' % r)
3742
3755
3743
3756
3744 @command(
3757 @command(
3745 b'debuguiprompt',
3758 b'debuguiprompt',
3746 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3759 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3747 _(b'[-p TEXT]'),
3760 _(b'[-p TEXT]'),
3748 norepo=True,
3761 norepo=True,
3749 )
3762 )
3750 def debuguiprompt(ui, prompt=b''):
3763 def debuguiprompt(ui, prompt=b''):
3751 """show plain prompt"""
3764 """show plain prompt"""
3752 r = ui.prompt(prompt)
3765 r = ui.prompt(prompt)
3753 ui.writenoi18n(b'response: %s\n' % r)
3766 ui.writenoi18n(b'response: %s\n' % r)
3754
3767
3755
3768
3756 @command(b'debugupdatecaches', [])
3769 @command(b'debugupdatecaches', [])
3757 def debugupdatecaches(ui, repo, *pats, **opts):
3770 def debugupdatecaches(ui, repo, *pats, **opts):
3758 """warm all known caches in the repository"""
3771 """warm all known caches in the repository"""
3759 with repo.wlock(), repo.lock():
3772 with repo.wlock(), repo.lock():
3760 repo.updatecaches(full=True)
3773 repo.updatecaches(full=True)
3761
3774
3762
3775
3763 @command(
3776 @command(
3764 b'debugupgraderepo',
3777 b'debugupgraderepo',
3765 [
3778 [
3766 (
3779 (
3767 b'o',
3780 b'o',
3768 b'optimize',
3781 b'optimize',
3769 [],
3782 [],
3770 _(b'extra optimization to perform'),
3783 _(b'extra optimization to perform'),
3771 _(b'NAME'),
3784 _(b'NAME'),
3772 ),
3785 ),
3773 (b'', b'run', False, _(b'performs an upgrade')),
3786 (b'', b'run', False, _(b'performs an upgrade')),
3774 (b'', b'backup', True, _(b'keep the old repository content around')),
3787 (b'', b'backup', True, _(b'keep the old repository content around')),
3775 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3788 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3776 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3789 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3777 ],
3790 ],
3778 )
3791 )
3779 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3792 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3780 """upgrade a repository to use different features
3793 """upgrade a repository to use different features
3781
3794
3782 If no arguments are specified, the repository is evaluated for upgrade
3795 If no arguments are specified, the repository is evaluated for upgrade
3783 and a list of problems and potential optimizations is printed.
3796 and a list of problems and potential optimizations is printed.
3784
3797
3785 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3798 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3786 can be influenced via additional arguments. More details will be provided
3799 can be influenced via additional arguments. More details will be provided
3787 by the command output when run without ``--run``.
3800 by the command output when run without ``--run``.
3788
3801
3789 During the upgrade, the repository will be locked and no writes will be
3802 During the upgrade, the repository will be locked and no writes will be
3790 allowed.
3803 allowed.
3791
3804
3792 At the end of the upgrade, the repository may not be readable while new
3805 At the end of the upgrade, the repository may not be readable while new
3793 repository data is swapped in. This window will be as long as it takes to
3806 repository data is swapped in. This window will be as long as it takes to
3794 rename some directories inside the ``.hg`` directory. On most machines, this
3807 rename some directories inside the ``.hg`` directory. On most machines, this
3795 should complete almost instantaneously and the chances of a consumer being
3808 should complete almost instantaneously and the chances of a consumer being
3796 unable to access the repository should be low.
3809 unable to access the repository should be low.
3797
3810
By default, all revlogs will be upgraded. You can restrict this using flags
such as `--manifest`:

* `--manifest`: only optimize the manifest
* `--no-manifest`: optimize all revlogs but the manifest
* `--changelog`: optimize the changelog only
* `--no-changelog --no-manifest`: optimize filelogs only
3805 """
3818 """
3806 return upgrade.upgraderepo(
3819 return upgrade.upgraderepo(
3807 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3820 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3808 )
3821 )
3809
3822
3810
3823
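# Added usage sketch: a dry run first, then an actual upgrade of everything
# except the changelog, without keeping the old store around:
#
#   $ hg debugupgraderepo
#   $ hg debugupgraderepo --run --no-changelog --no-backup
#
# (--optimize NAME can be repeated to request extra optimizations by name.)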
3811 @command(
3824 @command(
3812 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3825 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3813 )
3826 )
3814 def debugwalk(ui, repo, *pats, **opts):
3827 def debugwalk(ui, repo, *pats, **opts):
3815 """show how files match on given patterns"""
3828 """show how files match on given patterns"""
3816 opts = pycompat.byteskwargs(opts)
3829 opts = pycompat.byteskwargs(opts)
3817 m = scmutil.match(repo[None], pats, opts)
3830 m = scmutil.match(repo[None], pats, opts)
3818 if ui.verbose:
3831 if ui.verbose:
3819 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3832 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3820 items = list(repo[None].walk(m))
3833 items = list(repo[None].walk(m))
3821 if not items:
3834 if not items:
3822 return
3835 return
3823 f = lambda fn: fn
3836 f = lambda fn: fn
3824 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3837 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3825 f = lambda fn: util.normpath(fn)
3838 f = lambda fn: util.normpath(fn)
3826 fmt = b'f %%-%ds %%-%ds %%s' % (
3839 fmt = b'f %%-%ds %%-%ds %%s' % (
3827 max([len(abs) for abs in items]),
3840 max([len(abs) for abs in items]),
3828 max([len(repo.pathto(abs)) for abs in items]),
3841 max([len(repo.pathto(abs)) for abs in items]),
3829 )
3842 )
3830 for abs in items:
3843 for abs in items:
3831 line = fmt % (
3844 line = fmt % (
3832 abs,
3845 abs,
3833 f(repo.pathto(abs)),
3846 f(repo.pathto(abs)),
3834 m.exact(abs) and b'exact' or b'',
3847 m.exact(abs) and b'exact' or b'',
3835 )
3848 )
3836 ui.write(b"%s\n" % line.rstrip())
3849 ui.write(b"%s\n" % line.rstrip())
3837
3850
3838
3851
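# Added example (file name is illustrative): every matched file is printed as
# an "f" line giving the repo-relative path, the path relative to the cwd, and
# an "exact" marker when the pattern named that file literally, e.g.
#
#   $ hg debugwalk setup.py
#   f  setup.py  setup.py  exact
#
# (column widths are computed from the longest path, per `fmt` above).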
3839 @command(b'debugwhyunstable', [], _(b'REV'))
3852 @command(b'debugwhyunstable', [], _(b'REV'))
3840 def debugwhyunstable(ui, repo, rev):
3853 def debugwhyunstable(ui, repo, rev):
3841 """explain instabilities of a changeset"""
3854 """explain instabilities of a changeset"""
3842 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3855 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3843 dnodes = b''
3856 dnodes = b''
3844 if entry.get(b'divergentnodes'):
3857 if entry.get(b'divergentnodes'):
3845 dnodes = (
3858 dnodes = (
3846 b' '.join(
3859 b' '.join(
3847 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3860 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3848 for ctx in entry[b'divergentnodes']
3861 for ctx in entry[b'divergentnodes']
3849 )
3862 )
3850 + b' '
3863 + b' '
3851 )
3864 )
3852 ui.write(
3865 ui.write(
3853 b'%s: %s%s %s\n'
3866 b'%s: %s%s %s\n'
3854 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3867 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3855 )
3868 )
3856
3869
3857
3870
3858 @command(
3871 @command(
3859 b'debugwireargs',
3872 b'debugwireargs',
3860 [
3873 [
3861 (b'', b'three', b'', b'three'),
3874 (b'', b'three', b'', b'three'),
3862 (b'', b'four', b'', b'four'),
3875 (b'', b'four', b'', b'four'),
3863 (b'', b'five', b'', b'five'),
3876 (b'', b'five', b'', b'five'),
3864 ]
3877 ]
3865 + cmdutil.remoteopts,
3878 + cmdutil.remoteopts,
3866 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3879 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3867 norepo=True,
3880 norepo=True,
3868 )
3881 )
3869 def debugwireargs(ui, repopath, *vals, **opts):
3882 def debugwireargs(ui, repopath, *vals, **opts):
3870 opts = pycompat.byteskwargs(opts)
3883 opts = pycompat.byteskwargs(opts)
3871 repo = hg.peer(ui, opts, repopath)
3884 repo = hg.peer(ui, opts, repopath)
3872 for opt in cmdutil.remoteopts:
3885 for opt in cmdutil.remoteopts:
3873 del opts[opt[1]]
3886 del opts[opt[1]]
3874 args = {}
3887 args = {}
3875 for k, v in pycompat.iteritems(opts):
3888 for k, v in pycompat.iteritems(opts):
3876 if v:
3889 if v:
3877 args[k] = v
3890 args[k] = v
3878 args = pycompat.strkwargs(args)
3891 args = pycompat.strkwargs(args)
3879 # run twice to check that we don't mess up the stream for the next command
3892 # run twice to check that we don't mess up the stream for the next command
3880 res1 = repo.debugwireargs(*vals, **args)
3893 res1 = repo.debugwireargs(*vals, **args)
3881 res2 = repo.debugwireargs(*vals, **args)
3894 res2 = repo.debugwireargs(*vals, **args)
3882 ui.write(b"%s\n" % res1)
3895 ui.write(b"%s\n" % res1)
3883 if res1 != res2:
3896 if res1 != res2:
3884 ui.warn(b"%s\n" % res2)
3897 ui.warn(b"%s\n" % res2)
3885
3898
3886
3899
3887 def _parsewirelangblocks(fh):
3900 def _parsewirelangblocks(fh):
3888 activeaction = None
3901 activeaction = None
3889 blocklines = []
3902 blocklines = []
3890 lastindent = 0
3903 lastindent = 0
3891
3904
3892 for line in fh:
3905 for line in fh:
3893 line = line.rstrip()
3906 line = line.rstrip()
3894 if not line:
3907 if not line:
3895 continue
3908 continue
3896
3909
3897 if line.startswith(b'#'):
3910 if line.startswith(b'#'):
3898 continue
3911 continue
3899
3912
3900 if not line.startswith(b' '):
3913 if not line.startswith(b' '):
3901 # New block. Flush previous one.
3914 # New block. Flush previous one.
3902 if activeaction:
3915 if activeaction:
3903 yield activeaction, blocklines
3916 yield activeaction, blocklines
3904
3917
3905 activeaction = line
3918 activeaction = line
3906 blocklines = []
3919 blocklines = []
3907 lastindent = 0
3920 lastindent = 0
3908 continue
3921 continue
3909
3922
3910 # Else we start with an indent.
3923 # Else we start with an indent.
3911
3924
3912 if not activeaction:
3925 if not activeaction:
3913 raise error.Abort(_(b'indented line outside of block'))
3926 raise error.Abort(_(b'indented line outside of block'))
3914
3927
3915 indent = len(line) - len(line.lstrip())
3928 indent = len(line) - len(line.lstrip())
3916
3929
3917 # If this line is indented more than the last line, concatenate it.
3930 # If this line is indented more than the last line, concatenate it.
3918 if indent > lastindent and blocklines:
3931 if indent > lastindent and blocklines:
3919 blocklines[-1] += line.lstrip()
3932 blocklines[-1] += line.lstrip()
3920 else:
3933 else:
3921 blocklines.append(line)
3934 blocklines.append(line)
3922 lastindent = indent
3935 lastindent = indent
3923
3936
3924 # Flush last block.
3937 # Flush last block.
3925 if activeaction:
3938 if activeaction:
3926 yield activeaction, blocklines
3939 yield activeaction, blocklines
3927
3940
3928
3941
3929 @command(
3942 @command(
3930 b'debugwireproto',
3943 b'debugwireproto',
3931 [
3944 [
3932 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3945 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3933 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3946 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3934 (
3947 (
3935 b'',
3948 b'',
3936 b'noreadstderr',
3949 b'noreadstderr',
3937 False,
3950 False,
3938 _(b'do not read from stderr of the remote'),
3951 _(b'do not read from stderr of the remote'),
3939 ),
3952 ),
3940 (
3953 (
3941 b'',
3954 b'',
3942 b'nologhandshake',
3955 b'nologhandshake',
3943 False,
3956 False,
3944 _(b'do not log I/O related to the peer handshake'),
3957 _(b'do not log I/O related to the peer handshake'),
3945 ),
3958 ),
3946 ]
3959 ]
3947 + cmdutil.remoteopts,
3960 + cmdutil.remoteopts,
3948 _(b'[PATH]'),
3961 _(b'[PATH]'),
3949 optionalrepo=True,
3962 optionalrepo=True,
3950 )
3963 )
3951 def debugwireproto(ui, repo, path=None, **opts):
3964 def debugwireproto(ui, repo, path=None, **opts):
3952 """send wire protocol commands to a server
3965 """send wire protocol commands to a server
3953
3966
3954 This command can be used to issue wire protocol commands to remote
3967 This command can be used to issue wire protocol commands to remote
3955 peers and to debug the raw data being exchanged.
3968 peers and to debug the raw data being exchanged.
3956
3969
3957 ``--localssh`` will start an SSH server against the current repository
3970 ``--localssh`` will start an SSH server against the current repository
3958 and connect to that. By default, the connection will perform a handshake
3971 and connect to that. By default, the connection will perform a handshake
3959 and establish an appropriate peer instance.
3972 and establish an appropriate peer instance.
3960
3973
3961 ``--peer`` can be used to bypass the handshake protocol and construct a
3974 ``--peer`` can be used to bypass the handshake protocol and construct a
3962 peer instance using the specified class type. Valid values are ``raw``,
3975 peer instance using the specified class type. Valid values are ``raw``,
3963 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3976 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3964 raw data payloads and don't support higher-level command actions.
3977 raw data payloads and don't support higher-level command actions.
3965
3978
3966 ``--noreadstderr`` can be used to disable automatic reading from stderr
3979 ``--noreadstderr`` can be used to disable automatic reading from stderr
3967 of the peer (for SSH connections only). Disabling automatic reading of
3980 of the peer (for SSH connections only). Disabling automatic reading of
3968 stderr is useful for making output more deterministic.
3981 stderr is useful for making output more deterministic.
3969
3982
3970 Commands are issued via a mini-language supplied on stdin.
3983 Commands are issued via a mini-language supplied on stdin.
3971 The language consists of individual actions to perform. An action is
3984 The language consists of individual actions to perform. An action is
3972 defined by a block. A block is defined as a line with no leading
3985 defined by a block. A block is defined as a line with no leading
3973 space followed by 0 or more lines with leading space. Blocks are
3986 space followed by 0 or more lines with leading space. Blocks are
3974 effectively a high-level command with additional metadata.
3987 effectively a high-level command with additional metadata.
3975
3988
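    In practice the blocks are usually fed to this command from a test script,
    e.g. via a shell heredoc (an illustrative sketch; any of the actions
    documented below can be used in place of the ``command`` block)::

        $ hg debugwireproto --localssh << EOF
        > command listkeys
        >     namespace namespaces
        > EOF
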
3976 Lines beginning with ``#`` are ignored.
3989 Lines beginning with ``#`` are ignored.
3977
3990
3978 The following sections denote available actions.
3991 The following sections denote available actions.
3979
3992
3980 raw
3993 raw
3981 ---
3994 ---
3982
3995
3983 Send raw data to the server.
3996 Send raw data to the server.
3984
3997
3985 The block payload contains the raw data to send as one atomic send
3998 The block payload contains the raw data to send as one atomic send
3986 operation. The data may not actually be delivered in a single system
3999 operation. The data may not actually be delivered in a single system
3987 call: it depends on the abilities of the transport being used.
4000 call: it depends on the abilities of the transport being used.
3988
4001
3989 Each line in the block is de-indented and concatenated. Then, that
4002 Each line in the block is de-indented and concatenated. Then, that
3990 value is evaluated as a Python b'' literal. This allows the use of
4003 value is evaluated as a Python b'' literal. This allows the use of
3991 backslash escaping, etc.
4004 backslash escaping, etc.
3992
4005
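    For example, against an SSH peer created with ``--peer raw``, the following
    blocks send the SSH handshake command ``hello`` as raw bytes and then read
    two lines of the server's reply (an illustrative sketch)::

        raw
            hello\n
        readline
        readline
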
3993 raw+
4006 raw+
3994 ----
4007 ----
3995
4008
3996 Behaves like ``raw`` except that it flushes output afterwards.
4009 Behaves like ``raw`` except that it flushes output afterwards.
3997
4010
3998 command <X>
4011 command <X>
3999 -----------
4012 -----------
4000
4013
4001 Send a request to run a named command, whose name follows the ``command``
4014 Send a request to run a named command, whose name follows the ``command``
4002 string.
4015 string.
4003
4016
4004 Arguments to the command are defined as lines in this block. The format of
4017 Arguments to the command are defined as lines in this block. The format of
4005 each line is ``<key> <value>``. e.g.::
4018 each line is ``<key> <value>``. e.g.::
4006
4019
4007 command listkeys
4020 command listkeys
4008 namespace bookmarks
4021 namespace bookmarks
4009
4022
4010 If the value begins with ``eval:``, it will be interpreted as a Python
4023 If the value begins with ``eval:``, it will be interpreted as a Python
4011 literal expression. Otherwise values are interpreted as Python b'' literals.
4024 literal expression. Otherwise values are interpreted as Python b'' literals.
4012 This allows sending complex types and encoding special byte sequences via
4025 This allows sending complex types and encoding special byte sequences via
4013 backslash escaping.
4026 backslash escaping.
4014
4027
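    For instance, a hypothetical block passing an empty Python list to the
    ``known`` command could be written as::

        command known
            nodes eval:[]
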
4015 The following arguments have special meaning:
4028 The following arguments have special meaning:
4016
4029
4017 ``PUSHFILE``
4030 ``PUSHFILE``
4018 When defined, the *push* mechanism of the peer will be used instead
4031 When defined, the *push* mechanism of the peer will be used instead
4019 of the static request-response mechanism, and the content of the
4032 of the static request-response mechanism, and the content of the
4020 file specified in the value of this argument will be sent as the
4033 file specified in the value of this argument will be sent as the
4021 command payload.
4034 command payload.
4022
4035
4023 This can be used to submit a local bundle file to the remote.
4036 This can be used to submit a local bundle file to the remote.
4024
4037
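    For example, a hypothetical block submitting a previously generated bundle
    file ``../push.hg`` via the ``unbundle`` command might look like this
    (``666f726365`` is the hex encoding of ``force``)::

        command unbundle
            PUSHFILE ../push.hg
            heads 666f726365
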
4025 batchbegin
4038 batchbegin
4026 ----------
4039 ----------
4027
4040
4028 Instruct the peer to begin a batched send.
4041 Instruct the peer to begin a batched send.
4029
4042
4030 All ``command`` blocks are queued for execution until the next
4043 All ``command`` blocks are queued for execution until the next
4031 ``batchsubmit`` block.
4044 ``batchsubmit`` block.
4032
4045
4033 batchsubmit
4046 batchsubmit
4034 -----------
4047 -----------
4035
4048
4036 Submit previously queued ``command`` blocks as a batch request.
4049 Submit previously queued ``command`` blocks as a batch request.
4037
4050
4038 This action MUST be paired with a ``batchbegin`` action.
4051 This action MUST be paired with a ``batchbegin`` action.
4039
4052
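    A minimal sketch batching two read-only commands::

        batchbegin
        command heads
        command listkeys
            namespace bookmarks
        batchsubmit
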
4040 httprequest <method> <path>
4053 httprequest <method> <path>
4041 ---------------------------
4054 ---------------------------
4042
4055
4043 (HTTP peer only)
4056 (HTTP peer only)
4044
4057
4045 Send an HTTP request to the peer.
4058 Send an HTTP request to the peer.
4046
4059
4047 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4060 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4048
4061
4049 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4062 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4050 headers to add to the request. e.g. ``Accept: foo``.
4063 headers to add to the request. e.g. ``Accept: foo``.
4051
4064
4052 The following arguments are special:
4065 The following arguments are special:
4053
4066
4054 ``BODYFILE``
4067 ``BODYFILE``
4055 The content of the file defined as the value to this argument will be
4068 The content of the file defined as the value to this argument will be
4056 transferred verbatim as the HTTP request body.
4069 transferred verbatim as the HTTP request body.
4057
4070
4058 ``frame <type> <flags> <payload>``
4071 ``frame <type> <flags> <payload>``
4059 Send a unified protocol frame as part of the request body.
4072 Send a unified protocol frame as part of the request body.
4060
4073
4061 All frames will be collected and sent as the body to the HTTP
4074 All frames will be collected and sent as the body to the HTTP
4062 request.
4075 request.
4063
4076
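    For example, a sketch of a capabilities request against a Mercurial HTTP
    server (the header values here are arbitrary)::

        httprequest GET ?cmd=capabilities
            accept: application/mercurial-0.1
            user-agent: test
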
4064 close
4077 close
4065 -----
4078 -----
4066
4079
4067 Close the connection to the server.
4080 Close the connection to the server.
4068
4081
4069 flush
4082 flush
4070 -----
4083 -----
4071
4084
4072 Flush data written to the server.
4085 Flush data written to the server.
4073
4086
4074 readavailable
4087 readavailable
4075 -------------
4088 -------------
4076
4089
4077 Close the write end of the connection and read all available data from
4090 Close the write end of the connection and read all available data from
4078 the server.
4091 the server.
4079
4092
4080 If the connection to the server encompasses multiple pipes, we poll both
4093 If the connection to the server encompasses multiple pipes, we poll both
4081 pipes and read available data.
4094 pipes and read available data.
4082
4095
4083 readline
4096 readline
4084 --------
4097 --------
4085
4098
4086 Read a line of output from the server. If there are multiple output
4099 Read a line of output from the server. If there are multiple output
4087 pipes, reads only the main pipe.
4100 pipes, reads only the main pipe.
4088
4101
4089 ereadline
4102 ereadline
4090 ---------
4103 ---------
4091
4104
4092 Like ``readline``, but read from the stderr pipe, if available.
4105 Like ``readline``, but read from the stderr pipe, if available.
4093
4106
4094 read <X>
4107 read <X>
4095 --------
4108 --------
4096
4109
4097 ``read()`` N bytes from the server's main output pipe.
4110 ``read()`` N bytes from the server's main output pipe.
4098
4111
4099 eread <X>
4112 eread <X>
4100 ---------
4113 ---------
4101
4114
4102 ``read()`` N bytes from the server's stderr pipe, if available.
4115 ``read()`` N bytes from the server's stderr pipe, if available.
4103
4116
4104 Specifying Unified Frame-Based Protocol Frames
4117 Specifying Unified Frame-Based Protocol Frames
4105 ----------------------------------------------
4118 ----------------------------------------------
4106
4119
4107 It is possible to emit *Unified Frame-Based Protocol* frames by using
4120 It is possible to emit *Unified Frame-Based Protocol* frames by using
4108 special syntax.
4121 special syntax.
4109
4122
4110 A frame is composed of a type, flags, and a payload. These can be parsed
4123 A frame is composed of a type, flags, and a payload. These can be parsed
4111 from a string of the form:
4124 from a string of the form:
4112
4125
4113 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4126 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4114
4127
4115 ``request-id`` and ``stream-id`` are integers defining the request and
4128 ``request-id`` and ``stream-id`` are integers defining the request and
4116 stream identifiers.
4129 stream identifiers.
4117
4130
4118 ``type`` can be an integer value for the frame type or the string name
4131 ``type`` can be an integer value for the frame type or the string name
4119 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4132 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4120 ``command-name``.
4133 ``command-name``.
4121
4134
4122 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4135 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4123 components. Each component (and there can be just one) can be an integer
4136 components. Each component (and there can be just one) can be an integer
4124 or a flag name for stream flags or frame flags, respectively. Values are
4137 or a flag name for stream flags or frame flags, respectively. Values are
4125 resolved to integers and then bitwise OR'd together.
4138 resolved to integers and then bitwise OR'd together.
4126
4139
4127 ``payload`` represents the raw frame payload. If it begins with
4140 ``payload`` represents the raw frame payload. If it begins with
4128 ``cbor:``, the following string is evaluated as Python code and the
4141 ``cbor:``, the following string is evaluated as Python code and the
4129 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4142 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4130 as a Python byte string literal.
4143 as a Python byte string literal.
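    Putting these pieces together, a sketch of a frame issuing the ``heads``
    command on a new stream could be written as::

        frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}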
4131 """
4144 """
4132 opts = pycompat.byteskwargs(opts)
4145 opts = pycompat.byteskwargs(opts)
4133
4146
4134 if opts[b'localssh'] and not repo:
4147 if opts[b'localssh'] and not repo:
4135 raise error.Abort(_(b'--localssh requires a repository'))
4148 raise error.Abort(_(b'--localssh requires a repository'))
4136
4149
4137 if opts[b'peer'] and opts[b'peer'] not in (
4150 if opts[b'peer'] and opts[b'peer'] not in (
4138 b'raw',
4151 b'raw',
4139 b'http2',
4152 b'http2',
4140 b'ssh1',
4153 b'ssh1',
4141 b'ssh2',
4154 b'ssh2',
4142 ):
4155 ):
4143 raise error.Abort(
4156 raise error.Abort(
4144 _(b'invalid value for --peer'),
4157 _(b'invalid value for --peer'),
4145 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4158 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4146 )
4159 )
4147
4160
4148 if path and opts[b'localssh']:
4161 if path and opts[b'localssh']:
4149 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4162 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4150
4163
4151 if ui.interactive():
4164 if ui.interactive():
4152 ui.write(_(b'(waiting for commands on stdin)\n'))
4165 ui.write(_(b'(waiting for commands on stdin)\n'))
4153
4166
4154 blocks = list(_parsewirelangblocks(ui.fin))
4167 blocks = list(_parsewirelangblocks(ui.fin))
4155
4168
4156 proc = None
4169 proc = None
4157 stdin = None
4170 stdin = None
4158 stdout = None
4171 stdout = None
4159 stderr = None
4172 stderr = None
4160 opener = None
4173 opener = None
4161
4174
4162 if opts[b'localssh']:
4175 if opts[b'localssh']:
4163 # We start the SSH server in its own process so there is process
4176 # We start the SSH server in its own process so there is process
4164 # separation. This prevents a whole class of potential bugs around
4177 # separation. This prevents a whole class of potential bugs around
4165 # shared state from interfering with server operation.
4178 # shared state from interfering with server operation.
4166 args = procutil.hgcmd() + [
4179 args = procutil.hgcmd() + [
4167 b'-R',
4180 b'-R',
4168 repo.root,
4181 repo.root,
4169 b'debugserve',
4182 b'debugserve',
4170 b'--sshstdio',
4183 b'--sshstdio',
4171 ]
4184 ]
4172 proc = subprocess.Popen(
4185 proc = subprocess.Popen(
4173 pycompat.rapply(procutil.tonativestr, args),
4186 pycompat.rapply(procutil.tonativestr, args),
4174 stdin=subprocess.PIPE,
4187 stdin=subprocess.PIPE,
4175 stdout=subprocess.PIPE,
4188 stdout=subprocess.PIPE,
4176 stderr=subprocess.PIPE,
4189 stderr=subprocess.PIPE,
4177 bufsize=0,
4190 bufsize=0,
4178 )
4191 )
4179
4192
4180 stdin = proc.stdin
4193 stdin = proc.stdin
4181 stdout = proc.stdout
4194 stdout = proc.stdout
4182 stderr = proc.stderr
4195 stderr = proc.stderr
4183
4196
4184 # We turn the pipes into observers so we can log I/O.
4197 # We turn the pipes into observers so we can log I/O.
4185 if ui.verbose or opts[b'peer'] == b'raw':
4198 if ui.verbose or opts[b'peer'] == b'raw':
4186 stdin = util.makeloggingfileobject(
4199 stdin = util.makeloggingfileobject(
4187 ui, proc.stdin, b'i', logdata=True
4200 ui, proc.stdin, b'i', logdata=True
4188 )
4201 )
4189 stdout = util.makeloggingfileobject(
4202 stdout = util.makeloggingfileobject(
4190 ui, proc.stdout, b'o', logdata=True
4203 ui, proc.stdout, b'o', logdata=True
4191 )
4204 )
4192 stderr = util.makeloggingfileobject(
4205 stderr = util.makeloggingfileobject(
4193 ui, proc.stderr, b'e', logdata=True
4206 ui, proc.stderr, b'e', logdata=True
4194 )
4207 )
4195
4208
4196 # --localssh also implies the peer connection settings.
4209 # --localssh also implies the peer connection settings.
4197
4210
4198 url = b'ssh://localserver'
4211 url = b'ssh://localserver'
4199 autoreadstderr = not opts[b'noreadstderr']
4212 autoreadstderr = not opts[b'noreadstderr']
4200
4213
4201 if opts[b'peer'] == b'ssh1':
4214 if opts[b'peer'] == b'ssh1':
4202 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4215 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4203 peer = sshpeer.sshv1peer(
4216 peer = sshpeer.sshv1peer(
4204 ui,
4217 ui,
4205 url,
4218 url,
4206 proc,
4219 proc,
4207 stdin,
4220 stdin,
4208 stdout,
4221 stdout,
4209 stderr,
4222 stderr,
4210 None,
4223 None,
4211 autoreadstderr=autoreadstderr,
4224 autoreadstderr=autoreadstderr,
4212 )
4225 )
4213 elif opts[b'peer'] == b'ssh2':
4226 elif opts[b'peer'] == b'ssh2':
4214 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4227 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4215 peer = sshpeer.sshv2peer(
4228 peer = sshpeer.sshv2peer(
4216 ui,
4229 ui,
4217 url,
4230 url,
4218 proc,
4231 proc,
4219 stdin,
4232 stdin,
4220 stdout,
4233 stdout,
4221 stderr,
4234 stderr,
4222 None,
4235 None,
4223 autoreadstderr=autoreadstderr,
4236 autoreadstderr=autoreadstderr,
4224 )
4237 )
4225 elif opts[b'peer'] == b'raw':
4238 elif opts[b'peer'] == b'raw':
4226 ui.write(_(b'using raw connection to peer\n'))
4239 ui.write(_(b'using raw connection to peer\n'))
4227 peer = None
4240 peer = None
4228 else:
4241 else:
4229 ui.write(_(b'creating ssh peer from handshake results\n'))
4242 ui.write(_(b'creating ssh peer from handshake results\n'))
4230 peer = sshpeer.makepeer(
4243 peer = sshpeer.makepeer(
4231 ui,
4244 ui,
4232 url,
4245 url,
4233 proc,
4246 proc,
4234 stdin,
4247 stdin,
4235 stdout,
4248 stdout,
4236 stderr,
4249 stderr,
4237 autoreadstderr=autoreadstderr,
4250 autoreadstderr=autoreadstderr,
4238 )
4251 )
4239
4252
4240 elif path:
4253 elif path:
4241 # We bypass hg.peer() so we can proxy the sockets.
4254 # We bypass hg.peer() so we can proxy the sockets.
4242 # TODO consider not doing this because we skip
4255 # TODO consider not doing this because we skip
4243 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4256 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4244 u = util.url(path)
4257 u = util.url(path)
4245 if u.scheme != b'http':
4258 if u.scheme != b'http':
4246 raise error.Abort(_(b'only http:// paths are currently supported'))
4259 raise error.Abort(_(b'only http:// paths are currently supported'))
4247
4260
4248 url, authinfo = u.authinfo()
4261 url, authinfo = u.authinfo()
4249 openerargs = {
4262 openerargs = {
4250 'useragent': b'Mercurial debugwireproto',
4263 'useragent': b'Mercurial debugwireproto',
4251 }
4264 }
4252
4265
4253 # Turn pipes/sockets into observers so we can log I/O.
4266 # Turn pipes/sockets into observers so we can log I/O.
4254 if ui.verbose:
4267 if ui.verbose:
4255 openerargs.update(
4268 openerargs.update(
4256 {
4269 {
4257 'loggingfh': ui,
4270 'loggingfh': ui,
4258 'loggingname': b's',
4271 'loggingname': b's',
4259 'loggingopts': {'logdata': True, 'logdataapis': False,},
4272 'loggingopts': {'logdata': True, 'logdataapis': False,},
4260 }
4273 }
4261 )
4274 )
4262
4275
4263 if ui.debugflag:
4276 if ui.debugflag:
4264 openerargs['loggingopts']['logdataapis'] = True
4277 openerargs['loggingopts']['logdataapis'] = True
4265
4278
4266 # Don't send default headers when in raw mode. This allows us to
4279 # Don't send default headers when in raw mode. This allows us to
4267 # bypass most of the behavior of our URL handling code so we can
4280 # bypass most of the behavior of our URL handling code so we can
4268 # have near complete control over what's sent on the wire.
4281 # have near complete control over what's sent on the wire.
4269 if opts[b'peer'] == b'raw':
4282 if opts[b'peer'] == b'raw':
4270 openerargs['sendaccept'] = False
4283 openerargs['sendaccept'] = False
4271
4284
4272 opener = urlmod.opener(ui, authinfo, **openerargs)
4285 opener = urlmod.opener(ui, authinfo, **openerargs)
4273
4286
4274 if opts[b'peer'] == b'http2':
4287 if opts[b'peer'] == b'http2':
4275 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4288 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4276 # We go through makepeer() because we need an API descriptor for
4289 # We go through makepeer() because we need an API descriptor for
4277 # the peer instance to be useful.
4290 # the peer instance to be useful.
4278 with ui.configoverride(
4291 with ui.configoverride(
4279 {(b'experimental', b'httppeer.advertise-v2'): True}
4292 {(b'experimental', b'httppeer.advertise-v2'): True}
4280 ):
4293 ):
4281 if opts[b'nologhandshake']:
4294 if opts[b'nologhandshake']:
4282 ui.pushbuffer()
4295 ui.pushbuffer()
4283
4296
4284 peer = httppeer.makepeer(ui, path, opener=opener)
4297 peer = httppeer.makepeer(ui, path, opener=opener)
4285
4298
4286 if opts[b'nologhandshake']:
4299 if opts[b'nologhandshake']:
4287 ui.popbuffer()
4300 ui.popbuffer()
4288
4301
4289 if not isinstance(peer, httppeer.httpv2peer):
4302 if not isinstance(peer, httppeer.httpv2peer):
4290 raise error.Abort(
4303 raise error.Abort(
4291 _(
4304 _(
4292 b'could not instantiate HTTP peer for '
4305 b'could not instantiate HTTP peer for '
4293 b'wire protocol version 2'
4306 b'wire protocol version 2'
4294 ),
4307 ),
4295 hint=_(
4308 hint=_(
4296 b'the server may not have the feature '
4309 b'the server may not have the feature '
4297 b'enabled or is not allowing this '
4310 b'enabled or is not allowing this '
4298 b'client version'
4311 b'client version'
4299 ),
4312 ),
4300 )
4313 )
4301
4314
4302 elif opts[b'peer'] == b'raw':
4315 elif opts[b'peer'] == b'raw':
4303 ui.write(_(b'using raw connection to peer\n'))
4316 ui.write(_(b'using raw connection to peer\n'))
4304 peer = None
4317 peer = None
4305 elif opts[b'peer']:
4318 elif opts[b'peer']:
4306 raise error.Abort(
4319 raise error.Abort(
4307 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4320 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4308 )
4321 )
4309 else:
4322 else:
4310 peer = httppeer.makepeer(ui, path, opener=opener)
4323 peer = httppeer.makepeer(ui, path, opener=opener)
4311
4324
4312 # We /could/ populate stdin/stdout with sock.makefile()...
4325 # We /could/ populate stdin/stdout with sock.makefile()...
4313 else:
4326 else:
4314 raise error.Abort(_(b'unsupported connection configuration'))
4327 raise error.Abort(_(b'unsupported connection configuration'))
4315
4328
4316 batchedcommands = None
4329 batchedcommands = None
4317
4330
4318 # Now perform actions based on the parsed wire language instructions.
4331 # Now perform actions based on the parsed wire language instructions.
4319 for action, lines in blocks:
4332 for action, lines in blocks:
4320 if action in (b'raw', b'raw+'):
4333 if action in (b'raw', b'raw+'):
4321 if not stdin:
4334 if not stdin:
4322 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4335 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4323
4336
4324 # Concatenate the data together.
4337 # Concatenate the data together.
4325 data = b''.join(l.lstrip() for l in lines)
4338 data = b''.join(l.lstrip() for l in lines)
4326 data = stringutil.unescapestr(data)
4339 data = stringutil.unescapestr(data)
4327 stdin.write(data)
4340 stdin.write(data)
4328
4341
4329 if action == b'raw+':
4342 if action == b'raw+':
4330 stdin.flush()
4343 stdin.flush()
4331 elif action == b'flush':
4344 elif action == b'flush':
4332 if not stdin:
4345 if not stdin:
4333 raise error.Abort(_(b'cannot call flush on this peer'))
4346 raise error.Abort(_(b'cannot call flush on this peer'))
4334 stdin.flush()
4347 stdin.flush()
4335 elif action.startswith(b'command'):
4348 elif action.startswith(b'command'):
4336 if not peer:
4349 if not peer:
4337 raise error.Abort(
4350 raise error.Abort(
4338 _(
4351 _(
4339 b'cannot send commands unless peer instance '
4352 b'cannot send commands unless peer instance '
4340 b'is available'
4353 b'is available'
4341 )
4354 )
4342 )
4355 )
4343
4356
4344 command = action.split(b' ', 1)[1]
4357 command = action.split(b' ', 1)[1]
4345
4358
4346 args = {}
4359 args = {}
4347 for line in lines:
4360 for line in lines:
4348 # We need to allow empty values.
4361 # We need to allow empty values.
4349 fields = line.lstrip().split(b' ', 1)
4362 fields = line.lstrip().split(b' ', 1)
4350 if len(fields) == 1:
4363 if len(fields) == 1:
4351 key = fields[0]
4364 key = fields[0]
4352 value = b''
4365 value = b''
4353 else:
4366 else:
4354 key, value = fields
4367 key, value = fields
4355
4368
4356 if value.startswith(b'eval:'):
4369 if value.startswith(b'eval:'):
4357 value = stringutil.evalpythonliteral(value[5:])
4370 value = stringutil.evalpythonliteral(value[5:])
4358 else:
4371 else:
4359 value = stringutil.unescapestr(value)
4372 value = stringutil.unescapestr(value)
4360
4373
4361 args[key] = value
4374 args[key] = value
4362
4375
4363 if batchedcommands is not None:
4376 if batchedcommands is not None:
4364 batchedcommands.append((command, args))
4377 batchedcommands.append((command, args))
4365 continue
4378 continue
4366
4379
4367 ui.status(_(b'sending %s command\n') % command)
4380 ui.status(_(b'sending %s command\n') % command)
4368
4381
4369 if b'PUSHFILE' in args:
4382 if b'PUSHFILE' in args:
4370 with open(args[b'PUSHFILE'], 'rb') as fh:
4383 with open(args[b'PUSHFILE'], 'rb') as fh:
4371 del args[b'PUSHFILE']
4384 del args[b'PUSHFILE']
4372 res, output = peer._callpush(
4385 res, output = peer._callpush(
4373 command, fh, **pycompat.strkwargs(args)
4386 command, fh, **pycompat.strkwargs(args)
4374 )
4387 )
4375 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4388 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4376 ui.status(
4389 ui.status(
4377 _(b'remote output: %s\n') % stringutil.escapestr(output)
4390 _(b'remote output: %s\n') % stringutil.escapestr(output)
4378 )
4391 )
4379 else:
4392 else:
4380 with peer.commandexecutor() as e:
4393 with peer.commandexecutor() as e:
4381 res = e.callcommand(command, args).result()
4394 res = e.callcommand(command, args).result()
4382
4395
4383 if isinstance(res, wireprotov2peer.commandresponse):
4396 if isinstance(res, wireprotov2peer.commandresponse):
4384 val = res.objects()
4397 val = res.objects()
4385 ui.status(
4398 ui.status(
4386 _(b'response: %s\n')
4399 _(b'response: %s\n')
4387 % stringutil.pprint(val, bprefix=True, indent=2)
4400 % stringutil.pprint(val, bprefix=True, indent=2)
4388 )
4401 )
4389 else:
4402 else:
4390 ui.status(
4403 ui.status(
4391 _(b'response: %s\n')
4404 _(b'response: %s\n')
4392 % stringutil.pprint(res, bprefix=True, indent=2)
4405 % stringutil.pprint(res, bprefix=True, indent=2)
4393 )
4406 )
4394
4407
4395 elif action == b'batchbegin':
4408 elif action == b'batchbegin':
4396 if batchedcommands is not None:
4409 if batchedcommands is not None:
4397 raise error.Abort(_(b'nested batchbegin not allowed'))
4410 raise error.Abort(_(b'nested batchbegin not allowed'))
4398
4411
4399 batchedcommands = []
4412 batchedcommands = []
4400 elif action == b'batchsubmit':
4413 elif action == b'batchsubmit':
4401 # There is a batching API we could go through. But it would be
4414 # There is a batching API we could go through. But it would be
4402 # difficult to normalize requests into function calls. It is easier
4415 # difficult to normalize requests into function calls. It is easier
4403 # to bypass this layer and normalize to commands + args.
4416 # to bypass this layer and normalize to commands + args.
4404 ui.status(
4417 ui.status(
4405 _(b'sending batch with %d sub-commands\n')
4418 _(b'sending batch with %d sub-commands\n')
4406 % len(batchedcommands)
4419 % len(batchedcommands)
4407 )
4420 )
4408 assert peer is not None
4421 assert peer is not None
4409 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4422 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4410 ui.status(
4423 ui.status(
4411 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4424 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4412 )
4425 )
4413
4426
4414 batchedcommands = None
4427 batchedcommands = None
4415
4428
4416 elif action.startswith(b'httprequest '):
4429 elif action.startswith(b'httprequest '):
4417 if not opener:
4430 if not opener:
4418 raise error.Abort(
4431 raise error.Abort(
4419 _(b'cannot use httprequest without an HTTP peer')
4432 _(b'cannot use httprequest without an HTTP peer')
4420 )
4433 )
4421
4434
4422 request = action.split(b' ', 2)
4435 request = action.split(b' ', 2)
4423 if len(request) != 3:
4436 if len(request) != 3:
4424 raise error.Abort(
4437 raise error.Abort(
4425 _(
4438 _(
4426 b'invalid httprequest: expected format is '
4439 b'invalid httprequest: expected format is '
4427 b'"httprequest <method> <path>'
4440 b'"httprequest <method> <path>'
4428 )
4441 )
4429 )
4442 )
4430
4443
4431 method, httppath = request[1:]
4444 method, httppath = request[1:]
4432 headers = {}
4445 headers = {}
4433 body = None
4446 body = None
4434 frames = []
4447 frames = []
4435 for line in lines:
4448 for line in lines:
4436 line = line.lstrip()
4449 line = line.lstrip()
4437 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4450 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4438 if m:
4451 if m:
4439 # Headers need to use native strings.
4452 # Headers need to use native strings.
4440 key = pycompat.strurl(m.group(1))
4453 key = pycompat.strurl(m.group(1))
4441 value = pycompat.strurl(m.group(2))
4454 value = pycompat.strurl(m.group(2))
4442 headers[key] = value
4455 headers[key] = value
4443 continue
4456 continue
4444
4457
4445 if line.startswith(b'BODYFILE '):
4458 if line.startswith(b'BODYFILE '):
4446 with open(line.split(b' ', 1)[1], b'rb') as fh:
4459 with open(line.split(b' ', 1)[1], b'rb') as fh:
4447 body = fh.read()
4460 body = fh.read()
4448 elif line.startswith(b'frame '):
4461 elif line.startswith(b'frame '):
4449 frame = wireprotoframing.makeframefromhumanstring(
4462 frame = wireprotoframing.makeframefromhumanstring(
4450 line[len(b'frame ') :]
4463 line[len(b'frame ') :]
4451 )
4464 )
4452
4465
4453 frames.append(frame)
4466 frames.append(frame)
4454 else:
4467 else:
4455 raise error.Abort(
4468 raise error.Abort(
4456 _(b'unknown argument to httprequest: %s') % line
4469 _(b'unknown argument to httprequest: %s') % line
4457 )
4470 )
4458
4471
4459 url = path + httppath
4472 url = path + httppath
4460
4473
4461 if frames:
4474 if frames:
4462 body = b''.join(bytes(f) for f in frames)
4475 body = b''.join(bytes(f) for f in frames)
4463
4476
4464 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4477 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4465
4478
4466 # urllib.Request insists on using has_data() as a proxy for
4479 # urllib.Request insists on using has_data() as a proxy for
4467 # determining the request method. Override that to use our
4480 # determining the request method. Override that to use our
4468 # explicitly requested method.
4481 # explicitly requested method.
4469 req.get_method = lambda: pycompat.sysstr(method)
4482 req.get_method = lambda: pycompat.sysstr(method)
4470
4483
4471 try:
4484 try:
4472 res = opener.open(req)
4485 res = opener.open(req)
4473 body = res.read()
4486 body = res.read()
4474 except util.urlerr.urlerror as e:
4487 except util.urlerr.urlerror as e:
4475 # read() method must be called, but only exists in Python 2
4488 # read() method must be called, but only exists in Python 2
4476 getattr(e, 'read', lambda: None)()
4489 getattr(e, 'read', lambda: None)()
4477 continue
4490 continue
4478
4491
4479 ct = res.headers.get('Content-Type')
4492 ct = res.headers.get('Content-Type')
4480 if ct == 'application/mercurial-cbor':
4493 if ct == 'application/mercurial-cbor':
4481 ui.write(
4494 ui.write(
4482 _(b'cbor> %s\n')
4495 _(b'cbor> %s\n')
4483 % stringutil.pprint(
4496 % stringutil.pprint(
4484 cborutil.decodeall(body), bprefix=True, indent=2
4497 cborutil.decodeall(body), bprefix=True, indent=2
4485 )
4498 )
4486 )
4499 )
4487
4500
4488 elif action == b'close':
4501 elif action == b'close':
4489 assert peer is not None
4502 assert peer is not None
4490 peer.close()
4503 peer.close()
4491 elif action == b'readavailable':
4504 elif action == b'readavailable':
4492 if not stdout or not stderr:
4505 if not stdout or not stderr:
4493 raise error.Abort(
4506 raise error.Abort(
4494 _(b'readavailable not available on this peer')
4507 _(b'readavailable not available on this peer')
4495 )
4508 )
4496
4509
4497 stdin.close()
4510 stdin.close()
4498 stdout.read()
4511 stdout.read()
4499 stderr.read()
4512 stderr.read()
4500
4513
4501 elif action == b'readline':
4514 elif action == b'readline':
4502 if not stdout:
4515 if not stdout:
4503 raise error.Abort(_(b'readline not available on this peer'))
4516 raise error.Abort(_(b'readline not available on this peer'))
4504 stdout.readline()
4517 stdout.readline()
4505 elif action == b'ereadline':
4518 elif action == b'ereadline':
4506 if not stderr:
4519 if not stderr:
4507 raise error.Abort(_(b'ereadline not available on this peer'))
4520 raise error.Abort(_(b'ereadline not available on this peer'))
4508 stderr.readline()
4521 stderr.readline()
4509 elif action.startswith(b'read '):
4522 elif action.startswith(b'read '):
4510 count = int(action.split(b' ', 1)[1])
4523 count = int(action.split(b' ', 1)[1])
4511 if not stdout:
4524 if not stdout:
4512 raise error.Abort(_(b'read not available on this peer'))
4525 raise error.Abort(_(b'read not available on this peer'))
4513 stdout.read(count)
4526 stdout.read(count)
4514 elif action.startswith(b'eread '):
4527 elif action.startswith(b'eread '):
4515 count = int(action.split(b' ', 1)[1])
4528 count = int(action.split(b' ', 1)[1])
4516 if not stderr:
4529 if not stderr:
4517 raise error.Abort(_(b'eread not available on this peer'))
4530 raise error.Abort(_(b'eread not available on this peer'))
4518 stderr.read(count)
4531 stderr.read(count)
4519 else:
4532 else:
4520 raise error.Abort(_(b'unknown action: %s') % action)
4533 raise error.Abort(_(b'unknown action: %s') % action)
4521
4534
4522 if batchedcommands is not None:
4535 if batchedcommands is not None:
4523 raise error.Abort(_(b'unclosed "batchbegin" request'))
4536 raise error.Abort(_(b'unclosed "batchbegin" request'))
4524
4537
4525 if peer:
4538 if peer:
4526 peer.close()
4539 peer.close()
4527
4540
4528 if proc:
4541 if proc:
4529 proc.kill()
4542 proc.kill()
@@ -1,795 +1,796 b''
1 test that a commit clears the merge state.
1 test that a commit clears the merge state.
2
2
3 $ hg init repo
3 $ hg init repo
4 $ cd repo
4 $ cd repo
5
5
6 $ echo foo > file1
6 $ echo foo > file1
7 $ echo foo > file2
7 $ echo foo > file2
8 $ hg commit -Am 'add files'
8 $ hg commit -Am 'add files'
9 adding file1
9 adding file1
10 adding file2
10 adding file2
11
11
12 $ echo bar >> file1
12 $ echo bar >> file1
13 $ echo bar >> file2
13 $ echo bar >> file2
14 $ hg commit -Am 'append bar to files'
14 $ hg commit -Am 'append bar to files'
15
15
16 create a second head with conflicting edits
16 create a second head with conflicting edits
17
17
18 $ hg up -C 0
18 $ hg up -C 0
19 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
19 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
20 $ echo baz >> file1
20 $ echo baz >> file1
21 $ echo baz >> file2
21 $ echo baz >> file2
22 $ hg commit -Am 'append baz to files'
22 $ hg commit -Am 'append baz to files'
23 created new head
23 created new head
24
24
25 create a third head with no conflicting edits
25 create a third head with no conflicting edits
26 $ hg up -qC 0
26 $ hg up -qC 0
27 $ echo foo > file3
27 $ echo foo > file3
28 $ hg commit -Am 'add non-conflicting file'
28 $ hg commit -Am 'add non-conflicting file'
29 adding file3
29 adding file3
30 created new head
30 created new head
31
31
32 failing merge
32 failing merge
33
33
34 $ hg up -qC 2
34 $ hg up -qC 2
35 $ hg merge --tool=internal:fail 1
35 $ hg merge --tool=internal:fail 1
36 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
36 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
37 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
37 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
38 [1]
38 [1]
39
39
40 resolve -l should contain unresolved entries
40 resolve -l should contain unresolved entries
41
41
42 $ hg resolve -l
42 $ hg resolve -l
43 U file1
43 U file1
44 U file2
44 U file2
45
45
46 $ hg resolve -l --no-status
46 $ hg resolve -l --no-status
47 file1
47 file1
48 file2
48 file2
49
49
50 resolving an unknown path should emit a warning, but not for -l
50 resolving an unknown path should emit a warning, but not for -l
51
51
52 $ hg resolve -m does-not-exist
52 $ hg resolve -m does-not-exist
53 arguments do not match paths that need resolving
53 arguments do not match paths that need resolving
54 $ hg resolve -l does-not-exist
54 $ hg resolve -l does-not-exist
55
55
56 tell users how they could have used resolve
56 tell users how they could have used resolve
57
57
58 $ mkdir nested
58 $ mkdir nested
59 $ cd nested
59 $ cd nested
60 $ hg resolve -m file1
60 $ hg resolve -m file1
61 arguments do not match paths that need resolving
61 arguments do not match paths that need resolving
62 (try: hg resolve -m path:file1)
62 (try: hg resolve -m path:file1)
63 $ hg resolve -m file1 filez
63 $ hg resolve -m file1 filez
64 arguments do not match paths that need resolving
64 arguments do not match paths that need resolving
65 (try: hg resolve -m path:file1 path:filez)
65 (try: hg resolve -m path:file1 path:filez)
66 $ hg resolve -m path:file1 path:filez
66 $ hg resolve -m path:file1 path:filez
67 $ hg resolve -l
67 $ hg resolve -l
68 R file1
68 R file1
69 U file2
69 U file2
70 $ hg resolve -l --config ui.relative-paths=yes
70 $ hg resolve -l --config ui.relative-paths=yes
71 R ../file1
71 R ../file1
72 U ../file2
72 U ../file2
73 $ hg resolve --re-merge filez file2
73 $ hg resolve --re-merge filez file2
74 arguments do not match paths that need resolving
74 arguments do not match paths that need resolving
75 (try: hg resolve --re-merge path:filez path:file2)
75 (try: hg resolve --re-merge path:filez path:file2)
76 $ hg resolve -m filez file2
76 $ hg resolve -m filez file2
77 arguments do not match paths that need resolving
77 arguments do not match paths that need resolving
78 (try: hg resolve -m path:filez path:file2)
78 (try: hg resolve -m path:filez path:file2)
79 $ hg resolve -m path:filez path:file2
79 $ hg resolve -m path:filez path:file2
80 (no more unresolved files)
80 (no more unresolved files)
81 $ hg resolve -l
81 $ hg resolve -l
82 R file1
82 R file1
83 R file2
83 R file2
84
84
85 cleanup
85 cleanup
86 $ hg resolve -u
86 $ hg resolve -u
87 $ cd ..
87 $ cd ..
88 $ rmdir nested
88 $ rmdir nested
89
89
90 don't allow marking or unmarking driver-resolved files
90 don't allow marking or unmarking driver-resolved files
91
91
92 $ cat > $TESTTMP/markdriver.py << EOF
92 $ cat > $TESTTMP/markdriver.py << EOF
93 > '''mark and unmark files as driver-resolved'''
93 > '''mark and unmark files as driver-resolved'''
94 > from mercurial import (
94 > from mercurial import (
95 > mergestate,
95 > mergestate,
96 > pycompat,
96 > pycompat,
97 > registrar,
97 > registrar,
98 > scmutil,
98 > scmutil,
99 > )
99 > )
100 > cmdtable = {}
100 > cmdtable = {}
101 > command = registrar.command(cmdtable)
101 > command = registrar.command(cmdtable)
102 > @command(b'markdriver',
102 > @command(b'markdriver',
103 > [(b'u', b'unmark', None, b'')],
103 > [(b'u', b'unmark', None, b'')],
104 > b'FILE...')
104 > b'FILE...')
105 > def markdriver(ui, repo, *pats, **opts):
105 > def markdriver(ui, repo, *pats, **opts):
106 > wlock = repo.wlock()
106 > wlock = repo.wlock()
107 > opts = pycompat.byteskwargs(opts)
107 > opts = pycompat.byteskwargs(opts)
108 > try:
108 > try:
109 > ms = mergestate.mergestate.read(repo)
109 > ms = mergestate.mergestate.read(repo)
110 > m = scmutil.match(repo[None], pats, opts)
110 > m = scmutil.match(repo[None], pats, opts)
111 > for f in ms:
111 > for f in ms:
112 > if not m(f):
112 > if not m(f):
113 > continue
113 > continue
114 > if not opts[b'unmark']:
114 > if not opts[b'unmark']:
115 > ms.mark(f, b'd')
115 > ms.mark(f, b'd')
116 > else:
116 > else:
117 > ms.mark(f, b'u')
117 > ms.mark(f, b'u')
118 > ms.commit()
118 > ms.commit()
119 > finally:
119 > finally:
120 > wlock.release()
120 > wlock.release()
121 > EOF
121 > EOF
122 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver file1
122 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver file1
123 $ hg resolve --list
123 $ hg resolve --list
124 D file1
124 D file1
125 U file2
125 U file2
126 $ hg resolve --mark file1
126 $ hg resolve --mark file1
127 not marking file1 as it is driver-resolved
127 not marking file1 as it is driver-resolved
128 this should not print out file1
128 this should not print out file1
129 $ hg resolve --mark --all
129 $ hg resolve --mark --all
130 (no more unresolved files -- run "hg resolve --all" to conclude)
130 (no more unresolved files -- run "hg resolve --all" to conclude)
131 $ hg resolve --mark 'glob:file*'
131 $ hg resolve --mark 'glob:file*'
132 (no more unresolved files -- run "hg resolve --all" to conclude)
132 (no more unresolved files -- run "hg resolve --all" to conclude)
133 $ hg resolve --list
133 $ hg resolve --list
134 D file1
134 D file1
135 R file2
135 R file2
136 $ hg resolve --unmark file1
136 $ hg resolve --unmark file1
137 not unmarking file1 as it is driver-resolved
137 not unmarking file1 as it is driver-resolved
138 (no more unresolved files -- run "hg resolve --all" to conclude)
138 (no more unresolved files -- run "hg resolve --all" to conclude)
139 $ hg resolve --unmark --all
139 $ hg resolve --unmark --all
140 $ hg resolve --list
140 $ hg resolve --list
141 D file1
141 D file1
142 U file2
142 U file2
143 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver --unmark file1
143 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver --unmark file1
144 $ hg resolve --list
144 $ hg resolve --list
145 U file1
145 U file1
146 U file2
146 U file2
147
147
148 resolve the failure
148 resolve the failure
149
149
150 $ echo resolved > file1
150 $ echo resolved > file1
151 $ hg resolve -m file1
151 $ hg resolve -m file1
152
152
153 resolve -l should show resolved file as resolved
153 resolve -l should show resolved file as resolved
154
154
155 $ hg resolve -l
155 $ hg resolve -l
156 R file1
156 R file1
157 U file2
157 U file2
158
158
159 $ hg resolve -l -Tjson
159 $ hg resolve -l -Tjson
160 [
160 [
161 {
161 {
162 "mergestatus": "R",
162 "mergestatus": "R",
163 "path": "file1"
163 "path": "file1"
164 },
164 },
165 {
165 {
166 "mergestatus": "U",
166 "mergestatus": "U",
167 "path": "file2"
167 "path": "file2"
168 }
168 }
169 ]
169 ]
170
170
171 $ hg resolve -l -T '{path} {mergestatus} {status} {p1rev} {p2rev}\n'
171 $ hg resolve -l -T '{path} {mergestatus} {status} {p1rev} {p2rev}\n'
172 file1 R M 2 1
172 file1 R M 2 1
173 file2 U M 2 1
173 file2 U M 2 1
174
174
175 resolve -m without paths should mark all resolved
175 resolve -m without paths should mark all resolved
176
176
177 $ hg resolve -m
177 $ hg resolve -m
178 (no more unresolved files)
178 (no more unresolved files)
179 $ hg commit -m 'resolved'
179 $ hg commit -m 'resolved'
180
180
181 resolve -l should be empty after commit
181 resolve -l should be empty after commit
182
182
183 $ hg resolve -l
183 $ hg resolve -l
184
184
185 $ hg resolve -l -Tjson
185 $ hg resolve -l -Tjson
186 [
186 [
187 ]
187 ]
188
188
189 resolve --all should abort when no merge in progress
189 resolve --all should abort when no merge in progress
190
190
191 $ hg resolve --all
191 $ hg resolve --all
192 abort: resolve command not applicable when not merging
192 abort: resolve command not applicable when not merging
193 [255]
193 [255]
194
194
195 resolve -m should abort when no merge in progress
195 resolve -m should abort when no merge in progress
196
196
197 $ hg resolve -m
197 $ hg resolve -m
198 abort: resolve command not applicable when not merging
198 abort: resolve command not applicable when not merging
199 [255]
199 [255]
200
200
201 cannot update or merge when there are unresolved conflicts
201 cannot update or merge when there are unresolved conflicts
202
202
203 $ hg up -qC 0
203 $ hg up -qC 0
204 $ echo quux >> file1
204 $ echo quux >> file1
205 $ hg up 1
205 $ hg up 1
206 merging file1
206 merging file1
207 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
207 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
208 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
208 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
209 use 'hg resolve' to retry unresolved file merges
209 use 'hg resolve' to retry unresolved file merges
210 [1]
210 [1]
211 $ hg up 0
211 $ hg up 0
212 abort: outstanding merge conflicts
212 abort: outstanding merge conflicts
213 (use 'hg resolve' to resolve)
213 (use 'hg resolve' to resolve)
214 [255]
214 [255]
215 $ hg merge 2
215 $ hg merge 2
216 abort: outstanding merge conflicts
216 abort: outstanding merge conflicts
217 (use 'hg resolve' to resolve)
217 (use 'hg resolve' to resolve)
218 [255]
218 [255]
219 $ hg merge --force 2
219 $ hg merge --force 2
220 abort: outstanding merge conflicts
220 abort: outstanding merge conflicts
221 (use 'hg resolve' to resolve)
221 (use 'hg resolve' to resolve)
222 [255]
222 [255]
223
223
224 set up conflict-free merge
224 set up conflict-free merge
225
225
226 $ hg up -qC 3
226 $ hg up -qC 3
227 $ hg merge 1
227 $ hg merge 1
228 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
228 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
229 (branch merge, don't forget to commit)
229 (branch merge, don't forget to commit)
230
230
231 resolve --all should do nothing in merge without conflicts
231 resolve --all should do nothing in merge without conflicts
232 $ hg resolve --all
232 $ hg resolve --all
233 (no more unresolved files)
233 (no more unresolved files)
234
234
235 resolve -m should do nothing in merge without conflicts
235 resolve -m should do nothing in merge without conflicts
236
236
237 $ hg resolve -m
237 $ hg resolve -m
238 (no more unresolved files)
238 (no more unresolved files)
239
239
240 get back to conflicting state
240 get back to conflicting state
241
241
242 $ hg up -qC 2
242 $ hg up -qC 2
243 $ hg merge --tool=internal:fail 1
243 $ hg merge --tool=internal:fail 1
244 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
244 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
245 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
245 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
246 [1]
246 [1]
247
247
248 resolve without arguments should suggest --all
248 resolve without arguments should suggest --all
249 $ hg resolve
249 $ hg resolve
250 abort: no files or directories specified
250 abort: no files or directories specified
251 (use --all to re-merge all unresolved files)
251 (use --all to re-merge all unresolved files)
252 [255]
252 [255]
253
253
254 resolve --all should re-merge all unresolved files
254 resolve --all should re-merge all unresolved files
255 $ hg resolve --all
255 $ hg resolve --all
256 merging file1
256 merging file1
257 merging file2
257 merging file2
258 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
258 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
259 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
259 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
260 [1]
260 [1]
261 $ cat file1.orig
261 $ cat file1.orig
262 foo
262 foo
263 baz
263 baz
264 $ cat file2.orig
264 $ cat file2.orig
265 foo
265 foo
266 baz
266 baz
267
267
268 .orig files should exist where specified
268 .orig files should exist where specified
269 $ hg resolve --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
269 $ hg resolve --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
270 merging file1
270 merging file1
271 creating directory: $TESTTMP/repo/.hg/origbackups
271 creating directory: $TESTTMP/repo/.hg/origbackups
272 merging file2
272 merging file2
273 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
273 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
274 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
274 warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
275 [1]
275 [1]
276 $ ls .hg/origbackups
276 $ ls .hg/origbackups
277 file1
277 file1
278 file2
278 file2
279 $ grep '<<<' file1 > /dev/null
279 $ grep '<<<' file1 > /dev/null
280 $ grep '<<<' file2 > /dev/null
280 $ grep '<<<' file2 > /dev/null
281
281
282 resolve <file> should re-merge file
282 resolve <file> should re-merge file
283 $ echo resolved > file1
283 $ echo resolved > file1
284 $ hg resolve -q file1
284 $ hg resolve -q file1
285 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
285 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
286 [1]
286 [1]
287 $ grep '<<<' file1 > /dev/null
287 $ grep '<<<' file1 > /dev/null
288
288
289 test .orig behavior with resolve
289 test .orig behavior with resolve
290
290
291 $ hg resolve -q file1 --tool "sh -c 'f --dump \"$TESTTMP/repo/file1.orig\"'"
291 $ hg resolve -q file1 --tool "sh -c 'f --dump \"$TESTTMP/repo/file1.orig\"'"
292 $TESTTMP/repo/file1.orig:
292 $TESTTMP/repo/file1.orig:
293 >>>
293 >>>
294 foo
294 foo
295 baz
295 baz
296 <<<
296 <<<
297
297
resolve <file> should do nothing if 'file' was marked resolved
$ echo resolved > file1
$ hg resolve -m file1
$ hg resolve -q file1
$ cat file1
resolved

insert unsupported advisory merge record

$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x
$ hg debugmergestate
local (working copy): 57653b9f834a4493f7240b0681efcb9ae7cab745
other (merge rev): dc77451844e37f03f5c559e3b8529b2b48d381d1
file: file1 (state "r")
local path: file1 (hash 60b27f004e454aca81b0480209cce5081ec52390, flags "")
ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac
file: file2 (state "u")
local path: file2 (hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523, flags "")
ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd)
other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d)
extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac
$ hg resolve -l
R file1
U file2

test json output

$ hg debugmergestate -T json
[
{
"commits": [{"label": "working copy", "name": "local", "node": "57653b9f834a4493f7240b0681efcb9ae7cab745"}, {"label": "merge rev", "name": "other", "node": "dc77451844e37f03f5c559e3b8529b2b48d381d1"}],
"extras": [],
"files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}]
}
]


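The JSON form is intended for programmatic consumption. As an illustrative
sketch (not part of the recorded run, and assuming a python3 interpreter is
available on the test host), the per-path resolution state could be pulled out
of that output like this, which for the state above would print "file1 r" and
"file2 u":

$ hg debugmergestate -T json | python3 -c 'import json, sys; [print(f["path"], f["state"]) for f in json.load(sys.stdin)[0]["files"]]'
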
insert unsupported mandatory merge record
(the advisory record above uses a lowercase type letter, which a reader that
does not recognise it may skip; a mandatory record uses an uppercase letter, so
a reader that does not understand it must refuse the whole merge state, which
is what the aborts below show)

$ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X
$ hg debugmergestate
abort: unsupported merge state records: X
(see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
[255]
$ hg resolve -l
abort: unsupported merge state records: X
(see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
[255]
$ hg resolve -ma
abort: unsupported merge state records: X
(see https://mercurial-scm.org/wiki/MergeStateRecords for more information)
[255]
$ hg summary
warning: merge state has unsupported record types: X
parent: 2:57653b9f834a
append baz to files
parent: 1:dc77451844e3
append bar to files
branch: default
commit: 2 modified, 2 unknown (merge)
update: 2 new changesets (update)
phases: 5 draft

update --clean shouldn't abort on unsupported records

$ hg up -qC 1
$ hg debugmergestate
no merge state found

test crashed merge with empty mergestate

$ mkdir .hg/merge
$ touch .hg/merge/state

resolve -l should be empty

$ hg resolve -l

resolve -m can be configured to look for remaining conflict markers
$ hg up -qC 2
$ hg merge -q --tool=internal:merge 1
warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
[1]
$ hg resolve -l
U file1
U file2
$ echo 'remove markers' > file1
$ hg --config commands.resolve.mark-check=abort resolve -m
warning: the following files still have conflict markers:
file2
abort: conflict markers detected
(use --all to mark anyway)
[255]
$ hg resolve -l
U file1
U file2
Try with --all from the hint
$ hg --config commands.resolve.mark-check=abort resolve -m --all
warning: the following files still have conflict markers:
file2
(no more unresolved files)
$ hg resolve -l
R file1
R file2
Test option value 'warn'
$ hg resolve --unmark
$ hg resolve -l
U file1
U file2
$ hg --config commands.resolve.mark-check=warn resolve -m
warning: the following files still have conflict markers:
file2
(no more unresolved files)
$ hg resolve -l
R file1
R file2
If the file is already marked as resolved, we don't warn about it
$ hg resolve --unmark file1
$ hg resolve -l
U file1
R file2
$ hg --config commands.resolve.mark-check=warn resolve -m
(no more unresolved files)
$ hg resolve -l
R file1
R file2
If the user passes an invalid value, we treat it as 'none'.
$ hg resolve --unmark
$ hg resolve -l
U file1
U file2
$ hg --config commands.resolve.mark-check=nope resolve -m
(no more unresolved files)
$ hg resolve -l
R file1
R file2
Test explicitly setting the option to 'none'
$ hg resolve --unmark
$ hg resolve -l
U file1
U file2
$ hg --config commands.resolve.mark-check=none resolve -m
(no more unresolved files)
$ hg resolve -l
R file1
R file2
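All of the runs above select the behaviour with a one-off --config flag; to make
one of them the default, the same option can go in a config file. A sketch only,
not part of the recorded run ('abort' and 'warn' are the values exercised above,
and anything else falls back to 'none'):
$ cat >> $HGRCPATH << EOF
> [commands]
> resolve.mark-check = abort
> EOF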
Test marking an explicit file as resolved; this should not abort (since
there's no --force flag, we have no way of combining --all with a filename)
$ hg resolve --unmark
$ hg resolve -l
U file1
U file2
(This downgrades to a warning since an explicit file was specified).
$ hg --config commands.resolve.mark-check=abort resolve -m file2
warning: the following files still have conflict markers:
file2
$ hg resolve -l
U file1
R file2
Testing the --re-merge flag
$ hg resolve --unmark file1
$ hg resolve -l
U file1
R file2
$ hg resolve --mark --re-merge
abort: too many actions specified
[255]
$ hg resolve --re-merge --all
merging file1
warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
[1]
Explicit re-merge
$ hg resolve --unmark file1
$ hg resolve --config commands.resolve.explicit-re-merge=1 --all
abort: no action specified
(use --mark, --unmark, --list or --re-merge)
[255]
$ hg resolve --config commands.resolve.explicit-re-merge=1 --re-merge --all
merging file1
warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
[1]

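The per-command --config flags above map to an ordinary configuration entry; a
minimal sketch (not part of the recorded run) that makes the explicit re-merge
behaviour the default:

$ cat >> $HGRCPATH << EOF
> [commands]
> resolve.explicit-re-merge = 1
> EOF
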
$ cd ..

======================================================
Test 'hg resolve' confirm config option functionality |
======================================================
$ cat >> $HGRCPATH << EOF
> [extensions]
> rebase=
> EOF

$ hg init repo2
$ cd repo2

$ echo boss > boss
$ hg ci -Am "add boss"
adding boss

$ for emp in emp1 emp2 emp3; do echo work > $emp; done;
$ hg ci -Aqm "added emp1 emp2 emp3"

$ hg up 0
0 files updated, 0 files merged, 3 files removed, 0 files unresolved

$ for emp in emp1 emp2 emp3; do echo nowork > $emp; done;
$ hg ci -Aqm "added lazy emp1 emp2 emp3"

$ hg log -GT "{rev} {node|short} {firstline(desc)}\n"
@ 2 0acfd4a49af0 added lazy emp1 emp2 emp3
|
| o 1 f30f98a8181f added emp1 emp2 emp3
|/
o 0 88660038d466 add boss

$ hg rebase -s 1 -d 2
rebasing 1:f30f98a8181f "added emp1 emp2 emp3"
merging emp1
merging emp2
merging emp3
warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
[1]

Test when commands.resolve.confirm config option is not set:
===========================================================
$ hg resolve --all
merging emp1
merging emp2
merging emp3
warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
[1]

Test when config option is set:
==============================
$ cat >> .hg/hgrc << EOF
> [ui]
> interactive = True
> [commands]
> resolve.confirm = True
> EOF

$ hg resolve
abort: no files or directories specified
(use --all to re-merge all unresolved files)
[255]
$ hg resolve --all << EOF
> n
> EOF
re-merge all unresolved files (yn)? n
abort: user quit
[255]

$ hg resolve --all << EOF
> y
> EOF
re-merge all unresolved files (yn)? y
merging emp1
merging emp2
merging emp3
warning: conflicts while merging emp1! (edit, then use 'hg resolve --mark')
warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
[1]

Test that commands.resolve.confirm respects the --mark option (only when no pattern args are given):
===============================================================================================

$ hg resolve -m emp1
$ hg resolve -l
R emp1
U emp2
U emp3

$ hg resolve -m << EOF
> n
> EOF
mark all unresolved files as resolved (yn)? n
abort: user quit
[255]

$ hg resolve -m << EOF
> y
> EOF
mark all unresolved files as resolved (yn)? y
(no more unresolved files)
continue: hg rebase --continue
$ hg resolve -l
R emp1
R emp2
R emp3

Test that commands.resolve.confirm respects the --unmark option (only when no pattern args are given):
=================================================================================================

$ hg resolve -u emp1

$ hg resolve -l
U emp1
R emp2
R emp3

$ hg resolve -u << EOF
> n
> EOF
mark all resolved files as unresolved (yn)? n
abort: user quit
[255]

$ hg resolve -m << EOF
> y
> EOF
mark all unresolved files as resolved (yn)? y
(no more unresolved files)
continue: hg rebase --continue

$ hg resolve -l
R emp1
R emp2
R emp3

$ hg rebase --abort
rebase aborted

Done with commands.resolve.confirm tests:
$ cd ..

Test that commands.resolve.mark-check works even if there are deleted files:
$ hg init resolve-deleted
$ cd resolve-deleted
$ echo r0 > file1
$ hg ci -qAm r0
$ echo r1 > file1
$ hg ci -qm r1
$ hg co -qr 0
$ hg rm file1
$ hg ci -qm "r2 (delete file1)"

(At this point we have r0 creating file1, and sibling commits r1 and r2, which
modify and delete file1, respectively)

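For orientation, this is the same shape as the repo2 history earlier; an
illustrative sketch of roughly what the graph log would show at this point,
with the working copy on the head that deleted file1 (hashes omitted, since
this is not part of the recorded test):

$ hg log -GT "{rev} {firstline(desc)}\n"
@ 2 r2 (delete file1)
|
| o 1 r1
|/
o 0 r0
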
$ hg merge -r 1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve --list
U file1
Because we left it as 'unresolved' the file should still exist.
$ [ -f file1 ] || echo "File does not exist?"
BC behavior: `hg resolve --mark` accepts that the file is still there, and
doesn't have a problem with this situation.
$ hg resolve --mark --config commands.resolve.mark-check=abort
(no more unresolved files)
$ hg resolve --list
R file1
The file is still there:
$ [ -f file1 ] || echo "File does not exist?"
Let's check mark-check=warn:
$ hg resolve --unmark file1
$ hg resolve --mark --config commands.resolve.mark-check=warn
(no more unresolved files)
$ hg resolve --list
R file1
The file is still there:
$ [ -f file1 ] || echo "File does not exist?"
Let's resolve the issue by deleting the file via `hg resolve`
$ hg resolve --unmark file1
$ echo 'd' | hg resolve file1 --config ui.interactive=1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
What do you want to do? d
(no more unresolved files)
$ hg resolve --list
R file1
The file is deleted:
$ [ -f file1 ] && echo "File still exists?" || true
Doing `hg resolve --mark` doesn't break now that the file is missing:
$ hg resolve --mark --config commands.resolve.mark-check=abort
(no more unresolved files)
$ hg resolve --mark --config commands.resolve.mark-check=warn
(no more unresolved files)
Resurrect the file, and delete it outside of hg:
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
U file1
$ rm file1
$ hg resolve --mark --config commands.resolve.mark-check=abort
(no more unresolved files)
$ hg resolve --list
R file1
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in local [working copy] but was modified in other [merge rev].
You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
U file1
$ rm file1
$ hg resolve --mark --config commands.resolve.mark-check=warn
(no more unresolved files)
$ hg resolve --list
R file1


For completeness, let's try that in the opposite direction (merging r2 into r1,
instead of r1 into r2):
$ hg update -qCr 1
$ hg merge -r 2
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
You can use (c)hanged version, (d)elete, or leave (u)nresolved.
What do you want to do? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
[1]
$ hg resolve --list
U file1
Because we left it as 'unresolved' the file should still exist.
$ [ -f file1 ] || echo "File does not exist?"
BC behavior: `hg resolve --mark` accepts that the file is still there, and
doesn't have a problem with this situation.
$ hg resolve --mark --config commands.resolve.mark-check=abort
(no more unresolved files)
$ hg resolve --list
R file1
The file is still there:
$ [ -f file1 ] || echo "File does not exist?"
Let's check mark-check=warn:
$ hg resolve --unmark file1
$ hg resolve --mark --config commands.resolve.mark-check=warn
(no more unresolved files)
$ hg resolve --list
R file1
The file is still there:
$ [ -f file1 ] || echo "File does not exist?"
Let's resolve the issue by deleting the file via `hg resolve`
$ hg resolve --unmark file1
$ echo 'd' | hg resolve file1 --config ui.interactive=1
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
You can use (c)hanged version, (d)elete, or leave (u)nresolved.
What do you want to do? d
(no more unresolved files)
$ hg resolve --list
R file1
The file is deleted:
$ [ -f file1 ] && echo "File still exists?" || true
Doing `hg resolve --mark` doesn't break now that the file is missing:
$ hg resolve --mark --config commands.resolve.mark-check=abort
(no more unresolved files)
$ hg resolve --mark --config commands.resolve.mark-check=warn
(no more unresolved files)
Resurrect the file, and delete it outside of hg:
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
You can use (c)hanged version, (d)elete, or leave (u)nresolved.
What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
U file1
$ rm file1
$ hg resolve --mark --config commands.resolve.mark-check=abort
(no more unresolved files)
$ hg resolve --list
R file1
$ hg resolve --unmark file1
$ hg resolve file1
file 'file1' was deleted in other [merge rev] but was modified in local [working copy].
You can use (c)hanged version, (d)elete, or leave (u)nresolved.
What do you want to do? u
[1]
$ [ -f file1 ] || echo "File does not exist?"
$ hg resolve --list
U file1
$ rm file1
$ hg resolve --mark --config commands.resolve.mark-check=warn
(no more unresolved files)
$ hg resolve --list
R file1

$ cd ..