templatespec: create a factory function for each type there is...
Martin von Zweigbergk
r45824:8cce9f77 default
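The only functional change in the patchbomb.py hunk below is on line 210, where the old call formatter.templatespec(b'', templater.unquotestring(tmpl), None) is replaced by the new factory formatter.literal_templatespec(templater.unquotestring(tmpl)). As a rough, hypothetical sketch of the idea only (the real factory lives in mercurial/formatter.py and may be implemented differently, and the templatespec shape shown here is an assumption inferred from the three positional arguments of the old call site):

    # Hypothetical sketch of the factory introduced by this changeset; the
    # actual mercurial.formatter code may differ in details.
    import collections

    # Assumed shape of templatespec: a simple (ref, tmpl, mapfile) record,
    # matching the three positional arguments of the old call.
    templatespec = collections.namedtuple(
        'templatespec', ['ref', 'tmpl', 'mapfile']
    )

    def literal_templatespec(tmpl):
        """Build a templatespec for a literal template string.

        Wraps the old templatespec(b'', tmpl, None) pattern so that callers
        such as patchbomb._formatflags no longer pass placeholder arguments.
        """
        return templatespec(b'', tmpl, None)

    # Caller side, as in _formatflags() after this change:
    # spec = literal_templatespec(templater.unquotestring(tmpl))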
@@ -1,979 +1,979 @@
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 you do not supply one via configuration or the command line. You can
48 you do not supply one via configuration or the command line. You can
49 override this to never prompt by configuring an empty value::
49 override this to never prompt by configuring an empty value::
50
50
51 [email]
51 [email]
52 cc =
52 cc =
53
53
54 You can control the default inclusion of an introduction message with the
54 You can control the default inclusion of an introduction message with the
55 ``patchbomb.intro`` configuration option. The configuration is always
55 ``patchbomb.intro`` configuration option. The configuration is always
56 overwritten by command line flags like --intro and --desc::
56 overwritten by command line flags like --intro and --desc::
57
57
58 [patchbomb]
58 [patchbomb]
59 intro=auto # include introduction message if more than 1 patch (default)
59 intro=auto # include introduction message if more than 1 patch (default)
60 intro=never # never include an introduction message
60 intro=never # never include an introduction message
61 intro=always # always include an introduction message
61 intro=always # always include an introduction message
62
62
63 You can specify a template for flags to be added in subject prefixes. Flags
63 You can specify a template for flags to be added in subject prefixes. Flags
64 specified by --flag option are exported as ``{flags}`` keyword::
64 specified by --flag option are exported as ``{flags}`` keyword::
65
65
66 [patchbomb]
66 [patchbomb]
67 flagtemplate = "{separate(' ',
67 flagtemplate = "{separate(' ',
68 ifeq(branch, 'default', '', branch|upper),
68 ifeq(branch, 'default', '', branch|upper),
69 flags)}"
69 flags)}"
70
70
71 You can set patchbomb to always ask for confirmation by setting
71 You can set patchbomb to always ask for confirmation by setting
72 ``patchbomb.confirm`` to true.
72 ``patchbomb.confirm`` to true.
73 '''
73 '''
74 from __future__ import absolute_import
74 from __future__ import absolute_import
75
75
76 import email.encoders as emailencoders
76 import email.encoders as emailencoders
77 import email.mime.base as emimebase
77 import email.mime.base as emimebase
78 import email.mime.multipart as emimemultipart
78 import email.mime.multipart as emimemultipart
79 import email.utils as eutil
79 import email.utils as eutil
80 import errno
80 import errno
81 import os
81 import os
82 import socket
82 import socket
83
83
84 from mercurial.i18n import _
84 from mercurial.i18n import _
85 from mercurial.pycompat import open
85 from mercurial.pycompat import open
86 from mercurial import (
86 from mercurial import (
87 cmdutil,
87 cmdutil,
88 commands,
88 commands,
89 encoding,
89 encoding,
90 error,
90 error,
91 formatter,
91 formatter,
92 hg,
92 hg,
93 mail,
93 mail,
94 node as nodemod,
94 node as nodemod,
95 patch,
95 patch,
96 pycompat,
96 pycompat,
97 registrar,
97 registrar,
98 scmutil,
98 scmutil,
99 templater,
99 templater,
100 util,
100 util,
101 )
101 )
102 from mercurial.utils import dateutil
102 from mercurial.utils import dateutil
103
103
104 stringio = util.stringio
104 stringio = util.stringio
105
105
106 cmdtable = {}
106 cmdtable = {}
107 command = registrar.command(cmdtable)
107 command = registrar.command(cmdtable)
108
108
109 configtable = {}
109 configtable = {}
110 configitem = registrar.configitem(configtable)
110 configitem = registrar.configitem(configtable)
111
111
112 configitem(
112 configitem(
113 b'patchbomb', b'bundletype', default=None,
113 b'patchbomb', b'bundletype', default=None,
114 )
114 )
115 configitem(
115 configitem(
116 b'patchbomb', b'bcc', default=None,
116 b'patchbomb', b'bcc', default=None,
117 )
117 )
118 configitem(
118 configitem(
119 b'patchbomb', b'cc', default=None,
119 b'patchbomb', b'cc', default=None,
120 )
120 )
121 configitem(
121 configitem(
122 b'patchbomb', b'confirm', default=False,
122 b'patchbomb', b'confirm', default=False,
123 )
123 )
124 configitem(
124 configitem(
125 b'patchbomb', b'flagtemplate', default=None,
125 b'patchbomb', b'flagtemplate', default=None,
126 )
126 )
127 configitem(
127 configitem(
128 b'patchbomb', b'from', default=None,
128 b'patchbomb', b'from', default=None,
129 )
129 )
130 configitem(
130 configitem(
131 b'patchbomb', b'intro', default=b'auto',
131 b'patchbomb', b'intro', default=b'auto',
132 )
132 )
133 configitem(
133 configitem(
134 b'patchbomb', b'publicurl', default=None,
134 b'patchbomb', b'publicurl', default=None,
135 )
135 )
136 configitem(
136 configitem(
137 b'patchbomb', b'reply-to', default=None,
137 b'patchbomb', b'reply-to', default=None,
138 )
138 )
139 configitem(
139 configitem(
140 b'patchbomb', b'to', default=None,
140 b'patchbomb', b'to', default=None,
141 )
141 )
142
142
143 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
143 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
144 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
144 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
145 # be specifying the version(s) of Mercurial they are tested with, or
145 # be specifying the version(s) of Mercurial they are tested with, or
146 # leave the attribute unspecified.
146 # leave the attribute unspecified.
147 testedwith = b'ships-with-hg-core'
147 testedwith = b'ships-with-hg-core'
148
148
149
149
150 def _addpullheader(seq, ctx):
150 def _addpullheader(seq, ctx):
151 """Add a header pointing to a public URL where the changeset is available
151 """Add a header pointing to a public URL where the changeset is available
152 """
152 """
153 repo = ctx.repo()
153 repo = ctx.repo()
154 # experimental config: patchbomb.publicurl
154 # experimental config: patchbomb.publicurl
155 # waiting for some logic that check that the changeset are available on the
155 # waiting for some logic that check that the changeset are available on the
156 # destination before patchbombing anything.
156 # destination before patchbombing anything.
157 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
157 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
158 if publicurl:
158 if publicurl:
159 return b'Available At %s\n# hg pull %s -r %s' % (
159 return b'Available At %s\n# hg pull %s -r %s' % (
160 publicurl,
160 publicurl,
161 publicurl,
161 publicurl,
162 ctx,
162 ctx,
163 )
163 )
164 return None
164 return None
165
165
166
166
167 def uisetup(ui):
167 def uisetup(ui):
168 cmdutil.extraexport.append(b'pullurl')
168 cmdutil.extraexport.append(b'pullurl')
169 cmdutil.extraexportmap[b'pullurl'] = _addpullheader
169 cmdutil.extraexportmap[b'pullurl'] = _addpullheader
170
170
171
171
172 def reposetup(ui, repo):
172 def reposetup(ui, repo):
173 if not repo.local():
173 if not repo.local():
174 return
174 return
175 repo._wlockfreeprefix.add(b'last-email.txt')
175 repo._wlockfreeprefix.add(b'last-email.txt')
176
176
177
177
178 def prompt(ui, prompt, default=None, rest=b':'):
178 def prompt(ui, prompt, default=None, rest=b':'):
179 if default:
179 if default:
180 prompt += b' [%s]' % default
180 prompt += b' [%s]' % default
181 return ui.prompt(prompt + rest, default)
181 return ui.prompt(prompt + rest, default)
182
182
183
183
184 def introwanted(ui, opts, number):
184 def introwanted(ui, opts, number):
185 '''is an introductory message apparently wanted?'''
185 '''is an introductory message apparently wanted?'''
186 introconfig = ui.config(b'patchbomb', b'intro')
186 introconfig = ui.config(b'patchbomb', b'intro')
187 if opts.get(b'intro') or opts.get(b'desc'):
187 if opts.get(b'intro') or opts.get(b'desc'):
188 intro = True
188 intro = True
189 elif introconfig == b'always':
189 elif introconfig == b'always':
190 intro = True
190 intro = True
191 elif introconfig == b'never':
191 elif introconfig == b'never':
192 intro = False
192 intro = False
193 elif introconfig == b'auto':
193 elif introconfig == b'auto':
194 intro = number > 1
194 intro = number > 1
195 else:
195 else:
196 ui.write_err(
196 ui.write_err(
197 _(b'warning: invalid patchbomb.intro value "%s"\n') % introconfig
197 _(b'warning: invalid patchbomb.intro value "%s"\n') % introconfig
198 )
198 )
199 ui.write_err(_(b'(should be one of always, never, auto)\n'))
199 ui.write_err(_(b'(should be one of always, never, auto)\n'))
200 intro = number > 1
200 intro = number > 1
201 return intro
201 return intro
202
202
203
203
204 def _formatflags(ui, repo, rev, flags):
204 def _formatflags(ui, repo, rev, flags):
205 """build flag string optionally by template"""
205 """build flag string optionally by template"""
206 tmpl = ui.config(b'patchbomb', b'flagtemplate')
206 tmpl = ui.config(b'patchbomb', b'flagtemplate')
207 if not tmpl:
207 if not tmpl:
208 return b' '.join(flags)
208 return b' '.join(flags)
209 out = util.stringio()
209 out = util.stringio()
210 spec = formatter.templatespec(b'', templater.unquotestring(tmpl), None)
210 spec = formatter.literal_templatespec(templater.unquotestring(tmpl))
211 with formatter.templateformatter(ui, out, b'patchbombflag', {}, spec) as fm:
211 with formatter.templateformatter(ui, out, b'patchbombflag', {}, spec) as fm:
212 fm.startitem()
212 fm.startitem()
213 fm.context(ctx=repo[rev])
213 fm.context(ctx=repo[rev])
214 fm.write(b'flags', b'%s', fm.formatlist(flags, name=b'flag'))
214 fm.write(b'flags', b'%s', fm.formatlist(flags, name=b'flag'))
215 return out.getvalue()
215 return out.getvalue()
216
216
217
217
218 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
218 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
219 """build prefix to patch subject"""
219 """build prefix to patch subject"""
220 flag = _formatflags(ui, repo, rev, flags)
220 flag = _formatflags(ui, repo, rev, flags)
221 if flag:
221 if flag:
222 flag = b' ' + flag
222 flag = b' ' + flag
223
223
224 if not numbered:
224 if not numbered:
225 return b'[PATCH%s]' % flag
225 return b'[PATCH%s]' % flag
226 else:
226 else:
227 tlen = len(b"%d" % total)
227 tlen = len(b"%d" % total)
228 return b'[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
228 return b'[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
229
229
230
230
231 def makepatch(
231 def makepatch(
232 ui,
232 ui,
233 repo,
233 repo,
234 rev,
234 rev,
235 patchlines,
235 patchlines,
236 opts,
236 opts,
237 _charsets,
237 _charsets,
238 idx,
238 idx,
239 total,
239 total,
240 numbered,
240 numbered,
241 patchname=None,
241 patchname=None,
242 ):
242 ):
243
243
244 desc = []
244 desc = []
245 node = None
245 node = None
246 body = b''
246 body = b''
247
247
248 for line in patchlines:
248 for line in patchlines:
249 if line.startswith(b'#'):
249 if line.startswith(b'#'):
250 if line.startswith(b'# Node ID'):
250 if line.startswith(b'# Node ID'):
251 node = line.split()[-1]
251 node = line.split()[-1]
252 continue
252 continue
253 if line.startswith(b'diff -r') or line.startswith(b'diff --git'):
253 if line.startswith(b'diff -r') or line.startswith(b'diff --git'):
254 break
254 break
255 desc.append(line)
255 desc.append(line)
256
256
257 if not patchname and not node:
257 if not patchname and not node:
258 raise ValueError
258 raise ValueError
259
259
260 if opts.get(b'attach') and not opts.get(b'body'):
260 if opts.get(b'attach') and not opts.get(b'body'):
261 body = (
261 body = (
262 b'\n'.join(desc[1:]).strip()
262 b'\n'.join(desc[1:]).strip()
263 or b'Patch subject is complete summary.'
263 or b'Patch subject is complete summary.'
264 )
264 )
265 body += b'\n\n\n'
265 body += b'\n\n\n'
266
266
267 if opts.get(b'plain'):
267 if opts.get(b'plain'):
268 while patchlines and patchlines[0].startswith(b'# '):
268 while patchlines and patchlines[0].startswith(b'# '):
269 patchlines.pop(0)
269 patchlines.pop(0)
270 if patchlines:
270 if patchlines:
271 patchlines.pop(0)
271 patchlines.pop(0)
272 while patchlines and not patchlines[0].strip():
272 while patchlines and not patchlines[0].strip():
273 patchlines.pop(0)
273 patchlines.pop(0)
274
274
275 ds = patch.diffstat(patchlines)
275 ds = patch.diffstat(patchlines)
276 if opts.get(b'diffstat'):
276 if opts.get(b'diffstat'):
277 body += ds + b'\n\n'
277 body += ds + b'\n\n'
278
278
279 addattachment = opts.get(b'attach') or opts.get(b'inline')
279 addattachment = opts.get(b'attach') or opts.get(b'inline')
280 if not addattachment or opts.get(b'body'):
280 if not addattachment or opts.get(b'body'):
281 body += b'\n'.join(patchlines)
281 body += b'\n'.join(patchlines)
282
282
283 if addattachment:
283 if addattachment:
284 msg = emimemultipart.MIMEMultipart()
284 msg = emimemultipart.MIMEMultipart()
285 if body:
285 if body:
286 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(b'test')))
286 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(b'test')))
287 p = mail.mimetextpatch(
287 p = mail.mimetextpatch(
288 b'\n'.join(patchlines), 'x-patch', opts.get(b'test')
288 b'\n'.join(patchlines), 'x-patch', opts.get(b'test')
289 )
289 )
290 binnode = nodemod.bin(node)
290 binnode = nodemod.bin(node)
291 # if node is mq patch, it will have the patch file's name as a tag
291 # if node is mq patch, it will have the patch file's name as a tag
292 if not patchname:
292 if not patchname:
293 patchtags = [
293 patchtags = [
294 t
294 t
295 for t in repo.nodetags(binnode)
295 for t in repo.nodetags(binnode)
296 if t.endswith(b'.patch') or t.endswith(b'.diff')
296 if t.endswith(b'.patch') or t.endswith(b'.diff')
297 ]
297 ]
298 if patchtags:
298 if patchtags:
299 patchname = patchtags[0]
299 patchname = patchtags[0]
300 elif total > 1:
300 elif total > 1:
301 patchname = cmdutil.makefilename(
301 patchname = cmdutil.makefilename(
302 repo[node], b'%b-%n.patch', seqno=idx, total=total
302 repo[node], b'%b-%n.patch', seqno=idx, total=total
303 )
303 )
304 else:
304 else:
305 patchname = cmdutil.makefilename(repo[node], b'%b.patch')
305 patchname = cmdutil.makefilename(repo[node], b'%b.patch')
306 disposition = r'inline'
306 disposition = r'inline'
307 if opts.get(b'attach'):
307 if opts.get(b'attach'):
308 disposition = r'attachment'
308 disposition = r'attachment'
309 p['Content-Disposition'] = (
309 p['Content-Disposition'] = (
310 disposition + '; filename=' + encoding.strfromlocal(patchname)
310 disposition + '; filename=' + encoding.strfromlocal(patchname)
311 )
311 )
312 msg.attach(p)
312 msg.attach(p)
313 else:
313 else:
314 msg = mail.mimetextpatch(body, display=opts.get(b'test'))
314 msg = mail.mimetextpatch(body, display=opts.get(b'test'))
315
315
316 prefix = _formatprefix(
316 prefix = _formatprefix(
317 ui, repo, rev, opts.get(b'flag'), idx, total, numbered
317 ui, repo, rev, opts.get(b'flag'), idx, total, numbered
318 )
318 )
319 subj = desc[0].strip().rstrip(b'. ')
319 subj = desc[0].strip().rstrip(b'. ')
320 if not numbered:
320 if not numbered:
321 subj = b' '.join([prefix, opts.get(b'subject') or subj])
321 subj = b' '.join([prefix, opts.get(b'subject') or subj])
322 else:
322 else:
323 subj = b' '.join([prefix, subj])
323 subj = b' '.join([prefix, subj])
324 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(b'test'))
324 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(b'test'))
325 msg['X-Mercurial-Node'] = pycompat.sysstr(node)
325 msg['X-Mercurial-Node'] = pycompat.sysstr(node)
326 msg['X-Mercurial-Series-Index'] = '%i' % idx
326 msg['X-Mercurial-Series-Index'] = '%i' % idx
327 msg['X-Mercurial-Series-Total'] = '%i' % total
327 msg['X-Mercurial-Series-Total'] = '%i' % total
328 return msg, subj, ds
328 return msg, subj, ds
329
329
330
330
331 def _getpatches(repo, revs, **opts):
331 def _getpatches(repo, revs, **opts):
332 """return a list of patches for a list of revisions
332 """return a list of patches for a list of revisions
333
333
334 Each patch in the list is itself a list of lines.
334 Each patch in the list is itself a list of lines.
335 """
335 """
336 ui = repo.ui
336 ui = repo.ui
337 prev = repo[b'.'].rev()
337 prev = repo[b'.'].rev()
338 for r in revs:
338 for r in revs:
339 if r == prev and (repo[None].files() or repo[None].deleted()):
339 if r == prev and (repo[None].files() or repo[None].deleted()):
340 ui.warn(_(b'warning: working directory has uncommitted changes\n'))
340 ui.warn(_(b'warning: working directory has uncommitted changes\n'))
341 output = stringio()
341 output = stringio()
342 cmdutil.exportfile(
342 cmdutil.exportfile(
343 repo, [r], output, opts=patch.difffeatureopts(ui, opts, git=True)
343 repo, [r], output, opts=patch.difffeatureopts(ui, opts, git=True)
344 )
344 )
345 yield output.getvalue().split(b'\n')
345 yield output.getvalue().split(b'\n')
346
346
347
347
348 def _getbundle(repo, dest, **opts):
348 def _getbundle(repo, dest, **opts):
349 """return a bundle containing changesets missing in "dest"
349 """return a bundle containing changesets missing in "dest"
350
350
351 The `opts` keyword-arguments are the same as the one accepted by the
351 The `opts` keyword-arguments are the same as the one accepted by the
352 `bundle` command.
352 `bundle` command.
353
353
354 The bundle is a returned as a single in-memory binary blob.
354 The bundle is a returned as a single in-memory binary blob.
355 """
355 """
356 ui = repo.ui
356 ui = repo.ui
357 tmpdir = pycompat.mkdtemp(prefix=b'hg-email-bundle-')
357 tmpdir = pycompat.mkdtemp(prefix=b'hg-email-bundle-')
358 tmpfn = os.path.join(tmpdir, b'bundle')
358 tmpfn = os.path.join(tmpdir, b'bundle')
359 btype = ui.config(b'patchbomb', b'bundletype')
359 btype = ui.config(b'patchbomb', b'bundletype')
360 if btype:
360 if btype:
361 opts['type'] = btype
361 opts['type'] = btype
362 try:
362 try:
363 commands.bundle(ui, repo, tmpfn, dest, **opts)
363 commands.bundle(ui, repo, tmpfn, dest, **opts)
364 return util.readfile(tmpfn)
364 return util.readfile(tmpfn)
365 finally:
365 finally:
366 try:
366 try:
367 os.unlink(tmpfn)
367 os.unlink(tmpfn)
368 except OSError:
368 except OSError:
369 pass
369 pass
370 os.rmdir(tmpdir)
370 os.rmdir(tmpdir)
371
371
372
372
373 def _getdescription(repo, defaultbody, sender, **opts):
373 def _getdescription(repo, defaultbody, sender, **opts):
374 """obtain the body of the introduction message and return it
374 """obtain the body of the introduction message and return it
375
375
376 This is also used for the body of email with an attached bundle.
376 This is also used for the body of email with an attached bundle.
377
377
378 The body can be obtained either from the command line option or entered by
378 The body can be obtained either from the command line option or entered by
379 the user through the editor.
379 the user through the editor.
380 """
380 """
381 ui = repo.ui
381 ui = repo.ui
382 if opts.get('desc'):
382 if opts.get('desc'):
383 body = open(opts.get('desc')).read()
383 body = open(opts.get('desc')).read()
384 else:
384 else:
385 ui.write(
385 ui.write(
386 _(b'\nWrite the introductory message for the patch series.\n\n')
386 _(b'\nWrite the introductory message for the patch series.\n\n')
387 )
387 )
388 body = ui.edit(
388 body = ui.edit(
389 defaultbody, sender, repopath=repo.path, action=b'patchbombbody'
389 defaultbody, sender, repopath=repo.path, action=b'patchbombbody'
390 )
390 )
391 # Save series description in case sendmail fails
391 # Save series description in case sendmail fails
392 msgfile = repo.vfs(b'last-email.txt', b'wb')
392 msgfile = repo.vfs(b'last-email.txt', b'wb')
393 msgfile.write(body)
393 msgfile.write(body)
394 msgfile.close()
394 msgfile.close()
395 return body
395 return body
396
396
397
397
398 def _getbundlemsgs(repo, sender, bundle, **opts):
398 def _getbundlemsgs(repo, sender, bundle, **opts):
399 """Get the full email for sending a given bundle
399 """Get the full email for sending a given bundle
400
400
401 This function returns a list of "email" tuples (subject, content, None).
401 This function returns a list of "email" tuples (subject, content, None).
402 The list is always one message long in that case.
402 The list is always one message long in that case.
403 """
403 """
404 ui = repo.ui
404 ui = repo.ui
405 _charsets = mail._charsets(ui)
405 _charsets = mail._charsets(ui)
406 subj = opts.get('subject') or prompt(
406 subj = opts.get('subject') or prompt(
407 ui, b'Subject:', b'A bundle for your repository'
407 ui, b'Subject:', b'A bundle for your repository'
408 )
408 )
409
409
410 body = _getdescription(repo, b'', sender, **opts)
410 body = _getdescription(repo, b'', sender, **opts)
411 msg = emimemultipart.MIMEMultipart()
411 msg = emimemultipart.MIMEMultipart()
412 if body:
412 if body:
413 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
413 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
414 datapart = emimebase.MIMEBase('application', 'x-mercurial-bundle')
414 datapart = emimebase.MIMEBase('application', 'x-mercurial-bundle')
415 datapart.set_payload(bundle)
415 datapart.set_payload(bundle)
416 bundlename = b'%s.hg' % opts.get('bundlename', b'bundle')
416 bundlename = b'%s.hg' % opts.get('bundlename', b'bundle')
417 datapart.add_header(
417 datapart.add_header(
418 'Content-Disposition',
418 'Content-Disposition',
419 'attachment',
419 'attachment',
420 filename=encoding.strfromlocal(bundlename),
420 filename=encoding.strfromlocal(bundlename),
421 )
421 )
422 emailencoders.encode_base64(datapart)
422 emailencoders.encode_base64(datapart)
423 msg.attach(datapart)
423 msg.attach(datapart)
424 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
424 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
425 return [(msg, subj, None)]
425 return [(msg, subj, None)]
426
426
427
427
428 def _makeintro(repo, sender, revs, patches, **opts):
428 def _makeintro(repo, sender, revs, patches, **opts):
429 """make an introduction email, asking the user for content if needed
429 """make an introduction email, asking the user for content if needed
430
430
431 email is returned as (subject, body, cumulative-diffstat)"""
431 email is returned as (subject, body, cumulative-diffstat)"""
432 ui = repo.ui
432 ui = repo.ui
433 _charsets = mail._charsets(ui)
433 _charsets = mail._charsets(ui)
434
434
435 # use the last revision which is likely to be a bookmarked head
435 # use the last revision which is likely to be a bookmarked head
436 prefix = _formatprefix(
436 prefix = _formatprefix(
437 ui, repo, revs.last(), opts.get('flag'), 0, len(patches), numbered=True
437 ui, repo, revs.last(), opts.get('flag'), 0, len(patches), numbered=True
438 )
438 )
439 subj = opts.get('subject') or prompt(
439 subj = opts.get('subject') or prompt(
440 ui, b'(optional) Subject: ', rest=prefix, default=b''
440 ui, b'(optional) Subject: ', rest=prefix, default=b''
441 )
441 )
442 if not subj:
442 if not subj:
443 return None # skip intro if the user doesn't bother
443 return None # skip intro if the user doesn't bother
444
444
445 subj = prefix + b' ' + subj
445 subj = prefix + b' ' + subj
446
446
447 body = b''
447 body = b''
448 if opts.get('diffstat'):
448 if opts.get('diffstat'):
449 # generate a cumulative diffstat of the whole patch series
449 # generate a cumulative diffstat of the whole patch series
450 diffstat = patch.diffstat(sum(patches, []))
450 diffstat = patch.diffstat(sum(patches, []))
451 body = b'\n' + diffstat
451 body = b'\n' + diffstat
452 else:
452 else:
453 diffstat = None
453 diffstat = None
454
454
455 body = _getdescription(repo, body, sender, **opts)
455 body = _getdescription(repo, body, sender, **opts)
456 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
456 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
457 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
457 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
458 return (msg, subj, diffstat)
458 return (msg, subj, diffstat)
459
459
460
460
461 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
461 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
462 """return a list of emails from a list of patches
462 """return a list of emails from a list of patches
463
463
464 This involves introduction message creation if necessary.
464 This involves introduction message creation if necessary.
465
465
466 This function returns a list of "email" tuples (subject, content, None).
466 This function returns a list of "email" tuples (subject, content, None).
467 """
467 """
468 bytesopts = pycompat.byteskwargs(opts)
468 bytesopts = pycompat.byteskwargs(opts)
469 ui = repo.ui
469 ui = repo.ui
470 _charsets = mail._charsets(ui)
470 _charsets = mail._charsets(ui)
471 patches = list(_getpatches(repo, revs, **opts))
471 patches = list(_getpatches(repo, revs, **opts))
472 msgs = []
472 msgs = []
473
473
474 ui.write(_(b'this patch series consists of %d patches.\n\n') % len(patches))
474 ui.write(_(b'this patch series consists of %d patches.\n\n') % len(patches))
475
475
476 # build the intro message, or skip it if the user declines
476 # build the intro message, or skip it if the user declines
477 if introwanted(ui, bytesopts, len(patches)):
477 if introwanted(ui, bytesopts, len(patches)):
478 msg = _makeintro(repo, sender, revs, patches, **opts)
478 msg = _makeintro(repo, sender, revs, patches, **opts)
479 if msg:
479 if msg:
480 msgs.append(msg)
480 msgs.append(msg)
481
481
482 # are we going to send more than one message?
482 # are we going to send more than one message?
483 numbered = len(msgs) + len(patches) > 1
483 numbered = len(msgs) + len(patches) > 1
484
484
485 # now generate the actual patch messages
485 # now generate the actual patch messages
486 name = None
486 name = None
487 assert len(revs) == len(patches)
487 assert len(revs) == len(patches)
488 for i, (r, p) in enumerate(zip(revs, patches)):
488 for i, (r, p) in enumerate(zip(revs, patches)):
489 if patchnames:
489 if patchnames:
490 name = patchnames[i]
490 name = patchnames[i]
491 msg = makepatch(
491 msg = makepatch(
492 ui,
492 ui,
493 repo,
493 repo,
494 r,
494 r,
495 p,
495 p,
496 bytesopts,
496 bytesopts,
497 _charsets,
497 _charsets,
498 i + 1,
498 i + 1,
499 len(patches),
499 len(patches),
500 numbered,
500 numbered,
501 name,
501 name,
502 )
502 )
503 msgs.append(msg)
503 msgs.append(msg)
504
504
505 return msgs
505 return msgs
506
506
507
507
508 def _getoutgoing(repo, dest, revs):
508 def _getoutgoing(repo, dest, revs):
509 '''Return the revisions present locally but not in dest'''
509 '''Return the revisions present locally but not in dest'''
510 ui = repo.ui
510 ui = repo.ui
511 url = ui.expandpath(dest or b'default-push', dest or b'default')
511 url = ui.expandpath(dest or b'default-push', dest or b'default')
512 url = hg.parseurl(url)[0]
512 url = hg.parseurl(url)[0]
513 ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
513 ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
514
514
515 revs = [r for r in revs if r >= 0]
515 revs = [r for r in revs if r >= 0]
516 if not revs:
516 if not revs:
517 revs = [repo.changelog.tiprev()]
517 revs = [repo.changelog.tiprev()]
518 revs = repo.revs(b'outgoing(%s) and ::%ld', dest or b'', revs)
518 revs = repo.revs(b'outgoing(%s) and ::%ld', dest or b'', revs)
519 if not revs:
519 if not revs:
520 ui.status(_(b"no changes found\n"))
520 ui.status(_(b"no changes found\n"))
521 return revs
521 return revs
522
522
523
523
524 def _msgid(node, timestamp):
524 def _msgid(node, timestamp):
525 try:
525 try:
526 hostname = encoding.strfromlocal(encoding.environ[b'HGHOSTNAME'])
526 hostname = encoding.strfromlocal(encoding.environ[b'HGHOSTNAME'])
527 except KeyError:
527 except KeyError:
528 hostname = socket.getfqdn()
528 hostname = socket.getfqdn()
529 return '<%s.%d@%s>' % (node, timestamp, hostname)
529 return '<%s.%d@%s>' % (node, timestamp, hostname)
530
530
531
531
532 emailopts = [
532 emailopts = [
533 (b'', b'body', None, _(b'send patches as inline message text (default)')),
533 (b'', b'body', None, _(b'send patches as inline message text (default)')),
534 (b'a', b'attach', None, _(b'send patches as attachments')),
534 (b'a', b'attach', None, _(b'send patches as attachments')),
535 (b'i', b'inline', None, _(b'send patches as inline attachments')),
535 (b'i', b'inline', None, _(b'send patches as inline attachments')),
536 (
536 (
537 b'',
537 b'',
538 b'bcc',
538 b'bcc',
539 [],
539 [],
540 _(b'email addresses of blind carbon copy recipients'),
540 _(b'email addresses of blind carbon copy recipients'),
541 _(b'EMAIL'),
541 _(b'EMAIL'),
542 ),
542 ),
543 (b'c', b'cc', [], _(b'email addresses of copy recipients'), _(b'EMAIL')),
543 (b'c', b'cc', [], _(b'email addresses of copy recipients'), _(b'EMAIL')),
544 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
544 (b'', b'confirm', None, _(b'ask for confirmation before sending')),
545 (b'd', b'diffstat', None, _(b'add diffstat output to messages')),
545 (b'd', b'diffstat', None, _(b'add diffstat output to messages')),
546 (
546 (
547 b'',
547 b'',
548 b'date',
548 b'date',
549 b'',
549 b'',
550 _(b'use the given date as the sending date'),
550 _(b'use the given date as the sending date'),
551 _(b'DATE'),
551 _(b'DATE'),
552 ),
552 ),
553 (
553 (
554 b'',
554 b'',
555 b'desc',
555 b'desc',
556 b'',
556 b'',
557 _(b'use the given file as the series description'),
557 _(b'use the given file as the series description'),
558 _(b'FILE'),
558 _(b'FILE'),
559 ),
559 ),
560 (b'f', b'from', b'', _(b'email address of sender'), _(b'EMAIL')),
560 (b'f', b'from', b'', _(b'email address of sender'), _(b'EMAIL')),
561 (b'n', b'test', None, _(b'print messages that would be sent')),
561 (b'n', b'test', None, _(b'print messages that would be sent')),
562 (
562 (
563 b'm',
563 b'm',
564 b'mbox',
564 b'mbox',
565 b'',
565 b'',
566 _(b'write messages to mbox file instead of sending them'),
566 _(b'write messages to mbox file instead of sending them'),
567 _(b'FILE'),
567 _(b'FILE'),
568 ),
568 ),
569 (
569 (
570 b'',
570 b'',
571 b'reply-to',
571 b'reply-to',
572 [],
572 [],
573 _(b'email addresses replies should be sent to'),
573 _(b'email addresses replies should be sent to'),
574 _(b'EMAIL'),
574 _(b'EMAIL'),
575 ),
575 ),
576 (
576 (
577 b's',
577 b's',
578 b'subject',
578 b'subject',
579 b'',
579 b'',
580 _(b'subject of first message (intro or single patch)'),
580 _(b'subject of first message (intro or single patch)'),
581 _(b'TEXT'),
581 _(b'TEXT'),
582 ),
582 ),
583 (
583 (
584 b'',
584 b'',
585 b'in-reply-to',
585 b'in-reply-to',
586 b'',
586 b'',
587 _(b'message identifier to reply to'),
587 _(b'message identifier to reply to'),
588 _(b'MSGID'),
588 _(b'MSGID'),
589 ),
589 ),
590 (b'', b'flag', [], _(b'flags to add in subject prefixes'), _(b'FLAG')),
590 (b'', b'flag', [], _(b'flags to add in subject prefixes'), _(b'FLAG')),
591 (b't', b'to', [], _(b'email addresses of recipients'), _(b'EMAIL')),
591 (b't', b'to', [], _(b'email addresses of recipients'), _(b'EMAIL')),
592 ]
592 ]
593
593
594
594
595 @command(
595 @command(
596 b'email',
596 b'email',
597 [
597 [
598 (b'g', b'git', None, _(b'use git extended diff format')),
598 (b'g', b'git', None, _(b'use git extended diff format')),
599 (b'', b'plain', None, _(b'omit hg patch header')),
599 (b'', b'plain', None, _(b'omit hg patch header')),
600 (
600 (
601 b'o',
601 b'o',
602 b'outgoing',
602 b'outgoing',
603 None,
603 None,
604 _(b'send changes not found in the target repository'),
604 _(b'send changes not found in the target repository'),
605 ),
605 ),
606 (
606 (
607 b'b',
607 b'b',
608 b'bundle',
608 b'bundle',
609 None,
609 None,
610 _(b'send changes not in target as a binary bundle'),
610 _(b'send changes not in target as a binary bundle'),
611 ),
611 ),
612 (
612 (
613 b'B',
613 b'B',
614 b'bookmark',
614 b'bookmark',
615 b'',
615 b'',
616 _(b'send changes only reachable by given bookmark'),
616 _(b'send changes only reachable by given bookmark'),
617 _(b'BOOKMARK'),
617 _(b'BOOKMARK'),
618 ),
618 ),
619 (
619 (
620 b'',
620 b'',
621 b'bundlename',
621 b'bundlename',
622 b'bundle',
622 b'bundle',
623 _(b'name of the bundle attachment file'),
623 _(b'name of the bundle attachment file'),
624 _(b'NAME'),
624 _(b'NAME'),
625 ),
625 ),
626 (b'r', b'rev', [], _(b'a revision to send'), _(b'REV')),
626 (b'r', b'rev', [], _(b'a revision to send'), _(b'REV')),
627 (
627 (
628 b'',
628 b'',
629 b'force',
629 b'force',
630 None,
630 None,
631 _(
631 _(
632 b'run even when remote repository is unrelated '
632 b'run even when remote repository is unrelated '
633 b'(with -b/--bundle)'
633 b'(with -b/--bundle)'
634 ),
634 ),
635 ),
635 ),
636 (
636 (
637 b'',
637 b'',
638 b'base',
638 b'base',
639 [],
639 [],
640 _(
640 _(
641 b'a base changeset to specify instead of a destination '
641 b'a base changeset to specify instead of a destination '
642 b'(with -b/--bundle)'
642 b'(with -b/--bundle)'
643 ),
643 ),
644 _(b'REV'),
644 _(b'REV'),
645 ),
645 ),
646 (
646 (
647 b'',
647 b'',
648 b'intro',
648 b'intro',
649 None,
649 None,
650 _(b'send an introduction email for a single patch'),
650 _(b'send an introduction email for a single patch'),
651 ),
651 ),
652 ]
652 ]
653 + emailopts
653 + emailopts
654 + cmdutil.remoteopts,
654 + cmdutil.remoteopts,
655 _(b'hg email [OPTION]... [DEST]...'),
655 _(b'hg email [OPTION]... [DEST]...'),
656 helpcategory=command.CATEGORY_IMPORT_EXPORT,
656 helpcategory=command.CATEGORY_IMPORT_EXPORT,
657 )
657 )
658 def email(ui, repo, *revs, **opts):
658 def email(ui, repo, *revs, **opts):
659 '''send changesets by email
659 '''send changesets by email
660
660
661 By default, diffs are sent in the format generated by
661 By default, diffs are sent in the format generated by
662 :hg:`export`, one per message. The series starts with a "[PATCH 0
662 :hg:`export`, one per message. The series starts with a "[PATCH 0
663 of N]" introduction, which describes the series as a whole.
663 of N]" introduction, which describes the series as a whole.
664
664
665 Each patch email has a Subject line of "[PATCH M of N] ...", using
665 Each patch email has a Subject line of "[PATCH M of N] ...", using
666 the first line of the changeset description as the subject text.
666 the first line of the changeset description as the subject text.
667 The message contains two or three parts. First, the changeset
667 The message contains two or three parts. First, the changeset
668 description.
668 description.
669
669
670 With the -d/--diffstat option, if the diffstat program is
670 With the -d/--diffstat option, if the diffstat program is
671 installed, the result of running diffstat on the patch is inserted.
671 installed, the result of running diffstat on the patch is inserted.
672
672
673 Finally, the patch itself, as generated by :hg:`export`.
673 Finally, the patch itself, as generated by :hg:`export`.
674
674
675 With the -d/--diffstat or --confirm options, you will be presented
675 With the -d/--diffstat or --confirm options, you will be presented
676 with a final summary of all messages and asked for confirmation before
676 with a final summary of all messages and asked for confirmation before
677 the messages are sent.
677 the messages are sent.
678
678
679 By default the patch is included as text in the email body for
679 By default the patch is included as text in the email body for
680 easy reviewing. Using the -a/--attach option will instead create
680 easy reviewing. Using the -a/--attach option will instead create
681 an attachment for the patch. With -i/--inline an inline attachment
681 an attachment for the patch. With -i/--inline an inline attachment
682 will be created. You can include a patch both as text in the email
682 will be created. You can include a patch both as text in the email
683 body and as a regular or an inline attachment by combining the
683 body and as a regular or an inline attachment by combining the
684 -a/--attach or -i/--inline with the --body option.
684 -a/--attach or -i/--inline with the --body option.
685
685
686 With -B/--bookmark changesets reachable by the given bookmark are
686 With -B/--bookmark changesets reachable by the given bookmark are
687 selected.
687 selected.
688
688
689 With -o/--outgoing, emails will be generated for patches not found
689 With -o/--outgoing, emails will be generated for patches not found
690 in the destination repository (or only those which are ancestors
690 in the destination repository (or only those which are ancestors
691 of the specified revisions if any are provided)
691 of the specified revisions if any are provided)
692
692
693 With -b/--bundle, changesets are selected as for --outgoing, but a
693 With -b/--bundle, changesets are selected as for --outgoing, but a
694 single email containing a binary Mercurial bundle as an attachment
694 single email containing a binary Mercurial bundle as an attachment
695 will be sent. Use the ``patchbomb.bundletype`` config option to
695 will be sent. Use the ``patchbomb.bundletype`` config option to
696 control the bundle type as with :hg:`bundle --type`.
696 control the bundle type as with :hg:`bundle --type`.
697
697
698 With -m/--mbox, instead of previewing each patchbomb message in a
698 With -m/--mbox, instead of previewing each patchbomb message in a
699 pager or sending the messages directly, it will create a UNIX
699 pager or sending the messages directly, it will create a UNIX
700 mailbox file with the patch emails. This mailbox file can be
700 mailbox file with the patch emails. This mailbox file can be
701 previewed with any mail user agent which supports UNIX mbox
701 previewed with any mail user agent which supports UNIX mbox
702 files.
702 files.
703
703
704 With -n/--test, all steps will run, but mail will not be sent.
704 With -n/--test, all steps will run, but mail will not be sent.
705 You will be prompted for an email recipient address, a subject and
705 You will be prompted for an email recipient address, a subject and
706 an introductory message describing the patches of your patchbomb.
706 an introductory message describing the patches of your patchbomb.
707 Then when all is done, patchbomb messages are displayed.
707 Then when all is done, patchbomb messages are displayed.
708
708
709 In case email sending fails, you will find a backup of your series
709 In case email sending fails, you will find a backup of your series
710 introductory message in ``.hg/last-email.txt``.
710 introductory message in ``.hg/last-email.txt``.
711
711
712 The default behavior of this command can be customized through
712 The default behavior of this command can be customized through
713 configuration. (See :hg:`help patchbomb` for details)
713 configuration. (See :hg:`help patchbomb` for details)
714
714
715 Examples::
715 Examples::
716
716
717 hg email -r 3000 # send patch 3000 only
717 hg email -r 3000 # send patch 3000 only
718 hg email -r 3000 -r 3001 # send patches 3000 and 3001
718 hg email -r 3000 -r 3001 # send patches 3000 and 3001
719 hg email -r 3000:3005 # send patches 3000 through 3005
719 hg email -r 3000:3005 # send patches 3000 through 3005
720 hg email 3000 # send patch 3000 (deprecated)
720 hg email 3000 # send patch 3000 (deprecated)
721
721
722 hg email -o # send all patches not in default
722 hg email -o # send all patches not in default
723 hg email -o DEST # send all patches not in DEST
723 hg email -o DEST # send all patches not in DEST
724 hg email -o -r 3000 # send all ancestors of 3000 not in default
724 hg email -o -r 3000 # send all ancestors of 3000 not in default
725 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
725 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
726
726
727 hg email -B feature # send all ancestors of feature bookmark
727 hg email -B feature # send all ancestors of feature bookmark
728
728
729 hg email -b # send bundle of all patches not in default
729 hg email -b # send bundle of all patches not in default
730 hg email -b DEST # send bundle of all patches not in DEST
730 hg email -b DEST # send bundle of all patches not in DEST
731 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
731 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
732 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
732 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
733
733
734 hg email -o -m mbox && # generate an mbox file...
734 hg email -o -m mbox && # generate an mbox file...
735 mutt -R -f mbox # ... and view it with mutt
735 mutt -R -f mbox # ... and view it with mutt
736 hg email -o -m mbox && # generate an mbox file ...
736 hg email -o -m mbox && # generate an mbox file ...
737 formail -s sendmail \\ # ... and use formail to send from the mbox
737 formail -s sendmail \\ # ... and use formail to send from the mbox
738 -bm -t < mbox # ... using sendmail
738 -bm -t < mbox # ... using sendmail
739
739
740 Before using this command, you will need to enable email in your
740 Before using this command, you will need to enable email in your
741 hgrc. See the [email] section in hgrc(5) for details.
741 hgrc. See the [email] section in hgrc(5) for details.
742 '''
742 '''
743 opts = pycompat.byteskwargs(opts)
743 opts = pycompat.byteskwargs(opts)
744
744
745 _charsets = mail._charsets(ui)
745 _charsets = mail._charsets(ui)
746
746
747 bundle = opts.get(b'bundle')
747 bundle = opts.get(b'bundle')
748 date = opts.get(b'date')
748 date = opts.get(b'date')
749 mbox = opts.get(b'mbox')
749 mbox = opts.get(b'mbox')
750 outgoing = opts.get(b'outgoing')
750 outgoing = opts.get(b'outgoing')
751 rev = opts.get(b'rev')
751 rev = opts.get(b'rev')
752 bookmark = opts.get(b'bookmark')
752 bookmark = opts.get(b'bookmark')
753
753
754 if not (opts.get(b'test') or mbox):
754 if not (opts.get(b'test') or mbox):
755 # really sending
755 # really sending
756 mail.validateconfig(ui)
756 mail.validateconfig(ui)
757
757
758 if not (revs or rev or outgoing or bundle or bookmark):
758 if not (revs or rev or outgoing or bundle or bookmark):
759 raise error.Abort(
759 raise error.Abort(
760 _(b'specify at least one changeset with -B, -r or -o')
760 _(b'specify at least one changeset with -B, -r or -o')
761 )
761 )
762
762
763 if outgoing and bundle:
763 if outgoing and bundle:
764 raise error.Abort(
764 raise error.Abort(
765 _(
765 _(
766 b"--outgoing mode always on with --bundle;"
766 b"--outgoing mode always on with --bundle;"
767 b" do not re-specify --outgoing"
767 b" do not re-specify --outgoing"
768 )
768 )
769 )
769 )
770 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
770 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
771
771
772 if outgoing or bundle:
772 if outgoing or bundle:
773 if len(revs) > 1:
773 if len(revs) > 1:
774 raise error.Abort(_(b"too many destinations"))
774 raise error.Abort(_(b"too many destinations"))
775 if revs:
775 if revs:
776 dest = revs[0]
776 dest = revs[0]
777 else:
777 else:
778 dest = None
778 dest = None
779 revs = []
779 revs = []
780
780
781 if rev:
781 if rev:
782 if revs:
782 if revs:
783 raise error.Abort(_(b'use only one form to specify the revision'))
783 raise error.Abort(_(b'use only one form to specify the revision'))
784 revs = rev
784 revs = rev
785 elif bookmark:
785 elif bookmark:
786 if bookmark not in repo._bookmarks:
786 if bookmark not in repo._bookmarks:
787 raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
787 raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
788 revs = scmutil.bookmarkrevs(repo, bookmark)
788 revs = scmutil.bookmarkrevs(repo, bookmark)
789
789
790 revs = scmutil.revrange(repo, revs)
790 revs = scmutil.revrange(repo, revs)
791 if outgoing:
791 if outgoing:
792 revs = _getoutgoing(repo, dest, revs)
792 revs = _getoutgoing(repo, dest, revs)
793 if bundle:
793 if bundle:
794 opts[b'revs'] = [b"%d" % r for r in revs]
794 opts[b'revs'] = [b"%d" % r for r in revs]
795
795
796 # check if revision exist on the public destination
796 # check if revision exist on the public destination
797 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
797 publicurl = repo.ui.config(b'patchbomb', b'publicurl')
798 if publicurl:
798 if publicurl:
799 repo.ui.debug(b'checking that revision exist in the public repo\n')
799 repo.ui.debug(b'checking that revision exist in the public repo\n')
800 try:
800 try:
801 publicpeer = hg.peer(repo, {}, publicurl)
801 publicpeer = hg.peer(repo, {}, publicurl)
802 except error.RepoError:
802 except error.RepoError:
803 repo.ui.write_err(
803 repo.ui.write_err(
804 _(b'unable to access public repo: %s\n') % publicurl
804 _(b'unable to access public repo: %s\n') % publicurl
805 )
805 )
806 raise
806 raise
807 if not publicpeer.capable(b'known'):
807 if not publicpeer.capable(b'known'):
808 repo.ui.debug(b'skipping existence checks: public repo too old\n')
808 repo.ui.debug(b'skipping existence checks: public repo too old\n')
809 else:
809 else:
810 out = [repo[r] for r in revs]
810 out = [repo[r] for r in revs]
811 known = publicpeer.known(h.node() for h in out)
811 known = publicpeer.known(h.node() for h in out)
812 missing = []
812 missing = []
813 for idx, h in enumerate(out):
813 for idx, h in enumerate(out):
814 if not known[idx]:
814 if not known[idx]:
815 missing.append(h)
815 missing.append(h)
816 if missing:
816 if missing:
817 if len(missing) > 1:
817 if len(missing) > 1:
818 msg = _(b'public "%s" is missing %s and %i others')
818 msg = _(b'public "%s" is missing %s and %i others')
819 msg %= (publicurl, missing[0], len(missing) - 1)
819 msg %= (publicurl, missing[0], len(missing) - 1)
820 else:
820 else:
821 msg = _(b'public url %s is missing %s')
821 msg = _(b'public url %s is missing %s')
822 msg %= (publicurl, missing[0])
822 msg %= (publicurl, missing[0])
823 missingrevs = [ctx.rev() for ctx in missing]
823 missingrevs = [ctx.rev() for ctx in missing]
824 revhint = b' '.join(
824 revhint = b' '.join(
825 b'-r %s' % h for h in repo.set(b'heads(%ld)', missingrevs)
825 b'-r %s' % h for h in repo.set(b'heads(%ld)', missingrevs)
826 )
826 )
827 hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
827 hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
828 raise error.Abort(msg, hint=hint)
828 raise error.Abort(msg, hint=hint)
829
829
830 # start
830 # start
831 if date:
831 if date:
832 start_time = dateutil.parsedate(date)
832 start_time = dateutil.parsedate(date)
833 else:
833 else:
834 start_time = dateutil.makedate()
834 start_time = dateutil.makedate()
835
835
836 def genmsgid(id):
836 def genmsgid(id):
837 return _msgid(id[:20], int(start_time[0]))
837 return _msgid(id[:20], int(start_time[0]))
838
838
839 # deprecated config: patchbomb.from
839 # deprecated config: patchbomb.from
840 sender = (
840 sender = (
841 opts.get(b'from')
841 opts.get(b'from')
842 or ui.config(b'email', b'from')
842 or ui.config(b'email', b'from')
843 or ui.config(b'patchbomb', b'from')
843 or ui.config(b'patchbomb', b'from')
844 or prompt(ui, b'From', ui.username())
844 or prompt(ui, b'From', ui.username())
845 )
845 )
846
846
847 if bundle:
847 if bundle:
848 stropts = pycompat.strkwargs(opts)
848 stropts = pycompat.strkwargs(opts)
849 bundledata = _getbundle(repo, dest, **stropts)
849 bundledata = _getbundle(repo, dest, **stropts)
850 bundleopts = stropts.copy()
850 bundleopts = stropts.copy()
851 bundleopts.pop('bundle', None) # already processed
851 bundleopts.pop('bundle', None) # already processed
852 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
852 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
853 else:
853 else:
854 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
854 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
855
855
856 showaddrs = []
856 showaddrs = []
857
857
858 def getaddrs(header, ask=False, default=None):
858 def getaddrs(header, ask=False, default=None):
859 configkey = header.lower()
859 configkey = header.lower()
860 opt = header.replace(b'-', b'_').lower()
860 opt = header.replace(b'-', b'_').lower()
861 addrs = opts.get(opt)
861 addrs = opts.get(opt)
862 if addrs:
862 if addrs:
863 showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
863 showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
864 return mail.addrlistencode(ui, addrs, _charsets, opts.get(b'test'))
864 return mail.addrlistencode(ui, addrs, _charsets, opts.get(b'test'))
865
865
866 # not on the command line: fallback to config and then maybe ask
866 # not on the command line: fallback to config and then maybe ask
867 addr = ui.config(b'email', configkey) or ui.config(
867 addr = ui.config(b'email', configkey) or ui.config(
868 b'patchbomb', configkey
868 b'patchbomb', configkey
869 )
869 )
870 if not addr:
870 if not addr:
871 specified = ui.hasconfig(b'email', configkey) or ui.hasconfig(
871 specified = ui.hasconfig(b'email', configkey) or ui.hasconfig(
872 b'patchbomb', configkey
872 b'patchbomb', configkey
873 )
873 )
874 if not specified and ask:
874 if not specified and ask:
875 addr = prompt(ui, header, default=default)
875 addr = prompt(ui, header, default=default)
876 if addr:
876 if addr:
877 showaddrs.append(b'%s: %s' % (header, addr))
877 showaddrs.append(b'%s: %s' % (header, addr))
878 return mail.addrlistencode(ui, [addr], _charsets, opts.get(b'test'))
878 return mail.addrlistencode(ui, [addr], _charsets, opts.get(b'test'))
879 elif default:
879 elif default:
880 return mail.addrlistencode(
880 return mail.addrlistencode(
881 ui, [default], _charsets, opts.get(b'test')
881 ui, [default], _charsets, opts.get(b'test')
882 )
882 )
883 return []
883 return []
884
884
885 to = getaddrs(b'To', ask=True)
885 to = getaddrs(b'To', ask=True)
886 if not to:
886 if not to:
887 # we can get here in non-interactive mode
887 # we can get here in non-interactive mode
888 raise error.Abort(_(b'no recipient addresses provided'))
888 raise error.Abort(_(b'no recipient addresses provided'))
889 cc = getaddrs(b'Cc', ask=True, default=b'')
889 cc = getaddrs(b'Cc', ask=True, default=b'')
890 bcc = getaddrs(b'Bcc')
890 bcc = getaddrs(b'Bcc')
891 replyto = getaddrs(b'Reply-To')
891 replyto = getaddrs(b'Reply-To')
892
892
893 confirm = ui.configbool(b'patchbomb', b'confirm')
893 confirm = ui.configbool(b'patchbomb', b'confirm')
894 confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))
894 confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))
895
895
896 if confirm:
896 if confirm:
897 ui.write(_(b'\nFinal summary:\n\n'), label=b'patchbomb.finalsummary')
897 ui.write(_(b'\nFinal summary:\n\n'), label=b'patchbomb.finalsummary')
898 ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
898 ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
899 for addr in showaddrs:
899 for addr in showaddrs:
900 ui.write(b'%s\n' % addr, label=b'patchbomb.to')
900 ui.write(b'%s\n' % addr, label=b'patchbomb.to')
901 for m, subj, ds in msgs:
901 for m, subj, ds in msgs:
902 ui.write((b'Subject: %s\n' % subj), label=b'patchbomb.subject')
902 ui.write((b'Subject: %s\n' % subj), label=b'patchbomb.subject')
903 if ds:
903 if ds:
904 ui.write(ds, label=b'patchbomb.diffstats')
904 ui.write(ds, label=b'patchbomb.diffstats')
905 ui.write(b'\n')
905 ui.write(b'\n')
906 if ui.promptchoice(
906 if ui.promptchoice(
907 _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')
907 _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')
908 ):
908 ):
909 raise error.Abort(_(b'patchbomb canceled'))
909 raise error.Abort(_(b'patchbomb canceled'))
910
910
911 ui.write(b'\n')
911 ui.write(b'\n')
912
912
913 parent = opts.get(b'in_reply_to') or None
913 parent = opts.get(b'in_reply_to') or None
914 # angle brackets may be omitted, they're not semantically part of the msg-id
914 # angle brackets may be omitted, they're not semantically part of the msg-id
915 if parent is not None:
915 if parent is not None:
916 parent = encoding.strfromlocal(parent)
916 parent = encoding.strfromlocal(parent)
917 if not parent.startswith('<'):
917 if not parent.startswith('<'):
918 parent = '<' + parent
918 parent = '<' + parent
919 if not parent.endswith('>'):
919 if not parent.endswith('>'):
920 parent += '>'
920 parent += '>'
921
921
922 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
922 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
923 sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
923 sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
924 sendmail = None
924 sendmail = None
925 firstpatch = None
925 firstpatch = None
926 progress = ui.makeprogress(
926 progress = ui.makeprogress(
927 _(b'sending'), unit=_(b'emails'), total=len(msgs)
927 _(b'sending'), unit=_(b'emails'), total=len(msgs)
928 )
928 )
929 for i, (m, subj, ds) in enumerate(msgs):
929 for i, (m, subj, ds) in enumerate(msgs):
930 try:
930 try:
931 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
931 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
932 if not firstpatch:
932 if not firstpatch:
933 firstpatch = m['Message-Id']
933 firstpatch = m['Message-Id']
934 m['X-Mercurial-Series-Id'] = firstpatch
934 m['X-Mercurial-Series-Id'] = firstpatch
935 except TypeError:
935 except TypeError:
936 m['Message-Id'] = genmsgid('patchbomb')
936 m['Message-Id'] = genmsgid('patchbomb')
937 if parent:
937 if parent:
938 m['In-Reply-To'] = parent
938 m['In-Reply-To'] = parent
939 m['References'] = parent
939 m['References'] = parent
940 if not parent or 'X-Mercurial-Node' not in m:
940 if not parent or 'X-Mercurial-Node' not in m:
941 parent = m['Message-Id']
941 parent = m['Message-Id']
942
942
943 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version().decode()
943 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version().decode()
944 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
944 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
945
945
946 start_time = (start_time[0] + 1, start_time[1])
946 start_time = (start_time[0] + 1, start_time[1])
947 m['From'] = sender
947 m['From'] = sender
948 m['To'] = ', '.join(to)
948 m['To'] = ', '.join(to)
949 if cc:
949 if cc:
950 m['Cc'] = ', '.join(cc)
950 m['Cc'] = ', '.join(cc)
951 if bcc:
951 if bcc:
952 m['Bcc'] = ', '.join(bcc)
952 m['Bcc'] = ', '.join(bcc)
953 if replyto:
953 if replyto:
954 m['Reply-To'] = ', '.join(replyto)
954 m['Reply-To'] = ', '.join(replyto)
955 if opts.get(b'test'):
955 if opts.get(b'test'):
956 ui.status(_(b'displaying '), subj, b' ...\n')
956 ui.status(_(b'displaying '), subj, b' ...\n')
957 ui.pager(b'email')
957 ui.pager(b'email')
958 generator = mail.Generator(ui, mangle_from_=False)
958 generator = mail.Generator(ui, mangle_from_=False)
959 try:
959 try:
960 generator.flatten(m, False)
960 generator.flatten(m, False)
961 ui.write(b'\n')
961 ui.write(b'\n')
962 except IOError as inst:
962 except IOError as inst:
963 if inst.errno != errno.EPIPE:
963 if inst.errno != errno.EPIPE:
964 raise
964 raise
965 else:
965 else:
966 if not sendmail:
966 if not sendmail:
967 sendmail = mail.connect(ui, mbox=mbox)
967 sendmail = mail.connect(ui, mbox=mbox)
968 ui.status(_(b'sending '), subj, b' ...\n')
968 ui.status(_(b'sending '), subj, b' ...\n')
969 progress.update(i, item=subj)
969 progress.update(i, item=subj)
970 if not mbox:
970 if not mbox:
971 # Exim does not remove the Bcc field
971 # Exim does not remove the Bcc field
972 del m['Bcc']
972 del m['Bcc']
973 fp = stringio()
973 fp = stringio()
974 generator = mail.Generator(fp, mangle_from_=False)
974 generator = mail.Generator(fp, mangle_from_=False)
975 generator.flatten(m, False)
975 generator.flatten(m, False)
976 alldests = to + bcc + cc
976 alldests = to + bcc + cc
977 sendmail(sender_addr, alldests, fp.getvalue())
977 sendmail(sender_addr, alldests, fp.getvalue())
978
978
979 progress.complete()
979 progress.complete()
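
The loop above gives each message a Message-Id, records the first one as the X-Mercurial-Series-Id, and points In-Reply-To/References at the parent so the series shows up threaded in mail readers. A minimal standalone sketch of that header scheme, using only the stdlib email package and made-up message ids (not the extension's genmsgid()):

from email.message import EmailMessage

# Build a three-message series; the message ids are invented for the example.
msgs = [EmailMessage() for _ in range(3)]
parent = None
for i, m in enumerate(msgs):
    m['Message-Id'] = '<patch-%d@example.invalid>' % i
    m['Subject'] = '[PATCH %d of %d]' % (i, len(msgs) - 1)
    if parent:
        # Later messages reply to, and reference, the first one in the series.
        m['In-Reply-To'] = parent
        m['References'] = parent
    if parent is None:
        parent = m['Message-Id']

for m in msgs:
    print(m['Subject'], '->', m.get('In-Reply-To'))
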
@@ -1,4215 +1,4215 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy as copymod
10 import copy as copymod
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22 from .pycompat import (
22 from .pycompat import (
23 getattr,
23 getattr,
24 open,
24 open,
25 setattr,
25 setattr,
26 )
26 )
27 from .thirdparty import attr
27 from .thirdparty import attr
28
28
29 from . import (
29 from . import (
30 bookmarks,
30 bookmarks,
31 changelog,
31 changelog,
32 copies,
32 copies,
33 crecord as crecordmod,
33 crecord as crecordmod,
34 dirstateguard,
34 dirstateguard,
35 encoding,
35 encoding,
36 error,
36 error,
37 formatter,
37 formatter,
38 logcmdutil,
38 logcmdutil,
39 match as matchmod,
39 match as matchmod,
40 merge as mergemod,
40 merge as mergemod,
41 mergestate as mergestatemod,
41 mergestate as mergestatemod,
42 mergeutil,
42 mergeutil,
43 obsolete,
43 obsolete,
44 patch,
44 patch,
45 pathutil,
45 pathutil,
46 phases,
46 phases,
47 pycompat,
47 pycompat,
48 repair,
48 repair,
49 revlog,
49 revlog,
50 rewriteutil,
50 rewriteutil,
51 scmutil,
51 scmutil,
52 smartset,
52 smartset,
53 state as statemod,
53 state as statemod,
54 subrepoutil,
54 subrepoutil,
55 templatekw,
55 templatekw,
56 templater,
56 templater,
57 util,
57 util,
58 vfs as vfsmod,
58 vfs as vfsmod,
59 )
59 )
60
60
61 from .utils import (
61 from .utils import (
62 dateutil,
62 dateutil,
63 stringutil,
63 stringutil,
64 )
64 )
65
65
66 if pycompat.TYPE_CHECKING:
66 if pycompat.TYPE_CHECKING:
67 from typing import (
67 from typing import (
68 Any,
68 Any,
69 Dict,
69 Dict,
70 )
70 )
71
71
72 for t in (Any, Dict):
72 for t in (Any, Dict):
73 assert t
73 assert t
74
74
75 stringio = util.stringio
75 stringio = util.stringio
76
76
77 # templates of common command options
77 # templates of common command options
78
78
79 dryrunopts = [
79 dryrunopts = [
80 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
80 (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
81 ]
81 ]
82
82
83 confirmopts = [
83 confirmopts = [
84 (b'', b'confirm', None, _(b'ask before applying actions')),
84 (b'', b'confirm', None, _(b'ask before applying actions')),
85 ]
85 ]
86
86
87 remoteopts = [
87 remoteopts = [
88 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
88 (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
89 (
89 (
90 b'',
90 b'',
91 b'remotecmd',
91 b'remotecmd',
92 b'',
92 b'',
93 _(b'specify hg command to run on the remote side'),
93 _(b'specify hg command to run on the remote side'),
94 _(b'CMD'),
94 _(b'CMD'),
95 ),
95 ),
96 (
96 (
97 b'',
97 b'',
98 b'insecure',
98 b'insecure',
99 None,
99 None,
100 _(b'do not verify server certificate (ignoring web.cacerts config)'),
100 _(b'do not verify server certificate (ignoring web.cacerts config)'),
101 ),
101 ),
102 ]
102 ]
103
103
104 walkopts = [
104 walkopts = [
105 (
105 (
106 b'I',
106 b'I',
107 b'include',
107 b'include',
108 [],
108 [],
109 _(b'include names matching the given patterns'),
109 _(b'include names matching the given patterns'),
110 _(b'PATTERN'),
110 _(b'PATTERN'),
111 ),
111 ),
112 (
112 (
113 b'X',
113 b'X',
114 b'exclude',
114 b'exclude',
115 [],
115 [],
116 _(b'exclude names matching the given patterns'),
116 _(b'exclude names matching the given patterns'),
117 _(b'PATTERN'),
117 _(b'PATTERN'),
118 ),
118 ),
119 ]
119 ]
120
120
121 commitopts = [
121 commitopts = [
122 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
122 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
123 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
123 (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
124 ]
124 ]
125
125
126 commitopts2 = [
126 commitopts2 = [
127 (
127 (
128 b'd',
128 b'd',
129 b'date',
129 b'date',
130 b'',
130 b'',
131 _(b'record the specified date as commit date'),
131 _(b'record the specified date as commit date'),
132 _(b'DATE'),
132 _(b'DATE'),
133 ),
133 ),
134 (
134 (
135 b'u',
135 b'u',
136 b'user',
136 b'user',
137 b'',
137 b'',
138 _(b'record the specified user as committer'),
138 _(b'record the specified user as committer'),
139 _(b'USER'),
139 _(b'USER'),
140 ),
140 ),
141 ]
141 ]
142
142
143 commitopts3 = [
143 commitopts3 = [
144 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
144 (b'D', b'currentdate', None, _(b'record the current date as commit date')),
145 (b'U', b'currentuser', None, _(b'record the current user as committer')),
145 (b'U', b'currentuser', None, _(b'record the current user as committer')),
146 ]
146 ]
147
147
148 formatteropts = [
148 formatteropts = [
149 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
149 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
150 ]
150 ]
151
151
152 templateopts = [
152 templateopts = [
153 (
153 (
154 b'',
154 b'',
155 b'style',
155 b'style',
156 b'',
156 b'',
157 _(b'display using template map file (DEPRECATED)'),
157 _(b'display using template map file (DEPRECATED)'),
158 _(b'STYLE'),
158 _(b'STYLE'),
159 ),
159 ),
160 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
160 (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
161 ]
161 ]
162
162
163 logopts = [
163 logopts = [
164 (b'p', b'patch', None, _(b'show patch')),
164 (b'p', b'patch', None, _(b'show patch')),
165 (b'g', b'git', None, _(b'use git extended diff format')),
165 (b'g', b'git', None, _(b'use git extended diff format')),
166 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
166 (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
167 (b'M', b'no-merges', None, _(b'do not show merges')),
167 (b'M', b'no-merges', None, _(b'do not show merges')),
168 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
168 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
169 (b'G', b'graph', None, _(b"show the revision DAG")),
169 (b'G', b'graph', None, _(b"show the revision DAG")),
170 ] + templateopts
170 ] + templateopts
171
171
172 diffopts = [
172 diffopts = [
173 (b'a', b'text', None, _(b'treat all files as text')),
173 (b'a', b'text', None, _(b'treat all files as text')),
174 (
174 (
175 b'g',
175 b'g',
176 b'git',
176 b'git',
177 None,
177 None,
178 _(b'use git extended diff format (DEFAULT: diff.git)'),
178 _(b'use git extended diff format (DEFAULT: diff.git)'),
179 ),
179 ),
180 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
180 (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
181 (b'', b'nodates', None, _(b'omit dates from diff headers')),
181 (b'', b'nodates', None, _(b'omit dates from diff headers')),
182 ]
182 ]
183
183
184 diffwsopts = [
184 diffwsopts = [
185 (
185 (
186 b'w',
186 b'w',
187 b'ignore-all-space',
187 b'ignore-all-space',
188 None,
188 None,
189 _(b'ignore white space when comparing lines'),
189 _(b'ignore white space when comparing lines'),
190 ),
190 ),
191 (
191 (
192 b'b',
192 b'b',
193 b'ignore-space-change',
193 b'ignore-space-change',
194 None,
194 None,
195 _(b'ignore changes in the amount of white space'),
195 _(b'ignore changes in the amount of white space'),
196 ),
196 ),
197 (
197 (
198 b'B',
198 b'B',
199 b'ignore-blank-lines',
199 b'ignore-blank-lines',
200 None,
200 None,
201 _(b'ignore changes whose lines are all blank'),
201 _(b'ignore changes whose lines are all blank'),
202 ),
202 ),
203 (
203 (
204 b'Z',
204 b'Z',
205 b'ignore-space-at-eol',
205 b'ignore-space-at-eol',
206 None,
206 None,
207 _(b'ignore changes in whitespace at EOL'),
207 _(b'ignore changes in whitespace at EOL'),
208 ),
208 ),
209 ]
209 ]
210
210
211 diffopts2 = (
211 diffopts2 = (
212 [
212 [
213 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
213 (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
214 (
214 (
215 b'p',
215 b'p',
216 b'show-function',
216 b'show-function',
217 None,
217 None,
218 _(
218 _(
219 b'show which function each change is in (DEFAULT: diff.showfunc)'
219 b'show which function each change is in (DEFAULT: diff.showfunc)'
220 ),
220 ),
221 ),
221 ),
222 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
222 (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
223 ]
223 ]
224 + diffwsopts
224 + diffwsopts
225 + [
225 + [
226 (
226 (
227 b'U',
227 b'U',
228 b'unified',
228 b'unified',
229 b'',
229 b'',
230 _(b'number of lines of context to show'),
230 _(b'number of lines of context to show'),
231 _(b'NUM'),
231 _(b'NUM'),
232 ),
232 ),
233 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
233 (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
234 (
234 (
235 b'',
235 b'',
236 b'root',
236 b'root',
237 b'',
237 b'',
238 _(b'produce diffs relative to subdirectory'),
238 _(b'produce diffs relative to subdirectory'),
239 _(b'DIR'),
239 _(b'DIR'),
240 ),
240 ),
241 ]
241 ]
242 )
242 )
243
243
244 mergetoolopts = [
244 mergetoolopts = [
245 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
245 (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
246 ]
246 ]
247
247
248 similarityopts = [
248 similarityopts = [
249 (
249 (
250 b's',
250 b's',
251 b'similarity',
251 b'similarity',
252 b'',
252 b'',
253 _(b'guess renamed files by similarity (0<=s<=100)'),
253 _(b'guess renamed files by similarity (0<=s<=100)'),
254 _(b'SIMILARITY'),
254 _(b'SIMILARITY'),
255 )
255 )
256 ]
256 ]
257
257
258 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
258 subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
259
259
260 debugrevlogopts = [
260 debugrevlogopts = [
261 (b'c', b'changelog', False, _(b'open changelog')),
261 (b'c', b'changelog', False, _(b'open changelog')),
262 (b'm', b'manifest', False, _(b'open manifest')),
262 (b'm', b'manifest', False, _(b'open manifest')),
263 (b'', b'dir', b'', _(b'open directory manifest')),
263 (b'', b'dir', b'', _(b'open directory manifest')),
264 ]
264 ]
265
265
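
As the tables above suggest, each entry is a plain flag tuple of (short name, long name, default, help text), with an optional value placeholder as a fifth element. A small standalone sketch of reading such tuples; the helper and sample options below are hypothetical, not part of Mercurial:

# Hypothetical option table in the same shape as the lists above.
myopts = [
    ('n', 'dry-run', None, 'do not perform actions, just print output'),
    ('T', 'template', '', 'display with template', 'TEMPLATE'),
]

def describe(opt):
    short, long_, default, helptext = opt[:4]
    value_label = opt[4] if len(opt) > 4 else None
    flag = '-%s/--%s' % (short, long_) if short else '--%s' % long_
    if value_label:
        flag += ' ' + value_label
    return '%-25s %s' % (flag, helptext)

for opt in myopts:
    print(describe(opt))
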
266 # special string such that everything below this line will be ignored in the
266 # special string such that everything below this line will be ignored in the
267 # editor text
267 # editor text
268 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
268 _linebelow = b"^HG: ------------------------ >8 ------------------------$"
269
269
270
270
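
_linebelow is a multi-line regex for the ">8" scissors marker: anything the user leaves below that line in the editor is meant to be discarded. A standalone sketch of that truncation with made-up editor text (this is not the helper cmdutil itself uses):

import re

# Same pattern as _linebelow above, kept as a bytes regex.
_linebelow = b"^HG: ------------------------ >8 ------------------------$"

marker = _linebelow[1:-1]          # the literal scissors line, minus ^ and $
editortext = b"commit message\n" + marker + b"\ndiff --git a/f b/f\n"

match = re.search(_linebelow, editortext, flags=re.MULTILINE)
message = editortext[: match.start()] if match else editortext
print(message.decode())            # only the text above the scissors survives
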
271 def check_at_most_one_arg(opts, *args):
271 def check_at_most_one_arg(opts, *args):
272 """abort if more than one of the arguments are in opts
272 """abort if more than one of the arguments are in opts
273
273
274 Returns the unique argument or None if none of them were specified.
274 Returns the unique argument or None if none of them were specified.
275 """
275 """
276
276
277 def to_display(name):
277 def to_display(name):
278 return pycompat.sysbytes(name).replace(b'_', b'-')
278 return pycompat.sysbytes(name).replace(b'_', b'-')
279
279
280 previous = None
280 previous = None
281 for x in args:
281 for x in args:
282 if opts.get(x):
282 if opts.get(x):
283 if previous:
283 if previous:
284 raise error.Abort(
284 raise error.Abort(
285 _(b'cannot specify both --%s and --%s')
285 _(b'cannot specify both --%s and --%s')
286 % (to_display(previous), to_display(x))
286 % (to_display(previous), to_display(x))
287 )
287 )
288 previous = x
288 previous = x
289 return previous
289 return previous
290
290
291
291
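
A standalone sketch of the same "at most one of these flags" check as check_at_most_one_arg(), using a plain dict and ValueError in place of byte-string opts and error.Abort:

def at_most_one(opts, *names):
    """Return the one truthy option name, or None; refuse two at once."""
    previous = None
    for name in names:
        if opts.get(name):
            if previous is not None:
                raise ValueError('cannot specify both --%s and --%s'
                                 % (previous, name))
            previous = name
    return previous

print(at_most_one({'date': '2020-01-01'}, 'date', 'currentdate'))   # date
# at_most_one({'date': '1', 'currentdate': True}, 'date', 'currentdate')
# would raise ValueError, mirroring the error.Abort above.
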
292 def check_incompatible_arguments(opts, first, others):
292 def check_incompatible_arguments(opts, first, others):
293 """abort if the first argument is given along with any of the others
293 """abort if the first argument is given along with any of the others
294
294
295 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
295 Unlike check_at_most_one_arg(), `others` are not mutually exclusive
296 among themselves, and they're passed as a single collection.
296 among themselves, and they're passed as a single collection.
297 """
297 """
298 for other in others:
298 for other in others:
299 check_at_most_one_arg(opts, first, other)
299 check_at_most_one_arg(opts, first, other)
300
300
301
301
302 def resolvecommitoptions(ui, opts):
302 def resolvecommitoptions(ui, opts):
303 """modify commit options dict to handle related options
303 """modify commit options dict to handle related options
304
304
305 The return value indicates whether ``rewrite.update-timestamp`` is the reason
305 The return value indicates whether ``rewrite.update-timestamp`` is the reason
306 the ``date`` option is set.
306 the ``date`` option is set.
307 """
307 """
308 check_at_most_one_arg(opts, b'date', b'currentdate')
308 check_at_most_one_arg(opts, b'date', b'currentdate')
309 check_at_most_one_arg(opts, b'user', b'currentuser')
309 check_at_most_one_arg(opts, b'user', b'currentuser')
310
310
311 datemaydiffer = False # date-only change should be ignored?
311 datemaydiffer = False # date-only change should be ignored?
312
312
313 if opts.get(b'currentdate'):
313 if opts.get(b'currentdate'):
314 opts[b'date'] = b'%d %d' % dateutil.makedate()
314 opts[b'date'] = b'%d %d' % dateutil.makedate()
315 elif (
315 elif (
316 not opts.get(b'date')
316 not opts.get(b'date')
317 and ui.configbool(b'rewrite', b'update-timestamp')
317 and ui.configbool(b'rewrite', b'update-timestamp')
318 and opts.get(b'currentdate') is None
318 and opts.get(b'currentdate') is None
319 ):
319 ):
320 opts[b'date'] = b'%d %d' % dateutil.makedate()
320 opts[b'date'] = b'%d %d' % dateutil.makedate()
321 datemaydiffer = True
321 datemaydiffer = True
322
322
323 if opts.get(b'currentuser'):
323 if opts.get(b'currentuser'):
324 opts[b'user'] = ui.username()
324 opts[b'user'] = ui.username()
325
325
326 return datemaydiffer
326 return datemaydiffer
327
327
328
328
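
A standalone sketch of the date-resolution order in resolvecommitoptions() above: an explicit --currentdate wins, otherwise the rewrite.update-timestamp config fills in "now" and the returned flag records that a date-only difference may be ignored. Here time.time() stands in for dateutil.makedate() and a plain bool for the config lookup:

import time

def resolve_date(opts, update_timestamp):
    """Return True when the date was filled in from config, not by the user."""
    datemaydiffer = False
    if opts.get('currentdate'):
        opts['date'] = '%d 0' % time.time()
    elif not opts.get('date') and update_timestamp:
        opts['date'] = '%d 0' % time.time()
        datemaydiffer = True
    return datemaydiffer

opts = {}
print(resolve_date(opts, update_timestamp=True), opts)   # True, date set to "now"
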
329 def checknotesize(ui, opts):
329 def checknotesize(ui, opts):
330 """ make sure note is of valid format """
330 """ make sure note is of valid format """
331
331
332 note = opts.get(b'note')
332 note = opts.get(b'note')
333 if not note:
333 if not note:
334 return
334 return
335
335
336 if len(note) > 255:
336 if len(note) > 255:
337 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
337 raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
338 if b'\n' in note:
338 if b'\n' in note:
339 raise error.Abort(_(b"note cannot contain a newline"))
339 raise error.Abort(_(b"note cannot contain a newline"))
340
340
341
341
342 def ishunk(x):
342 def ishunk(x):
343 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
343 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
344 return isinstance(x, hunkclasses)
344 return isinstance(x, hunkclasses)
345
345
346
346
347 def newandmodified(chunks, originalchunks):
347 def newandmodified(chunks, originalchunks):
348 newlyaddedandmodifiedfiles = set()
348 newlyaddedandmodifiedfiles = set()
349 alsorestore = set()
349 alsorestore = set()
350 for chunk in chunks:
350 for chunk in chunks:
351 if (
351 if (
352 ishunk(chunk)
352 ishunk(chunk)
353 and chunk.header.isnewfile()
353 and chunk.header.isnewfile()
354 and chunk not in originalchunks
354 and chunk not in originalchunks
355 ):
355 ):
356 newlyaddedandmodifiedfiles.add(chunk.header.filename())
356 newlyaddedandmodifiedfiles.add(chunk.header.filename())
357 alsorestore.update(
357 alsorestore.update(
358 set(chunk.header.files()) - {chunk.header.filename()}
358 set(chunk.header.files()) - {chunk.header.filename()}
359 )
359 )
360 return newlyaddedandmodifiedfiles, alsorestore
360 return newlyaddedandmodifiedfiles, alsorestore
361
361
362
362
363 def parsealiases(cmd):
363 def parsealiases(cmd):
364 return cmd.split(b"|")
364 return cmd.split(b"|")
365
365
366
366
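
Command-table keys bundle aliases into a single b"name|alias|..." string, so parsealiases() is just a split; the first entry is the canonical name. For example (the key below is made up):

entry = b"log|history"        # hypothetical command-table key
aliases = entry.split(b"|")
print(aliases[0], aliases)    # b'log' [b'log', b'history']
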
367 def setupwrapcolorwrite(ui):
367 def setupwrapcolorwrite(ui):
368 # wrap ui.write so diff output can be labeled/colorized
368 # wrap ui.write so diff output can be labeled/colorized
369 def wrapwrite(orig, *args, **kw):
369 def wrapwrite(orig, *args, **kw):
370 label = kw.pop('label', b'')
370 label = kw.pop('label', b'')
371 for chunk, l in patch.difflabel(lambda: args):
371 for chunk, l in patch.difflabel(lambda: args):
372 orig(chunk, label=label + l)
372 orig(chunk, label=label + l)
373
373
374 oldwrite = ui.write
374 oldwrite = ui.write
375
375
376 def wrap(*args, **kwargs):
376 def wrap(*args, **kwargs):
377 return wrapwrite(oldwrite, *args, **kwargs)
377 return wrapwrite(oldwrite, *args, **kwargs)
378
378
379 setattr(ui, 'write', wrap)
379 setattr(ui, 'write', wrap)
380 return oldwrite
380 return oldwrite
381
381
382
382
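
setupwrapcolorwrite() and recordfilter() rely on a wrap-then-restore pattern: swap ui.write for a labelling wrapper, keep the old callable, and put it back in a finally block. A toy standalone version (the ToyUI class and label text are made up):

class ToyUI(object):
    def write(self, text):
        print(text)

def setupwrap(ui):
    oldwrite = ui.write
    def wrapped(text):
        oldwrite('[labelled] ' + text)   # stand-in for patch.difflabel colouring
    ui.write = wrapped
    return oldwrite

ui = ToyUI()
oldwrite = setupwrap(ui)
try:
    ui.write('diff output')              # goes through the wrapper
finally:
    ui.write = oldwrite                  # always restore the original writer
ui.write('plain output')                 # back to the unwrapped behaviour
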
383 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
383 def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
384 try:
384 try:
385 if usecurses:
385 if usecurses:
386 if testfile:
386 if testfile:
387 recordfn = crecordmod.testdecorator(
387 recordfn = crecordmod.testdecorator(
388 testfile, crecordmod.testchunkselector
388 testfile, crecordmod.testchunkselector
389 )
389 )
390 else:
390 else:
391 recordfn = crecordmod.chunkselector
391 recordfn = crecordmod.chunkselector
392
392
393 return crecordmod.filterpatch(
393 return crecordmod.filterpatch(
394 ui, originalhunks, recordfn, operation
394 ui, originalhunks, recordfn, operation
395 )
395 )
396 except crecordmod.fallbackerror as e:
396 except crecordmod.fallbackerror as e:
397 ui.warn(b'%s\n' % e)
397 ui.warn(b'%s\n' % e)
398 ui.warn(_(b'falling back to text mode\n'))
398 ui.warn(_(b'falling back to text mode\n'))
399
399
400 return patch.filterpatch(ui, originalhunks, match, operation)
400 return patch.filterpatch(ui, originalhunks, match, operation)
401
401
402
402
403 def recordfilter(ui, originalhunks, match, operation=None):
403 def recordfilter(ui, originalhunks, match, operation=None):
404 """ Prompts the user to filter the originalhunks and return a list of
404 """ Prompts the user to filter the originalhunks and return a list of
405 selected hunks.
405 selected hunks.
406 *operation* is used to build ui messages to indicate to the user what
406 *operation* is used to build ui messages to indicate to the user what
407 kind of filtering they are doing: reverting, committing, shelving, etc.
407 kind of filtering they are doing: reverting, committing, shelving, etc.
408 (see patch.filterpatch).
408 (see patch.filterpatch).
409 """
409 """
410 usecurses = crecordmod.checkcurses(ui)
410 usecurses = crecordmod.checkcurses(ui)
411 testfile = ui.config(b'experimental', b'crecordtest')
411 testfile = ui.config(b'experimental', b'crecordtest')
412 oldwrite = setupwrapcolorwrite(ui)
412 oldwrite = setupwrapcolorwrite(ui)
413 try:
413 try:
414 newchunks, newopts = filterchunks(
414 newchunks, newopts = filterchunks(
415 ui, originalhunks, usecurses, testfile, match, operation
415 ui, originalhunks, usecurses, testfile, match, operation
416 )
416 )
417 finally:
417 finally:
418 ui.write = oldwrite
418 ui.write = oldwrite
419 return newchunks, newopts
419 return newchunks, newopts
420
420
421
421
422 def dorecord(
422 def dorecord(
423 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
423 ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
424 ):
424 ):
425 opts = pycompat.byteskwargs(opts)
425 opts = pycompat.byteskwargs(opts)
426 if not ui.interactive():
426 if not ui.interactive():
427 if cmdsuggest:
427 if cmdsuggest:
428 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
428 msg = _(b'running non-interactively, use %s instead') % cmdsuggest
429 else:
429 else:
430 msg = _(b'running non-interactively')
430 msg = _(b'running non-interactively')
431 raise error.Abort(msg)
431 raise error.Abort(msg)
432
432
433 # make sure username is set before going interactive
433 # make sure username is set before going interactive
434 if not opts.get(b'user'):
434 if not opts.get(b'user'):
435 ui.username() # raise exception, username not provided
435 ui.username() # raise exception, username not provided
436
436
437 def recordfunc(ui, repo, message, match, opts):
437 def recordfunc(ui, repo, message, match, opts):
438 """This is generic record driver.
438 """This is generic record driver.
439
439
440 Its job is to interactively filter local changes, and
440 Its job is to interactively filter local changes, and
441 accordingly prepare the working directory into a state in which the
441 accordingly prepare the working directory into a state in which the
442 job can be delegated to a non-interactive commit command such as
442 job can be delegated to a non-interactive commit command such as
443 'commit' or 'qrefresh'.
443 'commit' or 'qrefresh'.
444
444
445 After the actual job is done by the non-interactive command, the
445 After the actual job is done by the non-interactive command, the
446 working directory is restored to its original state.
446 working directory is restored to its original state.
447
447
448 In the end we'll record interesting changes, and everything else
448 In the end we'll record interesting changes, and everything else
449 will be left in place, so the user can continue working.
449 will be left in place, so the user can continue working.
450 """
450 """
451 if not opts.get(b'interactive-unshelve'):
451 if not opts.get(b'interactive-unshelve'):
452 checkunfinished(repo, commit=True)
452 checkunfinished(repo, commit=True)
453 wctx = repo[None]
453 wctx = repo[None]
454 merge = len(wctx.parents()) > 1
454 merge = len(wctx.parents()) > 1
455 if merge:
455 if merge:
456 raise error.Abort(
456 raise error.Abort(
457 _(
457 _(
458 b'cannot partially commit a merge '
458 b'cannot partially commit a merge '
459 b'(use "hg commit" instead)'
459 b'(use "hg commit" instead)'
460 )
460 )
461 )
461 )
462
462
463 def fail(f, msg):
463 def fail(f, msg):
464 raise error.Abort(b'%s: %s' % (f, msg))
464 raise error.Abort(b'%s: %s' % (f, msg))
465
465
466 force = opts.get(b'force')
466 force = opts.get(b'force')
467 if not force:
467 if not force:
468 match = matchmod.badmatch(match, fail)
468 match = matchmod.badmatch(match, fail)
469
469
470 status = repo.status(match=match)
470 status = repo.status(match=match)
471
471
472 overrides = {(b'ui', b'commitsubrepos'): True}
472 overrides = {(b'ui', b'commitsubrepos'): True}
473
473
474 with repo.ui.configoverride(overrides, b'record'):
474 with repo.ui.configoverride(overrides, b'record'):
475 # subrepoutil.precommit() modifies the status
475 # subrepoutil.precommit() modifies the status
476 tmpstatus = scmutil.status(
476 tmpstatus = scmutil.status(
477 copymod.copy(status.modified),
477 copymod.copy(status.modified),
478 copymod.copy(status.added),
478 copymod.copy(status.added),
479 copymod.copy(status.removed),
479 copymod.copy(status.removed),
480 copymod.copy(status.deleted),
480 copymod.copy(status.deleted),
481 copymod.copy(status.unknown),
481 copymod.copy(status.unknown),
482 copymod.copy(status.ignored),
482 copymod.copy(status.ignored),
483 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
483 copymod.copy(status.clean), # pytype: disable=wrong-arg-count
484 )
484 )
485
485
486 # Force allows -X subrepo to skip the subrepo.
486 # Force allows -X subrepo to skip the subrepo.
487 subs, commitsubs, newstate = subrepoutil.precommit(
487 subs, commitsubs, newstate = subrepoutil.precommit(
488 repo.ui, wctx, tmpstatus, match, force=True
488 repo.ui, wctx, tmpstatus, match, force=True
489 )
489 )
490 for s in subs:
490 for s in subs:
491 if s in commitsubs:
491 if s in commitsubs:
492 dirtyreason = wctx.sub(s).dirtyreason(True)
492 dirtyreason = wctx.sub(s).dirtyreason(True)
493 raise error.Abort(dirtyreason)
493 raise error.Abort(dirtyreason)
494
494
495 if not force:
495 if not force:
496 repo.checkcommitpatterns(wctx, match, status, fail)
496 repo.checkcommitpatterns(wctx, match, status, fail)
497 diffopts = patch.difffeatureopts(
497 diffopts = patch.difffeatureopts(
498 ui,
498 ui,
499 opts=opts,
499 opts=opts,
500 whitespace=True,
500 whitespace=True,
501 section=b'commands',
501 section=b'commands',
502 configprefix=b'commit.interactive.',
502 configprefix=b'commit.interactive.',
503 )
503 )
504 diffopts.nodates = True
504 diffopts.nodates = True
505 diffopts.git = True
505 diffopts.git = True
506 diffopts.showfunc = True
506 diffopts.showfunc = True
507 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
507 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
508 originalchunks = patch.parsepatch(originaldiff)
508 originalchunks = patch.parsepatch(originaldiff)
509 match = scmutil.match(repo[None], pats)
509 match = scmutil.match(repo[None], pats)
510
510
511 # 1. filter patch, since we are intending to apply subset of it
511 # 1. filter patch, since we are intending to apply subset of it
512 try:
512 try:
513 chunks, newopts = filterfn(ui, originalchunks, match)
513 chunks, newopts = filterfn(ui, originalchunks, match)
514 except error.PatchError as err:
514 except error.PatchError as err:
515 raise error.Abort(_(b'error parsing patch: %s') % err)
515 raise error.Abort(_(b'error parsing patch: %s') % err)
516 opts.update(newopts)
516 opts.update(newopts)
517
517
518 # We need to keep a backup of files that have been newly added and
518 # We need to keep a backup of files that have been newly added and
519 # modified during the recording process because there is a previous
519 # modified during the recording process because there is a previous
520 # version without the edit in the workdir. We also will need to restore
520 # version without the edit in the workdir. We also will need to restore
521 # files that were the sources of renames so that the patch application
521 # files that were the sources of renames so that the patch application
522 # works.
522 # works.
523 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
523 newlyaddedandmodifiedfiles, alsorestore = newandmodified(
524 chunks, originalchunks
524 chunks, originalchunks
525 )
525 )
526 contenders = set()
526 contenders = set()
527 for h in chunks:
527 for h in chunks:
528 try:
528 try:
529 contenders.update(set(h.files()))
529 contenders.update(set(h.files()))
530 except AttributeError:
530 except AttributeError:
531 pass
531 pass
532
532
533 changed = status.modified + status.added + status.removed
533 changed = status.modified + status.added + status.removed
534 newfiles = [f for f in changed if f in contenders]
534 newfiles = [f for f in changed if f in contenders]
535 if not newfiles:
535 if not newfiles:
536 ui.status(_(b'no changes to record\n'))
536 ui.status(_(b'no changes to record\n'))
537 return 0
537 return 0
538
538
539 modified = set(status.modified)
539 modified = set(status.modified)
540
540
541 # 2. backup changed files, so we can restore them in the end
541 # 2. backup changed files, so we can restore them in the end
542
542
543 if backupall:
543 if backupall:
544 tobackup = changed
544 tobackup = changed
545 else:
545 else:
546 tobackup = [
546 tobackup = [
547 f
547 f
548 for f in newfiles
548 for f in newfiles
549 if f in modified or f in newlyaddedandmodifiedfiles
549 if f in modified or f in newlyaddedandmodifiedfiles
550 ]
550 ]
551 backups = {}
551 backups = {}
552 if tobackup:
552 if tobackup:
553 backupdir = repo.vfs.join(b'record-backups')
553 backupdir = repo.vfs.join(b'record-backups')
554 try:
554 try:
555 os.mkdir(backupdir)
555 os.mkdir(backupdir)
556 except OSError as err:
556 except OSError as err:
557 if err.errno != errno.EEXIST:
557 if err.errno != errno.EEXIST:
558 raise
558 raise
559 try:
559 try:
560 # backup continues
560 # backup continues
561 for f in tobackup:
561 for f in tobackup:
562 fd, tmpname = pycompat.mkstemp(
562 fd, tmpname = pycompat.mkstemp(
563 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
563 prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
564 )
564 )
565 os.close(fd)
565 os.close(fd)
566 ui.debug(b'backup %r as %r\n' % (f, tmpname))
566 ui.debug(b'backup %r as %r\n' % (f, tmpname))
567 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
567 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
568 backups[f] = tmpname
568 backups[f] = tmpname
569
569
570 fp = stringio()
570 fp = stringio()
571 for c in chunks:
571 for c in chunks:
572 fname = c.filename()
572 fname = c.filename()
573 if fname in backups:
573 if fname in backups:
574 c.write(fp)
574 c.write(fp)
575 dopatch = fp.tell()
575 dopatch = fp.tell()
576 fp.seek(0)
576 fp.seek(0)
577
577
578 # 2.5 optionally review / modify patch in text editor
578 # 2.5 optionally review / modify patch in text editor
579 if opts.get(b'review', False):
579 if opts.get(b'review', False):
580 patchtext = (
580 patchtext = (
581 crecordmod.diffhelptext
581 crecordmod.diffhelptext
582 + crecordmod.patchhelptext
582 + crecordmod.patchhelptext
583 + fp.read()
583 + fp.read()
584 )
584 )
585 reviewedpatch = ui.edit(
585 reviewedpatch = ui.edit(
586 patchtext, b"", action=b"diff", repopath=repo.path
586 patchtext, b"", action=b"diff", repopath=repo.path
587 )
587 )
588 fp.truncate(0)
588 fp.truncate(0)
589 fp.write(reviewedpatch)
589 fp.write(reviewedpatch)
590 fp.seek(0)
590 fp.seek(0)
591
591
592 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
592 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
593 # 3a. apply filtered patch to clean repo (clean)
593 # 3a. apply filtered patch to clean repo (clean)
594 if backups:
594 if backups:
595 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
595 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
596 mergemod.revert_to(repo[b'.'], matcher=m)
596 mergemod.revert_to(repo[b'.'], matcher=m)
597
597
598 # 3b. (apply)
598 # 3b. (apply)
599 if dopatch:
599 if dopatch:
600 try:
600 try:
601 ui.debug(b'applying patch\n')
601 ui.debug(b'applying patch\n')
602 ui.debug(fp.getvalue())
602 ui.debug(fp.getvalue())
603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
604 except error.PatchError as err:
604 except error.PatchError as err:
605 raise error.Abort(pycompat.bytestr(err))
605 raise error.Abort(pycompat.bytestr(err))
606 del fp
606 del fp
607
607
608 # 4. We prepared the working directory according to the filtered
608 # 4. We prepared the working directory according to the filtered
609 # patch. Now is the time to delegate the job to
609 # patch. Now is the time to delegate the job to
610 # commit/qrefresh or the like!
610 # commit/qrefresh or the like!
611
611
612 # Make all of the pathnames absolute.
612 # Make all of the pathnames absolute.
613 newfiles = [repo.wjoin(nf) for nf in newfiles]
613 newfiles = [repo.wjoin(nf) for nf in newfiles]
614 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
614 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
615 finally:
615 finally:
616 # 5. finally restore backed-up files
616 # 5. finally restore backed-up files
617 try:
617 try:
618 dirstate = repo.dirstate
618 dirstate = repo.dirstate
619 for realname, tmpname in pycompat.iteritems(backups):
619 for realname, tmpname in pycompat.iteritems(backups):
620 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
620 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
621
621
622 if dirstate[realname] == b'n':
622 if dirstate[realname] == b'n':
623 # without normallookup, restoring timestamp
623 # without normallookup, restoring timestamp
624 # may cause partially committed files
624 # may cause partially committed files
625 # to be treated as unmodified
625 # to be treated as unmodified
626 dirstate.normallookup(realname)
626 dirstate.normallookup(realname)
627
627
628 # copystat=True here and above are a hack to trick any
628 # copystat=True here and above are a hack to trick any
629 # editors that have f open into thinking we haven't modified them.
629 # editors that have f open into thinking we haven't modified them.
630 #
630 #
631 # Also note that this is racy, as an editor could notice the
631 # Also note that this is racy, as an editor could notice the
632 # file's mtime before we've finished writing it.
632 # file's mtime before we've finished writing it.
633 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
633 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
634 os.unlink(tmpname)
634 os.unlink(tmpname)
635 if tobackup:
635 if tobackup:
636 os.rmdir(backupdir)
636 os.rmdir(backupdir)
637 except OSError:
637 except OSError:
638 pass
638 pass
639
639
640 def recordinwlock(ui, repo, message, match, opts):
640 def recordinwlock(ui, repo, message, match, opts):
641 with repo.wlock():
641 with repo.wlock():
642 return recordfunc(ui, repo, message, match, opts)
642 return recordfunc(ui, repo, message, match, opts)
643
643
644 return commit(ui, repo, recordinwlock, pats, opts)
644 return commit(ui, repo, recordinwlock, pats, opts)
645
645
646
646
647 class dirnode(object):
647 class dirnode(object):
648 """
648 """
649 Represent a directory in user working copy with information required for
649 Represent a directory in user working copy with information required for
650 the purpose of tersing its status.
650 the purpose of tersing its status.
651
651
652 path is the path to the directory, without a trailing '/'
652 path is the path to the directory, without a trailing '/'
653
653
654 statuses is a set of statuses of all files in this directory (this includes
654 statuses is a set of statuses of all files in this directory (this includes
655 all the files in all the subdirectories too)
655 all the files in all the subdirectories too)
656
656
657 files is a list of files which are direct children of this directory
657 files is a list of files which are direct children of this directory
658
658
659 subdirs is a dictionary with the sub-directory name as the key and its own
659 subdirs is a dictionary with the sub-directory name as the key and its own
660 dirnode object as the value
660 dirnode object as the value
661 """
661 """
662
662
663 def __init__(self, dirpath):
663 def __init__(self, dirpath):
664 self.path = dirpath
664 self.path = dirpath
665 self.statuses = set()
665 self.statuses = set()
666 self.files = []
666 self.files = []
667 self.subdirs = {}
667 self.subdirs = {}
668
668
669 def _addfileindir(self, filename, status):
669 def _addfileindir(self, filename, status):
670 """Add a file in this directory as a direct child."""
670 """Add a file in this directory as a direct child."""
671 self.files.append((filename, status))
671 self.files.append((filename, status))
672
672
673 def addfile(self, filename, status):
673 def addfile(self, filename, status):
674 """
674 """
675 Add a file to this directory or to its direct parent directory.
675 Add a file to this directory or to its direct parent directory.
676
676
677 If the file is not direct child of this directory, we traverse to the
677 If the file is not direct child of this directory, we traverse to the
678 directory of which this file is a direct child of and add the file
678 directory of which this file is a direct child of and add the file
679 there.
679 there.
680 """
680 """
681
681
682 # if the filename contains a path separator, it means it's not the direct
682 # if the filename contains a path separator, it means it's not the direct
683 # child of this directory
683 # child of this directory
684 if b'/' in filename:
684 if b'/' in filename:
685 subdir, filep = filename.split(b'/', 1)
685 subdir, filep = filename.split(b'/', 1)
686
686
687 # does the dirnode object for subdir exist
687 # does the dirnode object for subdir exist
688 if subdir not in self.subdirs:
688 if subdir not in self.subdirs:
689 subdirpath = pathutil.join(self.path, subdir)
689 subdirpath = pathutil.join(self.path, subdir)
690 self.subdirs[subdir] = dirnode(subdirpath)
690 self.subdirs[subdir] = dirnode(subdirpath)
691
691
692 # try adding the file in subdir
692 # try adding the file in subdir
693 self.subdirs[subdir].addfile(filep, status)
693 self.subdirs[subdir].addfile(filep, status)
694
694
695 else:
695 else:
696 self._addfileindir(filename, status)
696 self._addfileindir(filename, status)
697
697
698 if status not in self.statuses:
698 if status not in self.statuses:
699 self.statuses.add(status)
699 self.statuses.add(status)
700
700
701 def iterfilepaths(self):
701 def iterfilepaths(self):
702 """Yield (status, path) for files directly under this directory."""
702 """Yield (status, path) for files directly under this directory."""
703 for f, st in self.files:
703 for f, st in self.files:
704 yield st, pathutil.join(self.path, f)
704 yield st, pathutil.join(self.path, f)
705
705
706 def tersewalk(self, terseargs):
706 def tersewalk(self, terseargs):
707 """
707 """
708 Yield (status, path) obtained by processing the status of this
708 Yield (status, path) obtained by processing the status of this
709 dirnode.
709 dirnode.
710
710
711 terseargs is the string of arguments passed by the user with `--terse`
711 terseargs is the string of arguments passed by the user with `--terse`
712 flag.
712 flag.
713
713
714 Following are the cases which can happen:
714 Following are the cases which can happen:
715
715
716 1) All the files in the directory (including all the files in its
716 1) All the files in the directory (including all the files in its
717 subdirectories) share the same status and the user has asked us to terse
717 subdirectories) share the same status and the user has asked us to terse
718 that status. -> yield (status, dirpath). dirpath will end in '/'.
718 that status. -> yield (status, dirpath). dirpath will end in '/'.
719
719
720 2) Otherwise, we do following:
720 2) Otherwise, we do following:
721
721
722 a) Yield (status, filepath) for all the files which are in this
722 a) Yield (status, filepath) for all the files which are in this
723 directory (only the ones in this directory, not the subdirs)
723 directory (only the ones in this directory, not the subdirs)
724
724
725 b) Recurse the function on all the subdirectories of this
725 b) Recurse the function on all the subdirectories of this
726 directory
726 directory
727 """
727 """
728
728
729 if len(self.statuses) == 1:
729 if len(self.statuses) == 1:
730 onlyst = self.statuses.pop()
730 onlyst = self.statuses.pop()
731
731
732 # Making sure we terse only when the status abbreviation is
732 # Making sure we terse only when the status abbreviation is
733 # passed as terse argument
733 # passed as terse argument
734 if onlyst in terseargs:
734 if onlyst in terseargs:
735 yield onlyst, self.path + b'/'
735 yield onlyst, self.path + b'/'
736 return
736 return
737
737
738 # add the files to status list
738 # add the files to status list
739 for st, fpath in self.iterfilepaths():
739 for st, fpath in self.iterfilepaths():
740 yield st, fpath
740 yield st, fpath
741
741
742 # recurse on the subdirs
742 # recurse on the subdirs
743 for dirobj in self.subdirs.values():
743 for dirobj in self.subdirs.values():
744 for st, fpath in dirobj.tersewalk(terseargs):
744 for st, fpath in dirobj.tersewalk(terseargs):
745 yield st, fpath
745 yield st, fpath
746
746
747
747
748 def tersedir(statuslist, terseargs):
748 def tersedir(statuslist, terseargs):
749 """
749 """
750 Terse the status if all the files in a directory share the same status.
750 Terse the status if all the files in a directory share the same status.
751
751
752 statuslist is scmutil.status() object which contains a list of files for
752 statuslist is scmutil.status() object which contains a list of files for
753 each status.
753 each status.
754 terseargs is the string which is passed by the user as the argument to the `--terse`
754 terseargs is the string which is passed by the user as the argument to the `--terse`
755 flag.
755 flag.
756
756
757 The function makes a tree of objects of dirnode class, and at each node it
757 The function makes a tree of objects of dirnode class, and at each node it
758 stores the information required to know whether we can terse a certain
758 stores the information required to know whether we can terse a certain
759 directory or not.
759 directory or not.
760 """
760 """
761 # the order matters here as that is used to produce the final list
761 # the order matters here as that is used to produce the final list
762 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
762 allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
763
763
764 # checking the argument validity
764 # checking the argument validity
765 for s in pycompat.bytestr(terseargs):
765 for s in pycompat.bytestr(terseargs):
766 if s not in allst:
766 if s not in allst:
767 raise error.Abort(_(b"'%s' not recognized") % s)
767 raise error.Abort(_(b"'%s' not recognized") % s)
768
768
769 # creating a dirnode object for the root of the repo
769 # creating a dirnode object for the root of the repo
770 rootobj = dirnode(b'')
770 rootobj = dirnode(b'')
771 pstatus = (
771 pstatus = (
772 b'modified',
772 b'modified',
773 b'added',
773 b'added',
774 b'deleted',
774 b'deleted',
775 b'clean',
775 b'clean',
776 b'unknown',
776 b'unknown',
777 b'ignored',
777 b'ignored',
778 b'removed',
778 b'removed',
779 )
779 )
780
780
781 tersedict = {}
781 tersedict = {}
782 for attrname in pstatus:
782 for attrname in pstatus:
783 statuschar = attrname[0:1]
783 statuschar = attrname[0:1]
784 for f in getattr(statuslist, attrname):
784 for f in getattr(statuslist, attrname):
785 rootobj.addfile(f, statuschar)
785 rootobj.addfile(f, statuschar)
786 tersedict[statuschar] = []
786 tersedict[statuschar] = []
787
787
788 # we won't be tersing the root dir, so add files in it
788 # we won't be tersing the root dir, so add files in it
789 for st, fpath in rootobj.iterfilepaths():
789 for st, fpath in rootobj.iterfilepaths():
790 tersedict[st].append(fpath)
790 tersedict[st].append(fpath)
791
791
792 # process each sub-directory and build tersedict
792 # process each sub-directory and build tersedict
793 for subdir in rootobj.subdirs.values():
793 for subdir in rootobj.subdirs.values():
794 for st, f in subdir.tersewalk(terseargs):
794 for st, f in subdir.tersewalk(terseargs):
795 tersedict[st].append(f)
795 tersedict[st].append(f)
796
796
797 tersedlist = []
797 tersedlist = []
798 for st in allst:
798 for st in allst:
799 tersedict[st].sort()
799 tersedict[st].sort()
800 tersedlist.append(tersedict[st])
800 tersedlist.append(tersedict[st])
801
801
802 return scmutil.status(*tersedlist)
802 return scmutil.status(*tersedlist)
803
803
804
804
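
A standalone sketch of the terse rule implemented by dirnode.tersewalk() and tersedir() above: if every file under a directory carries the same status and that status was requested with --terse, report the directory once with a trailing '/', otherwise list the files. This flattens the recursion to a single directory level and uses made-up paths:

import os
from collections import defaultdict

# Made-up (path, status) pairs and a terse request for the "unknown" status.
files = [('docs/a.txt', 'u'), ('docs/b.txt', 'u'),
         ('src/x.py', 'm'), ('src/y.py', 'a')]
terseargs = 'u'

bydir = defaultdict(list)
for path, st in files:
    bydir[os.path.dirname(path)].append((path, st))

for d, entries in sorted(bydir.items()):
    statuses = {st for _, st in entries}
    if len(statuses) == 1 and next(iter(statuses)) in terseargs:
        print(next(iter(statuses)), d + '/')    # the whole directory collapses
    else:
        for path, st in entries:
            print(st, path)                     # otherwise list each file
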
805 def _commentlines(raw):
805 def _commentlines(raw):
806 '''Surround lines with a comment char and a new line'''
806 '''Surround lines with a comment char and a new line'''
807 lines = raw.splitlines()
807 lines = raw.splitlines()
808 commentedlines = [b'# %s' % line for line in lines]
808 commentedlines = [b'# %s' % line for line in lines]
809 return b'\n'.join(commentedlines) + b'\n'
809 return b'\n'.join(commentedlines) + b'\n'
810
810
811
811
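
What _commentlines() above produces, sketched with str instead of the bytes the real code uses:

def commentlines(raw):
    return '\n'.join('# %s' % line for line in raw.splitlines()) + '\n'

print(commentlines('Unresolved merge conflicts:\n  foo.txt'), end='')
# -> '# Unresolved merge conflicts:' and '#   foo.txt', each on its own line
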
812 @attr.s(frozen=True)
812 @attr.s(frozen=True)
813 class morestatus(object):
813 class morestatus(object):
814 reporoot = attr.ib()
814 reporoot = attr.ib()
815 unfinishedop = attr.ib()
815 unfinishedop = attr.ib()
816 unfinishedmsg = attr.ib()
816 unfinishedmsg = attr.ib()
817 activemerge = attr.ib()
817 activemerge = attr.ib()
818 unresolvedpaths = attr.ib()
818 unresolvedpaths = attr.ib()
819 _formattedpaths = attr.ib(init=False, default=set())
819 _formattedpaths = attr.ib(init=False, default=set())
820 _label = b'status.morestatus'
820 _label = b'status.morestatus'
821
821
822 def formatfile(self, path, fm):
822 def formatfile(self, path, fm):
823 self._formattedpaths.add(path)
823 self._formattedpaths.add(path)
824 if self.activemerge and path in self.unresolvedpaths:
824 if self.activemerge and path in self.unresolvedpaths:
825 fm.data(unresolved=True)
825 fm.data(unresolved=True)
826
826
827 def formatfooter(self, fm):
827 def formatfooter(self, fm):
828 if self.unfinishedop or self.unfinishedmsg:
828 if self.unfinishedop or self.unfinishedmsg:
829 fm.startitem()
829 fm.startitem()
830 fm.data(itemtype=b'morestatus')
830 fm.data(itemtype=b'morestatus')
831
831
832 if self.unfinishedop:
832 if self.unfinishedop:
833 fm.data(unfinished=self.unfinishedop)
833 fm.data(unfinished=self.unfinishedop)
834 statemsg = (
834 statemsg = (
835 _(b'The repository is in an unfinished *%s* state.')
835 _(b'The repository is in an unfinished *%s* state.')
836 % self.unfinishedop
836 % self.unfinishedop
837 )
837 )
838 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
838 fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
839 if self.unfinishedmsg:
839 if self.unfinishedmsg:
840 fm.data(unfinishedmsg=self.unfinishedmsg)
840 fm.data(unfinishedmsg=self.unfinishedmsg)
841
841
842 # May also start new data items.
842 # May also start new data items.
843 self._formatconflicts(fm)
843 self._formatconflicts(fm)
844
844
845 if self.unfinishedmsg:
845 if self.unfinishedmsg:
846 fm.plain(
846 fm.plain(
847 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
847 b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
848 )
848 )
849
849
850 def _formatconflicts(self, fm):
850 def _formatconflicts(self, fm):
851 if not self.activemerge:
851 if not self.activemerge:
852 return
852 return
853
853
854 if self.unresolvedpaths:
854 if self.unresolvedpaths:
855 mergeliststr = b'\n'.join(
855 mergeliststr = b'\n'.join(
856 [
856 [
857 b' %s'
857 b' %s'
858 % util.pathto(self.reporoot, encoding.getcwd(), path)
858 % util.pathto(self.reporoot, encoding.getcwd(), path)
859 for path in self.unresolvedpaths
859 for path in self.unresolvedpaths
860 ]
860 ]
861 )
861 )
862 msg = (
862 msg = (
863 _(
863 _(
864 '''Unresolved merge conflicts:
864 '''Unresolved merge conflicts:
865
865
866 %s
866 %s
867
867
868 To mark files as resolved: hg resolve --mark FILE'''
868 To mark files as resolved: hg resolve --mark FILE'''
869 )
869 )
870 % mergeliststr
870 % mergeliststr
871 )
871 )
872
872
873 # If any paths with unresolved conflicts were not previously
873 # If any paths with unresolved conflicts were not previously
874 # formatted, output them now.
874 # formatted, output them now.
875 for f in self.unresolvedpaths:
875 for f in self.unresolvedpaths:
876 if f in self._formattedpaths:
876 if f in self._formattedpaths:
877 # Already output.
877 # Already output.
878 continue
878 continue
879 fm.startitem()
879 fm.startitem()
880 # We can't claim to know the status of the file - it may just
880 # We can't claim to know the status of the file - it may just
881 # have been in one of the states that were not requested for
881 # have been in one of the states that were not requested for
882 # display, so it could be anything.
882 # display, so it could be anything.
883 fm.data(itemtype=b'file', path=f, unresolved=True)
883 fm.data(itemtype=b'file', path=f, unresolved=True)
884
884
885 else:
885 else:
886 msg = _(b'No unresolved merge conflicts.')
886 msg = _(b'No unresolved merge conflicts.')
887
887
888 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
888 fm.plain(b'%s\n' % _commentlines(msg), label=self._label)
889
889
890
890
891 def readmorestatus(repo):
891 def readmorestatus(repo):
892 """Returns a morestatus object if the repo has unfinished state."""
892 """Returns a morestatus object if the repo has unfinished state."""
893 statetuple = statemod.getrepostate(repo)
893 statetuple = statemod.getrepostate(repo)
894 mergestate = mergestatemod.mergestate.read(repo)
894 mergestate = mergestatemod.mergestate.read(repo)
895 activemerge = mergestate.active()
895 activemerge = mergestate.active()
896 if not statetuple and not activemerge:
896 if not statetuple and not activemerge:
897 return None
897 return None
898
898
899 unfinishedop = unfinishedmsg = unresolved = None
899 unfinishedop = unfinishedmsg = unresolved = None
900 if statetuple:
900 if statetuple:
901 unfinishedop, unfinishedmsg = statetuple
901 unfinishedop, unfinishedmsg = statetuple
902 if activemerge:
902 if activemerge:
903 unresolved = sorted(mergestate.unresolved())
903 unresolved = sorted(mergestate.unresolved())
904 return morestatus(
904 return morestatus(
905 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
905 repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
906 )
906 )
907
907
908
908
909 def findpossible(cmd, table, strict=False):
909 def findpossible(cmd, table, strict=False):
910 """
910 """
911 Return cmd -> (aliases, command table entry)
911 Return cmd -> (aliases, command table entry)
912 for each matching command.
912 for each matching command.
913 Return debug commands (or their aliases) only if no normal command matches.
913 Return debug commands (or their aliases) only if no normal command matches.
914 """
914 """
915 choice = {}
915 choice = {}
916 debugchoice = {}
916 debugchoice = {}
917
917
918 if cmd in table:
918 if cmd in table:
919 # short-circuit exact matches, "log" alias beats "log|history"
919 # short-circuit exact matches, "log" alias beats "log|history"
920 keys = [cmd]
920 keys = [cmd]
921 else:
921 else:
922 keys = table.keys()
922 keys = table.keys()
923
923
924 allcmds = []
924 allcmds = []
925 for e in keys:
925 for e in keys:
926 aliases = parsealiases(e)
926 aliases = parsealiases(e)
927 allcmds.extend(aliases)
927 allcmds.extend(aliases)
928 found = None
928 found = None
929 if cmd in aliases:
929 if cmd in aliases:
930 found = cmd
930 found = cmd
931 elif not strict:
931 elif not strict:
932 for a in aliases:
932 for a in aliases:
933 if a.startswith(cmd):
933 if a.startswith(cmd):
934 found = a
934 found = a
935 break
935 break
936 if found is not None:
936 if found is not None:
937 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
937 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
938 debugchoice[found] = (aliases, table[e])
938 debugchoice[found] = (aliases, table[e])
939 else:
939 else:
940 choice[found] = (aliases, table[e])
940 choice[found] = (aliases, table[e])
941
941
942 if not choice and debugchoice:
942 if not choice and debugchoice:
943 choice = debugchoice
943 choice = debugchoice
944
944
945 return choice, allcmds
945 return choice, allcmds
946
946
947
947
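
A standalone sketch of the prefix matching done by findpossible()/findcmd() above: an exact alias match wins, otherwise any alias starting with the typed prefix is a candidate, and more than one surviving candidate makes the command ambiguous. The toy table below is hypothetical:

table = {'log|history': 'log entry', 'locate': 'locate entry'}

def possible(cmd):
    matches = {}
    for key in table:
        aliases = key.split('|')
        found = cmd if cmd in aliases else next(
            (a for a in aliases if a.startswith(cmd)), None)
        if found is not None:
            matches[found] = (aliases, table[key])
    return matches

print(sorted(possible('lo')))     # ['locate', 'log'] -> ambiguous prefix
print(sorted(possible('hist')))   # ['history']       -> unique match
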
948 def findcmd(cmd, table, strict=True):
948 def findcmd(cmd, table, strict=True):
949 """Return (aliases, command table entry) for command string."""
949 """Return (aliases, command table entry) for command string."""
950 choice, allcmds = findpossible(cmd, table, strict)
950 choice, allcmds = findpossible(cmd, table, strict)
951
951
952 if cmd in choice:
952 if cmd in choice:
953 return choice[cmd]
953 return choice[cmd]
954
954
955 if len(choice) > 1:
955 if len(choice) > 1:
956 clist = sorted(choice)
956 clist = sorted(choice)
957 raise error.AmbiguousCommand(cmd, clist)
957 raise error.AmbiguousCommand(cmd, clist)
958
958
959 if choice:
959 if choice:
960 return list(choice.values())[0]
960 return list(choice.values())[0]
961
961
962 raise error.UnknownCommand(cmd, allcmds)
962 raise error.UnknownCommand(cmd, allcmds)
963
963
964
964
965 def changebranch(ui, repo, revs, label, opts):
965 def changebranch(ui, repo, revs, label, opts):
966 """ Change the branch name of given revs to label """
966 """ Change the branch name of given revs to label """
967
967
968 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
968 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
969 # abort in case of uncommitted merge or dirty wdir
969 # abort in case of uncommitted merge or dirty wdir
970 bailifchanged(repo)
970 bailifchanged(repo)
971 revs = scmutil.revrange(repo, revs)
971 revs = scmutil.revrange(repo, revs)
972 if not revs:
972 if not revs:
973 raise error.Abort(b"empty revision set")
973 raise error.Abort(b"empty revision set")
974 roots = repo.revs(b'roots(%ld)', revs)
974 roots = repo.revs(b'roots(%ld)', revs)
975 if len(roots) > 1:
975 if len(roots) > 1:
976 raise error.Abort(
976 raise error.Abort(
977 _(b"cannot change branch of non-linear revisions")
977 _(b"cannot change branch of non-linear revisions")
978 )
978 )
979 rewriteutil.precheck(repo, revs, b'change branch of')
979 rewriteutil.precheck(repo, revs, b'change branch of')
980
980
981 root = repo[roots.first()]
981 root = repo[roots.first()]
982 rpb = {parent.branch() for parent in root.parents()}
982 rpb = {parent.branch() for parent in root.parents()}
983 if (
983 if (
984 not opts.get(b'force')
984 not opts.get(b'force')
985 and label not in rpb
985 and label not in rpb
986 and label in repo.branchmap()
986 and label in repo.branchmap()
987 ):
987 ):
988 raise error.Abort(_(b"a branch of the same name already exists"))
988 raise error.Abort(_(b"a branch of the same name already exists"))
989
989
990 if repo.revs(b'obsolete() and %ld', revs):
990 if repo.revs(b'obsolete() and %ld', revs):
991 raise error.Abort(
991 raise error.Abort(
992 _(b"cannot change branch of a obsolete changeset")
992 _(b"cannot change branch of a obsolete changeset")
993 )
993 )
994
994
995 # make sure only topological heads
995 # make sure only topological heads
996 if repo.revs(b'heads(%ld) - head()', revs):
996 if repo.revs(b'heads(%ld) - head()', revs):
997 raise error.Abort(_(b"cannot change branch in middle of a stack"))
997 raise error.Abort(_(b"cannot change branch in middle of a stack"))
998
998
999 replacements = {}
999 replacements = {}
1000 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
1000 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
1001 # mercurial.subrepo -> mercurial.cmdutil
1001 # mercurial.subrepo -> mercurial.cmdutil
1002 from . import context
1002 from . import context
1003
1003
1004 for rev in revs:
1004 for rev in revs:
1005 ctx = repo[rev]
1005 ctx = repo[rev]
1006 oldbranch = ctx.branch()
1006 oldbranch = ctx.branch()
1007 # skip revisions that are already on the target branch
1007 # skip revisions that are already on the target branch
1008 if oldbranch == label:
1008 if oldbranch == label:
1009 continue
1009 continue
1010
1010
1011 def filectxfn(repo, newctx, path):
1011 def filectxfn(repo, newctx, path):
1012 try:
1012 try:
1013 return ctx[path]
1013 return ctx[path]
1014 except error.ManifestLookupError:
1014 except error.ManifestLookupError:
1015 return None
1015 return None
1016
1016
1017 ui.debug(
1017 ui.debug(
1018 b"changing branch of '%s' from '%s' to '%s'\n"
1018 b"changing branch of '%s' from '%s' to '%s'\n"
1019 % (hex(ctx.node()), oldbranch, label)
1019 % (hex(ctx.node()), oldbranch, label)
1020 )
1020 )
1021 extra = ctx.extra()
1021 extra = ctx.extra()
1022 extra[b'branch_change'] = hex(ctx.node())
1022 extra[b'branch_change'] = hex(ctx.node())
1023 # While changing the branch of a set of linear commits, make sure that
1023 # While changing the branch of a set of linear commits, make sure that
1024 # we base our commits on the new parent rather than the old parent,
1024 # we base our commits on the new parent rather than the old parent,
1025 # which was obsoleted while changing the branch
1025 # which was obsoleted while changing the branch
1026 p1 = ctx.p1().node()
1026 p1 = ctx.p1().node()
1027 p2 = ctx.p2().node()
1027 p2 = ctx.p2().node()
1028 if p1 in replacements:
1028 if p1 in replacements:
1029 p1 = replacements[p1][0]
1029 p1 = replacements[p1][0]
1030 if p2 in replacements:
1030 if p2 in replacements:
1031 p2 = replacements[p2][0]
1031 p2 = replacements[p2][0]
1032
1032
1033 mc = context.memctx(
1033 mc = context.memctx(
1034 repo,
1034 repo,
1035 (p1, p2),
1035 (p1, p2),
1036 ctx.description(),
1036 ctx.description(),
1037 ctx.files(),
1037 ctx.files(),
1038 filectxfn,
1038 filectxfn,
1039 user=ctx.user(),
1039 user=ctx.user(),
1040 date=ctx.date(),
1040 date=ctx.date(),
1041 extra=extra,
1041 extra=extra,
1042 branch=label,
1042 branch=label,
1043 )
1043 )
1044
1044
1045 newnode = repo.commitctx(mc)
1045 newnode = repo.commitctx(mc)
1046 replacements[ctx.node()] = (newnode,)
1046 replacements[ctx.node()] = (newnode,)
1047 ui.debug(b'new node id is %s\n' % hex(newnode))
1047 ui.debug(b'new node id is %s\n' % hex(newnode))
1048
1048
1049 # create obsmarkers and move bookmarks
1049 # create obsmarkers and move bookmarks
1050 scmutil.cleanupnodes(
1050 scmutil.cleanupnodes(
1051 repo, replacements, b'branch-change', fixphase=True
1051 repo, replacements, b'branch-change', fixphase=True
1052 )
1052 )
1053
1053
1054 # move the working copy too
1054 # move the working copy too
1055 wctx = repo[None]
1055 wctx = repo[None]
1056 # in-progress merge is a bit too complex for now.
1056 # in-progress merge is a bit too complex for now.
1057 if len(wctx.parents()) == 1:
1057 if len(wctx.parents()) == 1:
1058 newid = replacements.get(wctx.p1().node())
1058 newid = replacements.get(wctx.p1().node())
1059 if newid is not None:
1059 if newid is not None:
1060 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1060 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1061 # mercurial.cmdutil
1061 # mercurial.cmdutil
1062 from . import hg
1062 from . import hg
1063
1063
1064 hg.update(repo, newid[0], quietempty=True)
1064 hg.update(repo, newid[0], quietempty=True)
1065
1065
1066 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1066 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1067
1067
1068
1068
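# --- Illustrative sketch (not part of the original file) ---------------------
# The parent remapping changebranch() performs while rewriting a linear stack:
# if a parent was already rewritten earlier in the loop, the new commit must be
# based on its replacement rather than on the obsoleted original.  Node ids are
# plain strings here instead of binary hashes, and 'replacements' has the same
# shape as in the function above (old node -> (new node,)).

def remap_parent(parent, replacements):
    if parent in replacements:
        return replacements[parent][0]
    return parent

if __name__ == '__main__':
    replacements = {'old1': ('new1',)}
    assert remap_parent('old1', replacements) == 'new1'   # rewritten earlier
    assert remap_parent('base', replacements) == 'base'   # untouched parent
# -----------------------------------------------------------------------------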
1069 def findrepo(p):
1069 def findrepo(p):
1070 while not os.path.isdir(os.path.join(p, b".hg")):
1070 while not os.path.isdir(os.path.join(p, b".hg")):
1071 oldp, p = p, os.path.dirname(p)
1071 oldp, p = p, os.path.dirname(p)
1072 if p == oldp:
1072 if p == oldp:
1073 return None
1073 return None
1074
1074
1075 return p
1075 return p
1076
1076
1077
1077
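# --- Illustrative sketch (not part of the original file) ---------------------
# The same walk-up search as findrepo(), written against str paths so it runs
# without a Mercurial checkout.  The loop stops once os.path.dirname() no
# longer changes the path, i.e. at the filesystem root.

import os

def find_repo_root(path):
    while not os.path.isdir(os.path.join(path, '.hg')):
        old, path = path, os.path.dirname(path)
        if path == old:
            return None
    return path

if __name__ == '__main__':
    print(find_repo_root(os.getcwd()) or 'no repository found')
# -----------------------------------------------------------------------------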
1078 def bailifchanged(repo, merge=True, hint=None):
1078 def bailifchanged(repo, merge=True, hint=None):
1079 """ enforce the precondition that working directory must be clean.
1079 """ enforce the precondition that working directory must be clean.
1080
1080
1081 'merge' can be set to false if a pending uncommitted merge should be
1081 'merge' can be set to false if a pending uncommitted merge should be
1082 ignored (such as when 'update --check' runs).
1082 ignored (such as when 'update --check' runs).
1083
1083
1084 'hint' is the usual hint given to Abort exception.
1084 'hint' is the usual hint given to Abort exception.
1085 """
1085 """
1086
1086
1087 if merge and repo.dirstate.p2() != nullid:
1087 if merge and repo.dirstate.p2() != nullid:
1088 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1088 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1089 st = repo.status()
1089 st = repo.status()
1090 if st.modified or st.added or st.removed or st.deleted:
1090 if st.modified or st.added or st.removed or st.deleted:
1091 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1091 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1092 ctx = repo[None]
1092 ctx = repo[None]
1093 for s in sorted(ctx.substate):
1093 for s in sorted(ctx.substate):
1094 ctx.sub(s).bailifchanged(hint=hint)
1094 ctx.sub(s).bailifchanged(hint=hint)
1095
1095
1096
1096
1097 def logmessage(ui, opts):
1097 def logmessage(ui, opts):
1098 """ get the log message according to -m and -l option """
1098 """ get the log message according to -m and -l option """
1099
1099
1100 check_at_most_one_arg(opts, b'message', b'logfile')
1100 check_at_most_one_arg(opts, b'message', b'logfile')
1101
1101
1102 message = opts.get(b'message')
1102 message = opts.get(b'message')
1103 logfile = opts.get(b'logfile')
1103 logfile = opts.get(b'logfile')
1104
1104
1105 if not message and logfile:
1105 if not message and logfile:
1106 try:
1106 try:
1107 if isstdiofilename(logfile):
1107 if isstdiofilename(logfile):
1108 message = ui.fin.read()
1108 message = ui.fin.read()
1109 else:
1109 else:
1110 message = b'\n'.join(util.readfile(logfile).splitlines())
1110 message = b'\n'.join(util.readfile(logfile).splitlines())
1111 except IOError as inst:
1111 except IOError as inst:
1112 raise error.Abort(
1112 raise error.Abort(
1113 _(b"can't read commit message '%s': %s")
1113 _(b"can't read commit message '%s': %s")
1114 % (logfile, encoding.strtolocal(inst.strerror))
1114 % (logfile, encoding.strtolocal(inst.strerror))
1115 )
1115 )
1116 return message
1116 return message
1117
1117
1118
1118
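# --- Illustrative sketch (not part of the original file) ---------------------
# The -m / -l precedence implemented by logmessage(), using plain str options:
# the two flags are mutually exclusive, '-' reads the message from stdin, and
# returning None tells the caller to launch an editor instead.

import sys

def log_message(message=None, logfile=None):
    if message and logfile:
        raise ValueError('--message and --logfile are mutually exclusive')
    if not message and logfile:
        if logfile == '-':
            message = sys.stdin.read()
        else:
            with open(logfile) as fp:
                message = '\n'.join(fp.read().splitlines())
    return message

if __name__ == '__main__':
    assert log_message(message='fix bug') == 'fix bug'
    assert log_message() is None          # caller will open an editor
# -----------------------------------------------------------------------------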
1119 def mergeeditform(ctxorbool, baseformname):
1119 def mergeeditform(ctxorbool, baseformname):
1120 """return appropriate editform name (referencing a committemplate)
1120 """return appropriate editform name (referencing a committemplate)
1121
1121
1122 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1122 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1123 a merge is being committed.
1123 a merge is being committed.
1124
1124
1125 This returns baseformname with '.merge' appended if it is a merge,
1125 This returns baseformname with '.merge' appended if it is a merge,
1126 otherwise '.normal' is appended.
1126 otherwise '.normal' is appended.
1127 """
1127 """
1128 if isinstance(ctxorbool, bool):
1128 if isinstance(ctxorbool, bool):
1129 if ctxorbool:
1129 if ctxorbool:
1130 return baseformname + b".merge"
1130 return baseformname + b".merge"
1131 elif len(ctxorbool.parents()) > 1:
1131 elif len(ctxorbool.parents()) > 1:
1132 return baseformname + b".merge"
1132 return baseformname + b".merge"
1133
1133
1134 return baseformname + b".normal"
1134 return baseformname + b".normal"
1135
1135
1136
1136
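# --- Illustrative sketch (not part of the original file) ---------------------
# The suffix selection done by mergeeditform(), with the changectx replaced by
# a parent count so the example is self-contained: a bool or a parent count of
# two yields '.merge', everything else '.normal'.

def editform_suffix(merge_or_parents, baseformname='commit'):
    if isinstance(merge_or_parents, bool):
        merge = merge_or_parents
    else:
        merge = merge_or_parents > 1
    return baseformname + ('.merge' if merge else '.normal')

if __name__ == '__main__':
    assert editform_suffix(True, 'import.normal') == 'import.normal.merge'
    assert editform_suffix(1) == 'commit.normal'
# -----------------------------------------------------------------------------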
1137 def getcommiteditor(
1137 def getcommiteditor(
1138 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1138 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1139 ):
1139 ):
1140 """get appropriate commit message editor according to '--edit' option
1140 """get appropriate commit message editor according to '--edit' option
1141
1141
1142 'finishdesc' is a function to be called with the edited commit message
1142 'finishdesc' is a function to be called with the edited commit message
1143 (= 'description' of the new changeset) just after editing, but
1143 (= 'description' of the new changeset) just after editing, but
1144 before checking emptiness. It should return the actual text to be
1144 before checking emptiness. It should return the actual text to be
1145 stored into history. This allows the description to be changed before
1145 stored into history. This allows the description to be changed before
1146 it is stored.
1146 it is stored.
1147
1147
1148 'extramsg' is an extra message to be shown in the editor instead of
1148 'extramsg' is an extra message to be shown in the editor instead of
1149 the 'Leave message empty to abort commit' line. An 'HG: ' prefix and EOL
1149 the 'Leave message empty to abort commit' line. An 'HG: ' prefix and EOL
1150 are added automatically.
1150 are added automatically.
1151
1151
1152 'editform' is a dot-separated list of names, to distinguish
1152 'editform' is a dot-separated list of names, to distinguish
1153 the purpose of commit text editing.
1153 the purpose of commit text editing.
1154
1154
1155 'getcommiteditor' returns 'commitforceeditor' regardless of
1155 'getcommiteditor' returns 'commitforceeditor' regardless of
1156 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1156 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1157 they are specific to usage in MQ.
1157 they are specific to usage in MQ.
1158 """
1158 """
1159 if edit or finishdesc or extramsg:
1159 if edit or finishdesc or extramsg:
1160 return lambda r, c, s: commitforceeditor(
1160 return lambda r, c, s: commitforceeditor(
1161 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1161 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1162 )
1162 )
1163 elif editform:
1163 elif editform:
1164 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1164 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1165 else:
1165 else:
1166 return commiteditor
1166 return commiteditor
1167
1167
1168
1168
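# --- Illustrative sketch (not part of the original file) ---------------------
# The dispatch performed by getcommiteditor(): any of 'edit', 'finishdesc' or
# 'extramsg' forces the interactive editor path, otherwise the plain editor is
# returned.  The real commiteditor/commitforceeditor are replaced here by
# hypothetical stand-ins so the selection logic can be exercised on its own.

def plain_editor(text):
    return text

def forced_editor(text, finishdesc=None, extramsg=None, editform=''):
    # simplified: only the 'finishdesc' post-processing step is modelled
    return (finishdesc or (lambda t: t))(text)

def pick_editor(edit=False, finishdesc=None, extramsg=None, editform=''):
    if edit or finishdesc or extramsg:
        return lambda text: forced_editor(
            text, finishdesc=finishdesc, extramsg=extramsg, editform=editform
        )
    return plain_editor

if __name__ == '__main__':
    assert pick_editor()('msg') == 'msg'
    assert pick_editor(finishdesc=str.upper)('msg') == 'MSG'
# -----------------------------------------------------------------------------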
1169 def _escapecommandtemplate(tmpl):
1169 def _escapecommandtemplate(tmpl):
1170 parts = []
1170 parts = []
1171 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1171 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1172 if typ == b'string':
1172 if typ == b'string':
1173 parts.append(stringutil.escapestr(tmpl[start:end]))
1173 parts.append(stringutil.escapestr(tmpl[start:end]))
1174 else:
1174 else:
1175 parts.append(tmpl[start:end])
1175 parts.append(tmpl[start:end])
1176 return b''.join(parts)
1176 return b''.join(parts)
1177
1177
1178
1178
1179 def rendercommandtemplate(ui, tmpl, props):
1179 def rendercommandtemplate(ui, tmpl, props):
1180 r"""Expand a literal template 'tmpl' in a way suitable for command line
1180 r"""Expand a literal template 'tmpl' in a way suitable for command line
1181
1181
1182 '\' in outermost string is not taken as an escape character because it
1182 '\' in outermost string is not taken as an escape character because it
1183 is a directory separator on Windows.
1183 is a directory separator on Windows.
1184
1184
1185 >>> from . import ui as uimod
1185 >>> from . import ui as uimod
1186 >>> ui = uimod.ui()
1186 >>> ui = uimod.ui()
1187 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1187 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1188 'c:\\foo'
1188 'c:\\foo'
1189 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1189 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1190 'c:{path}'
1190 'c:{path}'
1191 """
1191 """
1192 if not tmpl:
1192 if not tmpl:
1193 return tmpl
1193 return tmpl
1194 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1194 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1195 return t.renderdefault(props)
1195 return t.renderdefault(props)
1196
1196
1197
1197
1198 def rendertemplate(ctx, tmpl, props=None):
1198 def rendertemplate(ctx, tmpl, props=None):
1199 """Expand a literal template 'tmpl' byte-string against one changeset
1199 """Expand a literal template 'tmpl' byte-string against one changeset
1200
1200
1201 Each props item must be a stringify-able value or a callable returning
1201 Each props item must be a stringify-able value or a callable returning
1202 such a value, i.e. no bare list or dict should be passed.
1202 such a value, i.e. no bare list or dict should be passed.
1203 """
1203 """
1204 repo = ctx.repo()
1204 repo = ctx.repo()
1205 tres = formatter.templateresources(repo.ui, repo)
1205 tres = formatter.templateresources(repo.ui, repo)
1206 t = formatter.maketemplater(
1206 t = formatter.maketemplater(
1207 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1207 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1208 )
1208 )
1209 mapping = {b'ctx': ctx}
1209 mapping = {b'ctx': ctx}
1210 if props:
1210 if props:
1211 mapping.update(props)
1211 mapping.update(props)
1212 return t.renderdefault(mapping)
1212 return t.renderdefault(mapping)
1213
1213
1214
1214
1215 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1215 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1216 r"""Convert old-style filename format string to template string
1216 r"""Convert old-style filename format string to template string
1217
1217
1218 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1218 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1219 'foo-{reporoot|basename}-{seqno}.patch'
1219 'foo-{reporoot|basename}-{seqno}.patch'
1220 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1220 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1221 '{rev}{tags % "{tag}"}{node}'
1221 '{rev}{tags % "{tag}"}{node}'
1222
1222
1223 '\' in outermost strings has to be escaped because it is a directory
1223 '\' in outermost strings has to be escaped because it is a directory
1224 separator on Windows:
1224 separator on Windows:
1225
1225
1226 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1226 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1227 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1227 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1228 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1228 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1229 '\\\\\\\\foo\\\\bar.patch'
1229 '\\\\\\\\foo\\\\bar.patch'
1230 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1230 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1231 '\\\\{tags % "{tag}"}'
1231 '\\\\{tags % "{tag}"}'
1232
1232
1233 but inner strings follow the template rules (i.e. '\' is taken as an
1233 but inner strings follow the template rules (i.e. '\' is taken as an
1234 escape character):
1234 escape character):
1235
1235
1236 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1236 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1237 '{"c:\\tmp"}'
1237 '{"c:\\tmp"}'
1238 """
1238 """
1239 expander = {
1239 expander = {
1240 b'H': b'{node}',
1240 b'H': b'{node}',
1241 b'R': b'{rev}',
1241 b'R': b'{rev}',
1242 b'h': b'{node|short}',
1242 b'h': b'{node|short}',
1243 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1243 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1244 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1244 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1245 b'%': b'%',
1245 b'%': b'%',
1246 b'b': b'{reporoot|basename}',
1246 b'b': b'{reporoot|basename}',
1247 }
1247 }
1248 if total is not None:
1248 if total is not None:
1249 expander[b'N'] = b'{total}'
1249 expander[b'N'] = b'{total}'
1250 if seqno is not None:
1250 if seqno is not None:
1251 expander[b'n'] = b'{seqno}'
1251 expander[b'n'] = b'{seqno}'
1252 if total is not None and seqno is not None:
1252 if total is not None and seqno is not None:
1253 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1253 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1254 if pathname is not None:
1254 if pathname is not None:
1255 expander[b's'] = b'{pathname|basename}'
1255 expander[b's'] = b'{pathname|basename}'
1256 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1256 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1257 expander[b'p'] = b'{pathname}'
1257 expander[b'p'] = b'{pathname}'
1258
1258
1259 newname = []
1259 newname = []
1260 for typ, start, end in templater.scantemplate(pat, raw=True):
1260 for typ, start, end in templater.scantemplate(pat, raw=True):
1261 if typ != b'string':
1261 if typ != b'string':
1262 newname.append(pat[start:end])
1262 newname.append(pat[start:end])
1263 continue
1263 continue
1264 i = start
1264 i = start
1265 while i < end:
1265 while i < end:
1266 n = pat.find(b'%', i, end)
1266 n = pat.find(b'%', i, end)
1267 if n < 0:
1267 if n < 0:
1268 newname.append(stringutil.escapestr(pat[i:end]))
1268 newname.append(stringutil.escapestr(pat[i:end]))
1269 break
1269 break
1270 newname.append(stringutil.escapestr(pat[i:n]))
1270 newname.append(stringutil.escapestr(pat[i:n]))
1271 if n + 2 > end:
1271 if n + 2 > end:
1272 raise error.Abort(
1272 raise error.Abort(
1273 _(b"incomplete format spec in output filename")
1273 _(b"incomplete format spec in output filename")
1274 )
1274 )
1275 c = pat[n + 1 : n + 2]
1275 c = pat[n + 1 : n + 2]
1276 i = n + 2
1276 i = n + 2
1277 try:
1277 try:
1278 newname.append(expander[c])
1278 newname.append(expander[c])
1279 except KeyError:
1279 except KeyError:
1280 raise error.Abort(
1280 raise error.Abort(
1281 _(b"invalid format spec '%%%s' in output filename") % c
1281 _(b"invalid format spec '%%%s' in output filename") % c
1282 )
1282 )
1283 return b''.join(newname)
1283 return b''.join(newname)
1284
1284
1285
1285
1286 def makefilename(ctx, pat, **props):
1286 def makefilename(ctx, pat, **props):
1287 if not pat:
1287 if not pat:
1288 return pat
1288 return pat
1289 tmpl = _buildfntemplate(pat, **props)
1289 tmpl = _buildfntemplate(pat, **props)
1290 # BUG: alias expansion shouldn't be made against template fragments
1290 # BUG: alias expansion shouldn't be made against template fragments
1291 # rewritten from %-format strings, but we have no easy way to partially
1291 # rewritten from %-format strings, but we have no easy way to partially
1292 # disable the expansion.
1292 # disable the expansion.
1293 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1293 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1294
1294
1295
1295
1296 def isstdiofilename(pat):
1296 def isstdiofilename(pat):
1297 """True if the given pat looks like a filename denoting stdin/stdout"""
1297 """True if the given pat looks like a filename denoting stdin/stdout"""
1298 return not pat or pat == b'-'
1298 return not pat or pat == b'-'
1299
1299
1300
1300
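# --- Illustrative usage (not part of the original file) ----------------------
# isstdiofilename() treats both an empty pattern and '-' as "use stdin/stdout",
# which is the contract makefileobj() below relies on; shown here with a local
# copy so the check runs on its own.

def is_stdio_filename(pat):
    return not pat or pat == b'-'

if __name__ == '__main__':
    assert is_stdio_filename(b'')
    assert is_stdio_filename(b'-')
    assert not is_stdio_filename(b'out.patch')
# -----------------------------------------------------------------------------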
1301 class _unclosablefile(object):
1301 class _unclosablefile(object):
1302 def __init__(self, fp):
1302 def __init__(self, fp):
1303 self._fp = fp
1303 self._fp = fp
1304
1304
1305 def close(self):
1305 def close(self):
1306 pass
1306 pass
1307
1307
1308 def __iter__(self):
1308 def __iter__(self):
1309 return iter(self._fp)
1309 return iter(self._fp)
1310
1310
1311 def __getattr__(self, attr):
1311 def __getattr__(self, attr):
1312 return getattr(self._fp, attr)
1312 return getattr(self._fp, attr)
1313
1313
1314 def __enter__(self):
1314 def __enter__(self):
1315 return self
1315 return self
1316
1316
1317 def __exit__(self, exc_type, exc_value, exc_tb):
1317 def __exit__(self, exc_type, exc_value, exc_tb):
1318 pass
1318 pass
1319
1319
1320
1320
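# --- Illustrative sketch (not part of the original file) ---------------------
# Why makefileobj() wraps stdio in _unclosablefile: callers may close() the
# returned object or use it in a 'with' block without shutting down the
# process-wide stream.  io.StringIO stands in for ui.fout here.

import io

class UnclosableStream(object):
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        pass                         # swallow close(); the stream stays usable

    def __getattr__(self, attr):
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        pass

if __name__ == '__main__':
    out = io.StringIO()
    with UnclosableStream(out) as fp:
        fp.write('patch body\n')
        fp.close()                   # ignored; the underlying buffer stays open
    out.write('still open\n')
    print(out.getvalue(), end='')
# -----------------------------------------------------------------------------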
1321 def makefileobj(ctx, pat, mode=b'wb', **props):
1321 def makefileobj(ctx, pat, mode=b'wb', **props):
1322 writable = mode not in (b'r', b'rb')
1322 writable = mode not in (b'r', b'rb')
1323
1323
1324 if isstdiofilename(pat):
1324 if isstdiofilename(pat):
1325 repo = ctx.repo()
1325 repo = ctx.repo()
1326 if writable:
1326 if writable:
1327 fp = repo.ui.fout
1327 fp = repo.ui.fout
1328 else:
1328 else:
1329 fp = repo.ui.fin
1329 fp = repo.ui.fin
1330 return _unclosablefile(fp)
1330 return _unclosablefile(fp)
1331 fn = makefilename(ctx, pat, **props)
1331 fn = makefilename(ctx, pat, **props)
1332 return open(fn, mode)
1332 return open(fn, mode)
1333
1333
1334
1334
1335 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1335 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1336 """opens the changelog, manifest, a filelog or a given revlog"""
1336 """opens the changelog, manifest, a filelog or a given revlog"""
1337 cl = opts[b'changelog']
1337 cl = opts[b'changelog']
1338 mf = opts[b'manifest']
1338 mf = opts[b'manifest']
1339 dir = opts[b'dir']
1339 dir = opts[b'dir']
1340 msg = None
1340 msg = None
1341 if cl and mf:
1341 if cl and mf:
1342 msg = _(b'cannot specify --changelog and --manifest at the same time')
1342 msg = _(b'cannot specify --changelog and --manifest at the same time')
1343 elif cl and dir:
1343 elif cl and dir:
1344 msg = _(b'cannot specify --changelog and --dir at the same time')
1344 msg = _(b'cannot specify --changelog and --dir at the same time')
1345 elif cl or mf or dir:
1345 elif cl or mf or dir:
1346 if file_:
1346 if file_:
1347 msg = _(b'cannot specify filename with --changelog or --manifest')
1347 msg = _(b'cannot specify filename with --changelog or --manifest')
1348 elif not repo:
1348 elif not repo:
1349 msg = _(
1349 msg = _(
1350 b'cannot specify --changelog or --manifest or --dir '
1350 b'cannot specify --changelog or --manifest or --dir '
1351 b'without a repository'
1351 b'without a repository'
1352 )
1352 )
1353 if msg:
1353 if msg:
1354 raise error.Abort(msg)
1354 raise error.Abort(msg)
1355
1355
1356 r = None
1356 r = None
1357 if repo:
1357 if repo:
1358 if cl:
1358 if cl:
1359 r = repo.unfiltered().changelog
1359 r = repo.unfiltered().changelog
1360 elif dir:
1360 elif dir:
1361 if b'treemanifest' not in repo.requirements:
1361 if b'treemanifest' not in repo.requirements:
1362 raise error.Abort(
1362 raise error.Abort(
1363 _(
1363 _(
1364 b"--dir can only be used on repos with "
1364 b"--dir can only be used on repos with "
1365 b"treemanifest enabled"
1365 b"treemanifest enabled"
1366 )
1366 )
1367 )
1367 )
1368 if not dir.endswith(b'/'):
1368 if not dir.endswith(b'/'):
1369 dir = dir + b'/'
1369 dir = dir + b'/'
1370 dirlog = repo.manifestlog.getstorage(dir)
1370 dirlog = repo.manifestlog.getstorage(dir)
1371 if len(dirlog):
1371 if len(dirlog):
1372 r = dirlog
1372 r = dirlog
1373 elif mf:
1373 elif mf:
1374 r = repo.manifestlog.getstorage(b'')
1374 r = repo.manifestlog.getstorage(b'')
1375 elif file_:
1375 elif file_:
1376 filelog = repo.file(file_)
1376 filelog = repo.file(file_)
1377 if len(filelog):
1377 if len(filelog):
1378 r = filelog
1378 r = filelog
1379
1379
1380 # Not all storage may be revlogs. If requested, try to return an actual
1380 # Not all storage may be revlogs. If requested, try to return an actual
1381 # revlog instance.
1381 # revlog instance.
1382 if returnrevlog:
1382 if returnrevlog:
1383 if isinstance(r, revlog.revlog):
1383 if isinstance(r, revlog.revlog):
1384 pass
1384 pass
1385 elif util.safehasattr(r, b'_revlog'):
1385 elif util.safehasattr(r, b'_revlog'):
1386 r = r._revlog # pytype: disable=attribute-error
1386 r = r._revlog # pytype: disable=attribute-error
1387 elif r is not None:
1387 elif r is not None:
1388 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1388 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1389
1389
1390 if not r:
1390 if not r:
1391 if not returnrevlog:
1391 if not returnrevlog:
1392 raise error.Abort(_(b'cannot give path to non-revlog'))
1392 raise error.Abort(_(b'cannot give path to non-revlog'))
1393
1393
1394 if not file_:
1394 if not file_:
1395 raise error.CommandError(cmd, _(b'invalid arguments'))
1395 raise error.CommandError(cmd, _(b'invalid arguments'))
1396 if not os.path.isfile(file_):
1396 if not os.path.isfile(file_):
1397 raise error.Abort(_(b"revlog '%s' not found") % file_)
1397 raise error.Abort(_(b"revlog '%s' not found") % file_)
1398 r = revlog.revlog(
1398 r = revlog.revlog(
1399 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1399 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1400 )
1400 )
1401 return r
1401 return r
1402
1402
1403
1403
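# --- Illustrative sketch (not part of the original file) ---------------------
# The option validation at the top of openstorage(), reduced to booleans so the
# mutual-exclusion rules can be checked without a repository.  It returns the
# error message that would be raised, or None when the combination is valid.

def validate_storage_opts(changelog, manifest, dir_, file_, have_repo=True):
    if changelog and manifest:
        return 'cannot specify --changelog and --manifest at the same time'
    if changelog and dir_:
        return 'cannot specify --changelog and --dir at the same time'
    if changelog or manifest or dir_:
        if file_:
            return 'cannot specify filename with --changelog or --manifest'
        if not have_repo:
            return ('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    return None

if __name__ == '__main__':
    assert validate_storage_opts(True, True, False, None) is not None
    assert validate_storage_opts(True, False, False, None) is None
# -----------------------------------------------------------------------------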
1404 def openrevlog(repo, cmd, file_, opts):
1404 def openrevlog(repo, cmd, file_, opts):
1405 """Obtain a revlog backing storage of an item.
1405 """Obtain a revlog backing storage of an item.
1406
1406
1407 This is similar to ``openstorage()`` except it always returns a revlog.
1407 This is similar to ``openstorage()`` except it always returns a revlog.
1408
1408
1409 In most cases, a caller cares about the main storage object - not the
1409 In most cases, a caller cares about the main storage object - not the
1410 revlog backing it. Therefore, this function should only be used by code
1410 revlog backing it. Therefore, this function should only be used by code
1411 that needs to examine low-level revlog implementation details. e.g. debug
1411 that needs to examine low-level revlog implementation details. e.g. debug
1412 commands.
1412 commands.
1413 """
1413 """
1414 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1414 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1415
1415
1416
1416
1417 def copy(ui, repo, pats, opts, rename=False):
1417 def copy(ui, repo, pats, opts, rename=False):
1418 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1418 check_incompatible_arguments(opts, b'forget', [b'dry_run'])
1419
1419
1420 # called with the repo lock held
1420 # called with the repo lock held
1421 #
1421 #
1422 # hgsep => pathname that uses "/" to separate directories
1422 # hgsep => pathname that uses "/" to separate directories
1423 # ossep => pathname that uses os.sep to separate directories
1423 # ossep => pathname that uses os.sep to separate directories
1424 cwd = repo.getcwd()
1424 cwd = repo.getcwd()
1425 targets = {}
1425 targets = {}
1426 forget = opts.get(b"forget")
1426 forget = opts.get(b"forget")
1427 after = opts.get(b"after")
1427 after = opts.get(b"after")
1428 dryrun = opts.get(b"dry_run")
1428 dryrun = opts.get(b"dry_run")
1429 rev = opts.get(b'at_rev')
1429 rev = opts.get(b'at_rev')
1430 if rev:
1430 if rev:
1431 if not forget and not after:
1431 if not forget and not after:
1432 # TODO: Remove this restriction and make it also create the copy
1432 # TODO: Remove this restriction and make it also create the copy
1433 # targets (and remove the rename source if rename==True).
1433 # targets (and remove the rename source if rename==True).
1434 raise error.Abort(_(b'--at-rev requires --after'))
1434 raise error.Abort(_(b'--at-rev requires --after'))
1435 ctx = scmutil.revsingle(repo, rev)
1435 ctx = scmutil.revsingle(repo, rev)
1436 if len(ctx.parents()) > 1:
1436 if len(ctx.parents()) > 1:
1437 raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
1437 raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
1438 else:
1438 else:
1439 ctx = repo[None]
1439 ctx = repo[None]
1440
1440
1441 pctx = ctx.p1()
1441 pctx = ctx.p1()
1442
1442
1443 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1443 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1444
1444
1445 if forget:
1445 if forget:
1446 if ctx.rev() is None:
1446 if ctx.rev() is None:
1447 new_ctx = ctx
1447 new_ctx = ctx
1448 else:
1448 else:
1449 if len(ctx.parents()) > 1:
1449 if len(ctx.parents()) > 1:
1450 raise error.Abort(_(b'cannot unmark copy in merge commit'))
1450 raise error.Abort(_(b'cannot unmark copy in merge commit'))
1451 # avoid cycle context -> subrepo -> cmdutil
1451 # avoid cycle context -> subrepo -> cmdutil
1452 from . import context
1452 from . import context
1453
1453
1454 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1454 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1455 new_ctx = context.overlayworkingctx(repo)
1455 new_ctx = context.overlayworkingctx(repo)
1456 new_ctx.setbase(ctx.p1())
1456 new_ctx.setbase(ctx.p1())
1457 mergemod.graft(repo, ctx, wctx=new_ctx)
1457 mergemod.graft(repo, ctx, wctx=new_ctx)
1458
1458
1459 match = scmutil.match(ctx, pats, opts)
1459 match = scmutil.match(ctx, pats, opts)
1460
1460
1461 current_copies = ctx.p1copies()
1461 current_copies = ctx.p1copies()
1462 current_copies.update(ctx.p2copies())
1462 current_copies.update(ctx.p2copies())
1463
1463
1464 uipathfn = scmutil.getuipathfn(repo)
1464 uipathfn = scmutil.getuipathfn(repo)
1465 for f in ctx.walk(match):
1465 for f in ctx.walk(match):
1466 if f in current_copies:
1466 if f in current_copies:
1467 new_ctx[f].markcopied(None)
1467 new_ctx[f].markcopied(None)
1468 elif match.exact(f):
1468 elif match.exact(f):
1469 ui.warn(
1469 ui.warn(
1470 _(
1470 _(
1471 b'%s: not unmarking as copy - file is not marked as copied\n'
1471 b'%s: not unmarking as copy - file is not marked as copied\n'
1472 )
1472 )
1473 % uipathfn(f)
1473 % uipathfn(f)
1474 )
1474 )
1475
1475
1476 if ctx.rev() is not None:
1476 if ctx.rev() is not None:
1477 with repo.lock():
1477 with repo.lock():
1478 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1478 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1479 new_node = mem_ctx.commit()
1479 new_node = mem_ctx.commit()
1480
1480
1481 if repo.dirstate.p1() == ctx.node():
1481 if repo.dirstate.p1() == ctx.node():
1482 with repo.dirstate.parentchange():
1482 with repo.dirstate.parentchange():
1483 scmutil.movedirstate(repo, repo[new_node])
1483 scmutil.movedirstate(repo, repo[new_node])
1484 replacements = {ctx.node(): [new_node]}
1484 replacements = {ctx.node(): [new_node]}
1485 scmutil.cleanupnodes(
1485 scmutil.cleanupnodes(
1486 repo, replacements, b'uncopy', fixphase=True
1486 repo, replacements, b'uncopy', fixphase=True
1487 )
1487 )
1488
1488
1489 return
1489 return
1490
1490
1491 pats = scmutil.expandpats(pats)
1491 pats = scmutil.expandpats(pats)
1492 if not pats:
1492 if not pats:
1493 raise error.Abort(_(b'no source or destination specified'))
1493 raise error.Abort(_(b'no source or destination specified'))
1494 if len(pats) == 1:
1494 if len(pats) == 1:
1495 raise error.Abort(_(b'no destination specified'))
1495 raise error.Abort(_(b'no destination specified'))
1496 dest = pats.pop()
1496 dest = pats.pop()
1497
1497
1498 def walkpat(pat):
1498 def walkpat(pat):
1499 srcs = []
1499 srcs = []
1500 # TODO: Inline and simplify the non-working-copy version of this code
1500 # TODO: Inline and simplify the non-working-copy version of this code
1501 # since it shares very little with the working-copy version of it.
1501 # since it shares very little with the working-copy version of it.
1502 ctx_to_walk = ctx if ctx.rev() is None else pctx
1502 ctx_to_walk = ctx if ctx.rev() is None else pctx
1503 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1503 m = scmutil.match(ctx_to_walk, [pat], opts, globbed=True)
1504 for abs in ctx_to_walk.walk(m):
1504 for abs in ctx_to_walk.walk(m):
1505 rel = uipathfn(abs)
1505 rel = uipathfn(abs)
1506 exact = m.exact(abs)
1506 exact = m.exact(abs)
1507 if abs not in ctx:
1507 if abs not in ctx:
1508 if abs in pctx:
1508 if abs in pctx:
1509 if not after:
1509 if not after:
1510 if exact:
1510 if exact:
1511 ui.warn(
1511 ui.warn(
1512 _(
1512 _(
1513 b'%s: not copying - file has been marked '
1513 b'%s: not copying - file has been marked '
1514 b'for remove\n'
1514 b'for remove\n'
1515 )
1515 )
1516 % rel
1516 % rel
1517 )
1517 )
1518 continue
1518 continue
1519 else:
1519 else:
1520 if exact:
1520 if exact:
1521 ui.warn(
1521 ui.warn(
1522 _(b'%s: not copying - file is not managed\n') % rel
1522 _(b'%s: not copying - file is not managed\n') % rel
1523 )
1523 )
1524 continue
1524 continue
1525
1525
1526 # abs: hgsep
1526 # abs: hgsep
1527 # rel: ossep
1527 # rel: ossep
1528 srcs.append((abs, rel, exact))
1528 srcs.append((abs, rel, exact))
1529 return srcs
1529 return srcs
1530
1530
1531 if ctx.rev() is not None:
1531 if ctx.rev() is not None:
1532 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1532 rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
1533 absdest = pathutil.canonpath(repo.root, cwd, dest)
1533 absdest = pathutil.canonpath(repo.root, cwd, dest)
1534 if ctx.hasdir(absdest):
1534 if ctx.hasdir(absdest):
1535 raise error.Abort(
1535 raise error.Abort(
1536 _(b'%s: --at-rev does not support a directory as destination')
1536 _(b'%s: --at-rev does not support a directory as destination')
1537 % uipathfn(absdest)
1537 % uipathfn(absdest)
1538 )
1538 )
1539 if absdest not in ctx:
1539 if absdest not in ctx:
1540 raise error.Abort(
1540 raise error.Abort(
1541 _(b'%s: copy destination does not exist in %s')
1541 _(b'%s: copy destination does not exist in %s')
1542 % (uipathfn(absdest), ctx)
1542 % (uipathfn(absdest), ctx)
1543 )
1543 )
1544
1544
1545 # avoid cycle context -> subrepo -> cmdutil
1545 # avoid cycle context -> subrepo -> cmdutil
1546 from . import context
1546 from . import context
1547
1547
1548 copylist = []
1548 copylist = []
1549 for pat in pats:
1549 for pat in pats:
1550 srcs = walkpat(pat)
1550 srcs = walkpat(pat)
1551 if not srcs:
1551 if not srcs:
1552 continue
1552 continue
1553 for abs, rel, exact in srcs:
1553 for abs, rel, exact in srcs:
1554 copylist.append(abs)
1554 copylist.append(abs)
1555
1555
1556 if not copylist:
1556 if not copylist:
1557 raise error.Abort(_(b'no files to copy'))
1557 raise error.Abort(_(b'no files to copy'))
1558 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1558 # TODO: Add support for `hg cp --at-rev . foo bar dir` and
1559 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1559 # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
1560 # existing functions below.
1560 # existing functions below.
1561 if len(copylist) != 1:
1561 if len(copylist) != 1:
1562 raise error.Abort(_(b'--at-rev requires a single source'))
1562 raise error.Abort(_(b'--at-rev requires a single source'))
1563
1563
1564 new_ctx = context.overlayworkingctx(repo)
1564 new_ctx = context.overlayworkingctx(repo)
1565 new_ctx.setbase(ctx.p1())
1565 new_ctx.setbase(ctx.p1())
1566 mergemod.graft(repo, ctx, wctx=new_ctx)
1566 mergemod.graft(repo, ctx, wctx=new_ctx)
1567
1567
1568 new_ctx.markcopied(absdest, copylist[0])
1568 new_ctx.markcopied(absdest, copylist[0])
1569
1569
1570 with repo.lock():
1570 with repo.lock():
1571 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1571 mem_ctx = new_ctx.tomemctx_for_amend(ctx)
1572 new_node = mem_ctx.commit()
1572 new_node = mem_ctx.commit()
1573
1573
1574 if repo.dirstate.p1() == ctx.node():
1574 if repo.dirstate.p1() == ctx.node():
1575 with repo.dirstate.parentchange():
1575 with repo.dirstate.parentchange():
1576 scmutil.movedirstate(repo, repo[new_node])
1576 scmutil.movedirstate(repo, repo[new_node])
1577 replacements = {ctx.node(): [new_node]}
1577 replacements = {ctx.node(): [new_node]}
1578 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1578 scmutil.cleanupnodes(repo, replacements, b'copy', fixphase=True)
1579
1579
1580 return
1580 return
1581
1581
1582 # abssrc: hgsep
1582 # abssrc: hgsep
1583 # relsrc: ossep
1583 # relsrc: ossep
1584 # otarget: ossep
1584 # otarget: ossep
1585 def copyfile(abssrc, relsrc, otarget, exact):
1585 def copyfile(abssrc, relsrc, otarget, exact):
1586 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1586 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1587 if b'/' in abstarget:
1587 if b'/' in abstarget:
1588 # We cannot normalize abstarget itself, as this would prevent
1588 # We cannot normalize abstarget itself, as this would prevent
1589 # case-only renames, like a => A.
1589 # case-only renames, like a => A.
1590 abspath, absname = abstarget.rsplit(b'/', 1)
1590 abspath, absname = abstarget.rsplit(b'/', 1)
1591 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1591 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1592 reltarget = repo.pathto(abstarget, cwd)
1592 reltarget = repo.pathto(abstarget, cwd)
1593 target = repo.wjoin(abstarget)
1593 target = repo.wjoin(abstarget)
1594 src = repo.wjoin(abssrc)
1594 src = repo.wjoin(abssrc)
1595 state = repo.dirstate[abstarget]
1595 state = repo.dirstate[abstarget]
1596
1596
1597 scmutil.checkportable(ui, abstarget)
1597 scmutil.checkportable(ui, abstarget)
1598
1598
1599 # check for collisions
1599 # check for collisions
1600 prevsrc = targets.get(abstarget)
1600 prevsrc = targets.get(abstarget)
1601 if prevsrc is not None:
1601 if prevsrc is not None:
1602 ui.warn(
1602 ui.warn(
1603 _(b'%s: not overwriting - %s collides with %s\n')
1603 _(b'%s: not overwriting - %s collides with %s\n')
1604 % (
1604 % (
1605 reltarget,
1605 reltarget,
1606 repo.pathto(abssrc, cwd),
1606 repo.pathto(abssrc, cwd),
1607 repo.pathto(prevsrc, cwd),
1607 repo.pathto(prevsrc, cwd),
1608 )
1608 )
1609 )
1609 )
1610 return True # report a failure
1610 return True # report a failure
1611
1611
1612 # check for overwrites
1612 # check for overwrites
1613 exists = os.path.lexists(target)
1613 exists = os.path.lexists(target)
1614 samefile = False
1614 samefile = False
1615 if exists and abssrc != abstarget:
1615 if exists and abssrc != abstarget:
1616 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1616 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1617 abstarget
1617 abstarget
1618 ):
1618 ):
1619 if not rename:
1619 if not rename:
1620 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1620 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1621 return True # report a failure
1621 return True # report a failure
1622 exists = False
1622 exists = False
1623 samefile = True
1623 samefile = True
1624
1624
1625 if not after and exists or after and state in b'mn':
1625 if not after and exists or after and state in b'mn':
1626 if not opts[b'force']:
1626 if not opts[b'force']:
1627 if state in b'mn':
1627 if state in b'mn':
1628 msg = _(b'%s: not overwriting - file already committed\n')
1628 msg = _(b'%s: not overwriting - file already committed\n')
1629 if after:
1629 if after:
1630 flags = b'--after --force'
1630 flags = b'--after --force'
1631 else:
1631 else:
1632 flags = b'--force'
1632 flags = b'--force'
1633 if rename:
1633 if rename:
1634 hint = (
1634 hint = (
1635 _(
1635 _(
1636 b"('hg rename %s' to replace the file by "
1636 b"('hg rename %s' to replace the file by "
1637 b'recording a rename)\n'
1637 b'recording a rename)\n'
1638 )
1638 )
1639 % flags
1639 % flags
1640 )
1640 )
1641 else:
1641 else:
1642 hint = (
1642 hint = (
1643 _(
1643 _(
1644 b"('hg copy %s' to replace the file by "
1644 b"('hg copy %s' to replace the file by "
1645 b'recording a copy)\n'
1645 b'recording a copy)\n'
1646 )
1646 )
1647 % flags
1647 % flags
1648 )
1648 )
1649 else:
1649 else:
1650 msg = _(b'%s: not overwriting - file exists\n')
1650 msg = _(b'%s: not overwriting - file exists\n')
1651 if rename:
1651 if rename:
1652 hint = _(
1652 hint = _(
1653 b"('hg rename --after' to record the rename)\n"
1653 b"('hg rename --after' to record the rename)\n"
1654 )
1654 )
1655 else:
1655 else:
1656 hint = _(b"('hg copy --after' to record the copy)\n")
1656 hint = _(b"('hg copy --after' to record the copy)\n")
1657 ui.warn(msg % reltarget)
1657 ui.warn(msg % reltarget)
1658 ui.warn(hint)
1658 ui.warn(hint)
1659 return True # report a failure
1659 return True # report a failure
1660
1660
1661 if after:
1661 if after:
1662 if not exists:
1662 if not exists:
1663 if rename:
1663 if rename:
1664 ui.warn(
1664 ui.warn(
1665 _(b'%s: not recording move - %s does not exist\n')
1665 _(b'%s: not recording move - %s does not exist\n')
1666 % (relsrc, reltarget)
1666 % (relsrc, reltarget)
1667 )
1667 )
1668 else:
1668 else:
1669 ui.warn(
1669 ui.warn(
1670 _(b'%s: not recording copy - %s does not exist\n')
1670 _(b'%s: not recording copy - %s does not exist\n')
1671 % (relsrc, reltarget)
1671 % (relsrc, reltarget)
1672 )
1672 )
1673 return True # report a failure
1673 return True # report a failure
1674 elif not dryrun:
1674 elif not dryrun:
1675 try:
1675 try:
1676 if exists:
1676 if exists:
1677 os.unlink(target)
1677 os.unlink(target)
1678 targetdir = os.path.dirname(target) or b'.'
1678 targetdir = os.path.dirname(target) or b'.'
1679 if not os.path.isdir(targetdir):
1679 if not os.path.isdir(targetdir):
1680 os.makedirs(targetdir)
1680 os.makedirs(targetdir)
1681 if samefile:
1681 if samefile:
1682 tmp = target + b"~hgrename"
1682 tmp = target + b"~hgrename"
1683 os.rename(src, tmp)
1683 os.rename(src, tmp)
1684 os.rename(tmp, target)
1684 os.rename(tmp, target)
1685 else:
1685 else:
1686 # Preserve stat info on renames, not on copies; this matches
1686 # Preserve stat info on renames, not on copies; this matches
1687 # Linux CLI behavior.
1687 # Linux CLI behavior.
1688 util.copyfile(src, target, copystat=rename)
1688 util.copyfile(src, target, copystat=rename)
1689 srcexists = True
1689 srcexists = True
1690 except IOError as inst:
1690 except IOError as inst:
1691 if inst.errno == errno.ENOENT:
1691 if inst.errno == errno.ENOENT:
1692 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1692 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1693 srcexists = False
1693 srcexists = False
1694 else:
1694 else:
1695 ui.warn(
1695 ui.warn(
1696 _(b'%s: cannot copy - %s\n')
1696 _(b'%s: cannot copy - %s\n')
1697 % (relsrc, encoding.strtolocal(inst.strerror))
1697 % (relsrc, encoding.strtolocal(inst.strerror))
1698 )
1698 )
1699 return True # report a failure
1699 return True # report a failure
1700
1700
1701 if ui.verbose or not exact:
1701 if ui.verbose or not exact:
1702 if rename:
1702 if rename:
1703 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1703 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1704 else:
1704 else:
1705 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1705 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1706
1706
1707 targets[abstarget] = abssrc
1707 targets[abstarget] = abssrc
1708
1708
1709 # fix up dirstate
1709 # fix up dirstate
1710 scmutil.dirstatecopy(
1710 scmutil.dirstatecopy(
1711 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1711 ui, repo, ctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1712 )
1712 )
1713 if rename and not dryrun:
1713 if rename and not dryrun:
1714 if not after and srcexists and not samefile:
1714 if not after and srcexists and not samefile:
1715 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1715 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1716 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1716 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1717 ctx.forget([abssrc])
1717 ctx.forget([abssrc])
1718
1718
1719 # pat: ossep
1719 # pat: ossep
1720 # dest ossep
1720 # dest ossep
1721 # srcs: list of (hgsep, hgsep, ossep, bool)
1721 # srcs: list of (hgsep, hgsep, ossep, bool)
1722 # return: function that takes hgsep and returns ossep
1722 # return: function that takes hgsep and returns ossep
1723 def targetpathfn(pat, dest, srcs):
1723 def targetpathfn(pat, dest, srcs):
1724 if os.path.isdir(pat):
1724 if os.path.isdir(pat):
1725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1726 abspfx = util.localpath(abspfx)
1726 abspfx = util.localpath(abspfx)
1727 if destdirexists:
1727 if destdirexists:
1728 striplen = len(os.path.split(abspfx)[0])
1728 striplen = len(os.path.split(abspfx)[0])
1729 else:
1729 else:
1730 striplen = len(abspfx)
1730 striplen = len(abspfx)
1731 if striplen:
1731 if striplen:
1732 striplen += len(pycompat.ossep)
1732 striplen += len(pycompat.ossep)
1733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1734 elif destdirexists:
1734 elif destdirexists:
1735 res = lambda p: os.path.join(
1735 res = lambda p: os.path.join(
1736 dest, os.path.basename(util.localpath(p))
1736 dest, os.path.basename(util.localpath(p))
1737 )
1737 )
1738 else:
1738 else:
1739 res = lambda p: dest
1739 res = lambda p: dest
1740 return res
1740 return res
1741
1741
1742 # pat: ossep
1742 # pat: ossep
1743 # dest ossep
1743 # dest ossep
1744 # srcs: list of (hgsep, hgsep, ossep, bool)
1744 # srcs: list of (hgsep, hgsep, ossep, bool)
1745 # return: function that takes hgsep and returns ossep
1745 # return: function that takes hgsep and returns ossep
1746 def targetpathafterfn(pat, dest, srcs):
1746 def targetpathafterfn(pat, dest, srcs):
1747 if matchmod.patkind(pat):
1747 if matchmod.patkind(pat):
1748 # a mercurial pattern
1748 # a mercurial pattern
1749 res = lambda p: os.path.join(
1749 res = lambda p: os.path.join(
1750 dest, os.path.basename(util.localpath(p))
1750 dest, os.path.basename(util.localpath(p))
1751 )
1751 )
1752 else:
1752 else:
1753 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1753 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1754 if len(abspfx) < len(srcs[0][0]):
1754 if len(abspfx) < len(srcs[0][0]):
1755 # A directory. Either the target path contains the last
1755 # A directory. Either the target path contains the last
1756 # component of the source path or it does not.
1756 # component of the source path or it does not.
1757 def evalpath(striplen):
1757 def evalpath(striplen):
1758 score = 0
1758 score = 0
1759 for s in srcs:
1759 for s in srcs:
1760 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1760 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1761 if os.path.lexists(t):
1761 if os.path.lexists(t):
1762 score += 1
1762 score += 1
1763 return score
1763 return score
1764
1764
1765 abspfx = util.localpath(abspfx)
1765 abspfx = util.localpath(abspfx)
1766 striplen = len(abspfx)
1766 striplen = len(abspfx)
1767 if striplen:
1767 if striplen:
1768 striplen += len(pycompat.ossep)
1768 striplen += len(pycompat.ossep)
1769 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1769 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1770 score = evalpath(striplen)
1770 score = evalpath(striplen)
1771 striplen1 = len(os.path.split(abspfx)[0])
1771 striplen1 = len(os.path.split(abspfx)[0])
1772 if striplen1:
1772 if striplen1:
1773 striplen1 += len(pycompat.ossep)
1773 striplen1 += len(pycompat.ossep)
1774 if evalpath(striplen1) > score:
1774 if evalpath(striplen1) > score:
1775 striplen = striplen1
1775 striplen = striplen1
1776 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1776 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1777 else:
1777 else:
1778 # a file
1778 # a file
1779 if destdirexists:
1779 if destdirexists:
1780 res = lambda p: os.path.join(
1780 res = lambda p: os.path.join(
1781 dest, os.path.basename(util.localpath(p))
1781 dest, os.path.basename(util.localpath(p))
1782 )
1782 )
1783 else:
1783 else:
1784 res = lambda p: dest
1784 res = lambda p: dest
1785 return res
1785 return res
1786
1786
1787 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1787 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1788 if not destdirexists:
1788 if not destdirexists:
1789 if len(pats) > 1 or matchmod.patkind(pats[0]):
1789 if len(pats) > 1 or matchmod.patkind(pats[0]):
1790 raise error.Abort(
1790 raise error.Abort(
1791 _(
1791 _(
1792 b'with multiple sources, destination must be an '
1792 b'with multiple sources, destination must be an '
1793 b'existing directory'
1793 b'existing directory'
1794 )
1794 )
1795 )
1795 )
1796 if util.endswithsep(dest):
1796 if util.endswithsep(dest):
1797 raise error.Abort(_(b'destination %s is not a directory') % dest)
1797 raise error.Abort(_(b'destination %s is not a directory') % dest)
1798
1798
1799 tfn = targetpathfn
1799 tfn = targetpathfn
1800 if after:
1800 if after:
1801 tfn = targetpathafterfn
1801 tfn = targetpathafterfn
1802 copylist = []
1802 copylist = []
1803 for pat in pats:
1803 for pat in pats:
1804 srcs = walkpat(pat)
1804 srcs = walkpat(pat)
1805 if not srcs:
1805 if not srcs:
1806 continue
1806 continue
1807 copylist.append((tfn(pat, dest, srcs), srcs))
1807 copylist.append((tfn(pat, dest, srcs), srcs))
1808 if not copylist:
1808 if not copylist:
1809 raise error.Abort(_(b'no files to copy'))
1809 raise error.Abort(_(b'no files to copy'))
1810
1810
1811 errors = 0
1811 errors = 0
1812 for targetpath, srcs in copylist:
1812 for targetpath, srcs in copylist:
1813 for abssrc, relsrc, exact in srcs:
1813 for abssrc, relsrc, exact in srcs:
1814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1815 errors += 1
1815 errors += 1
1816
1816
1817 return errors != 0
1817 return errors != 0
1818
1818
1819
1819
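# --- Illustrative sketch (not part of the original file) ---------------------
# The destination mapping that copy()'s targetpathfn() computes for the common
# cases: copying into an existing directory keeps the source basename, while a
# plain destination is used verbatim.  Paths are plain strings here.

import os

def target_path(src, dest, dest_is_dir):
    if dest_is_dir:
        return os.path.join(dest, os.path.basename(src))
    return dest

if __name__ == '__main__':
    assert target_path('a/b.txt', 'out', True) == os.path.join('out', 'b.txt')
    assert target_path('a/b.txt', 'c.txt', False) == 'c.txt'
# -----------------------------------------------------------------------------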
1820 ## facility to let extensions process additional data for an imported patch
1820 ## facility to let extensions process additional data for an imported patch
1821 # list of identifier to be executed in order
1821 # list of identifier to be executed in order
1822 extrapreimport = [] # run before commit
1822 extrapreimport = [] # run before commit
1823 extrapostimport = [] # run after commit
1823 extrapostimport = [] # run after commit
1824 # mapping from identifier to actual import function
1824 # mapping from identifier to actual import function
1825 #
1825 #
1826 # 'preimport' functions are run before the commit is made and are provided the following
1826 # 'preimport' functions are run before the commit is made and are provided the following
1827 # arguments:
1827 # arguments:
1828 # - repo: the localrepository instance,
1828 # - repo: the localrepository instance,
1829 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1829 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1830 # - extra: the future extra dictionary of the changeset, please mutate it,
1830 # - extra: the future extra dictionary of the changeset, please mutate it,
1831 # - opts: the import options.
1831 # - opts: the import options.
1832 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1832 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1833 # mutation of the in-memory commit and more. Feel free to rework the code to get
1833 # mutation of the in-memory commit and more. Feel free to rework the code to get
1834 # there.
1834 # there.
1835 extrapreimportmap = {}
1835 extrapreimportmap = {}
1836 # 'postimport' functions are run after the commit is made and are provided the following
1836 # 'postimport' functions are run after the commit is made and are provided the following
1837 # argument:
1837 # argument:
1838 # - ctx: the changectx created by import.
1838 # - ctx: the changectx created by import.
1839 extrapostimportmap = {}
1839 extrapostimportmap = {}
1840
1840
1841
1841
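# --- Illustrative sketch (not part of the original file) ---------------------
# How an extension would plug into the 'preimport' hooks described above.
# 'myext' and _stampimport are hypothetical names; the callable signature
# follows the contract documented in the comments (repo, patchdata, extra,
# opts), and the registration assumes it runs as a Mercurial extension where
# this module is importable as mercurial.cmdutil.

def _stampimport(repo, patchdata, extra, opts):
    # record where the imported changeset came from in its extra dict
    extra[b'imported-from'] = patchdata.get(b'filename', b'<unknown>')

def extsetup(ui):
    from mercurial import cmdutil
    cmdutil.extrapreimport.append(b'myext')
    cmdutil.extrapreimportmap[b'myext'] = _stampimport
# -----------------------------------------------------------------------------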
1842 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1842 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1843 """Utility function used by commands.import to import a single patch
1843 """Utility function used by commands.import to import a single patch
1844
1844
1845 This function is explicitly defined here to help the evolve extension to
1845 This function is explicitly defined here to help the evolve extension to
1846 wrap this part of the import logic.
1846 wrap this part of the import logic.
1847
1847
1848 The API is currently a bit ugly because it is a simple code translation from
1848 The API is currently a bit ugly because it is a simple code translation from
1849 the import command. Feel free to make it better.
1849 the import command. Feel free to make it better.
1850
1850
1851 :patchdata: a dictionary containing parsed patch data (such as from
1851 :patchdata: a dictionary containing parsed patch data (such as from
1852 ``patch.extract()``)
1852 ``patch.extract()``)
1853 :parents: nodes that will be parent of the created commit
1853 :parents: nodes that will be parent of the created commit
1854 :opts: the full dict of option passed to the import command
1854 :opts: the full dict of option passed to the import command
1855 :msgs: list to save commit message to.
1855 :msgs: list to save commit message to.
1856 (used in case we need to save it when failing)
1856 (used in case we need to save it when failing)
1857 :updatefunc: a function that updates a repo to a given node
1857 :updatefunc: a function that updates a repo to a given node
1858 updatefunc(<repo>, <node>)
1858 updatefunc(<repo>, <node>)
1859 """
1859 """
1860 # avoid cycle context -> subrepo -> cmdutil
1860 # avoid cycle context -> subrepo -> cmdutil
1861 from . import context
1861 from . import context
1862
1862
1863 tmpname = patchdata.get(b'filename')
1863 tmpname = patchdata.get(b'filename')
1864 message = patchdata.get(b'message')
1864 message = patchdata.get(b'message')
1865 user = opts.get(b'user') or patchdata.get(b'user')
1865 user = opts.get(b'user') or patchdata.get(b'user')
1866 date = opts.get(b'date') or patchdata.get(b'date')
1866 date = opts.get(b'date') or patchdata.get(b'date')
1867 branch = patchdata.get(b'branch')
1867 branch = patchdata.get(b'branch')
1868 nodeid = patchdata.get(b'nodeid')
1868 nodeid = patchdata.get(b'nodeid')
1869 p1 = patchdata.get(b'p1')
1869 p1 = patchdata.get(b'p1')
1870 p2 = patchdata.get(b'p2')
1870 p2 = patchdata.get(b'p2')
1871
1871
1872 nocommit = opts.get(b'no_commit')
1872 nocommit = opts.get(b'no_commit')
1873 importbranch = opts.get(b'import_branch')
1873 importbranch = opts.get(b'import_branch')
1874 update = not opts.get(b'bypass')
1874 update = not opts.get(b'bypass')
1875 strip = opts[b"strip"]
1875 strip = opts[b"strip"]
1876 prefix = opts[b"prefix"]
1876 prefix = opts[b"prefix"]
1877 sim = float(opts.get(b'similarity') or 0)
1877 sim = float(opts.get(b'similarity') or 0)
1878
1878
1879 if not tmpname:
1879 if not tmpname:
1880 return None, None, False
1880 return None, None, False
1881
1881
1882 rejects = False
1882 rejects = False
1883
1883
1884 cmdline_message = logmessage(ui, opts)
1884 cmdline_message = logmessage(ui, opts)
1885 if cmdline_message:
1885 if cmdline_message:
1886 # pickup the cmdline msg
1886 # pickup the cmdline msg
1887 message = cmdline_message
1887 message = cmdline_message
1888 elif message:
1888 elif message:
1889 # pickup the patch msg
1889 # pickup the patch msg
1890 message = message.strip()
1890 message = message.strip()
1891 else:
1891 else:
1892 # launch the editor
1892 # launch the editor
1893 message = None
1893 message = None
1894 ui.debug(b'message:\n%s\n' % (message or b''))
1894 ui.debug(b'message:\n%s\n' % (message or b''))
1895
1895
1896 if len(parents) == 1:
1896 if len(parents) == 1:
1897 parents.append(repo[nullid])
1897 parents.append(repo[nullid])
1898 if opts.get(b'exact'):
1898 if opts.get(b'exact'):
1899 if not nodeid or not p1:
1899 if not nodeid or not p1:
1900 raise error.Abort(_(b'not a Mercurial patch'))
1900 raise error.Abort(_(b'not a Mercurial patch'))
1901 p1 = repo[p1]
1901 p1 = repo[p1]
1902 p2 = repo[p2 or nullid]
1902 p2 = repo[p2 or nullid]
1903 elif p2:
1903 elif p2:
1904 try:
1904 try:
1905 p1 = repo[p1]
1905 p1 = repo[p1]
1906 p2 = repo[p2]
1906 p2 = repo[p2]
1907 # Without any options, consider p2 only if the
1907 # Without any options, consider p2 only if the
1908 # patch is being applied on top of the recorded
1908 # patch is being applied on top of the recorded
1909 # first parent.
1909 # first parent.
1910 if p1 != parents[0]:
1910 if p1 != parents[0]:
1911 p1 = parents[0]
1911 p1 = parents[0]
1912 p2 = repo[nullid]
1912 p2 = repo[nullid]
1913 except error.RepoError:
1913 except error.RepoError:
1914 p1, p2 = parents
1914 p1, p2 = parents
1915 if p2.node() == nullid:
1915 if p2.node() == nullid:
1916 ui.warn(
1916 ui.warn(
1917 _(
1917 _(
1918 b"warning: import the patch as a normal revision\n"
1918 b"warning: import the patch as a normal revision\n"
1919 b"(use --exact to import the patch as a merge)\n"
1919 b"(use --exact to import the patch as a merge)\n"
1920 )
1920 )
1921 )
1921 )
1922 else:
1922 else:
1923 p1, p2 = parents
1923 p1, p2 = parents
1924
1924
1925 n = None
1925 n = None
1926 if update:
1926 if update:
1927 if p1 != parents[0]:
1927 if p1 != parents[0]:
1928 updatefunc(repo, p1.node())
1928 updatefunc(repo, p1.node())
1929 if p2 != parents[1]:
1929 if p2 != parents[1]:
1930 repo.setparents(p1.node(), p2.node())
1930 repo.setparents(p1.node(), p2.node())
1931
1931
1932 if opts.get(b'exact') or importbranch:
1932 if opts.get(b'exact') or importbranch:
1933 repo.dirstate.setbranch(branch or b'default')
1933 repo.dirstate.setbranch(branch or b'default')
1934
1934
1935 partial = opts.get(b'partial', False)
1935 partial = opts.get(b'partial', False)
1936 files = set()
1936 files = set()
1937 try:
1937 try:
1938 patch.patch(
1938 patch.patch(
1939 ui,
1939 ui,
1940 repo,
1940 repo,
1941 tmpname,
1941 tmpname,
1942 strip=strip,
1942 strip=strip,
1943 prefix=prefix,
1943 prefix=prefix,
1944 files=files,
1944 files=files,
1945 eolmode=None,
1945 eolmode=None,
1946 similarity=sim / 100.0,
1946 similarity=sim / 100.0,
1947 )
1947 )
1948 except error.PatchError as e:
1948 except error.PatchError as e:
1949 if not partial:
1949 if not partial:
1950 raise error.Abort(pycompat.bytestr(e))
1950 raise error.Abort(pycompat.bytestr(e))
1951 if partial:
1951 if partial:
1952 rejects = True
1952 rejects = True
1953
1953
1954 files = list(files)
1954 files = list(files)
1955 if nocommit:
1955 if nocommit:
1956 if message:
1956 if message:
1957 msgs.append(message)
1957 msgs.append(message)
1958 else:
1958 else:
1959 if opts.get(b'exact') or p2:
1959 if opts.get(b'exact') or p2:
1960 # If you got here, you either used --force and know what
1960 # If you got here, you either used --force and know what
1961 # you are doing, or used --exact or a merge patch while
1961 # you are doing, or used --exact or a merge patch while
1962 # being updated to its first parent.
1962 # being updated to its first parent.
1963 m = None
1963 m = None
1964 else:
1964 else:
1965 m = scmutil.matchfiles(repo, files or [])
1965 m = scmutil.matchfiles(repo, files or [])
1966 editform = mergeeditform(repo[None], b'import.normal')
1966 editform = mergeeditform(repo[None], b'import.normal')
1967 if opts.get(b'exact'):
1967 if opts.get(b'exact'):
1968 editor = None
1968 editor = None
1969 else:
1969 else:
1970 editor = getcommiteditor(
1970 editor = getcommiteditor(
1971 editform=editform, **pycompat.strkwargs(opts)
1971 editform=editform, **pycompat.strkwargs(opts)
1972 )
1972 )
1973 extra = {}
1973 extra = {}
1974 for idfunc in extrapreimport:
1974 for idfunc in extrapreimport:
1975 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1975 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1976 overrides = {}
1976 overrides = {}
1977 if partial:
1977 if partial:
1978 overrides[(b'ui', b'allowemptycommit')] = True
1978 overrides[(b'ui', b'allowemptycommit')] = True
1979 if opts.get(b'secret'):
1979 if opts.get(b'secret'):
1980 overrides[(b'phases', b'new-commit')] = b'secret'
1980 overrides[(b'phases', b'new-commit')] = b'secret'
1981 with repo.ui.configoverride(overrides, b'import'):
1981 with repo.ui.configoverride(overrides, b'import'):
1982 n = repo.commit(
1982 n = repo.commit(
1983 message, user, date, match=m, editor=editor, extra=extra
1983 message, user, date, match=m, editor=editor, extra=extra
1984 )
1984 )
1985 for idfunc in extrapostimport:
1985 for idfunc in extrapostimport:
1986 extrapostimportmap[idfunc](repo[n])
1986 extrapostimportmap[idfunc](repo[n])
1987 else:
1987 else:
1988 if opts.get(b'exact') or importbranch:
1988 if opts.get(b'exact') or importbranch:
1989 branch = branch or b'default'
1989 branch = branch or b'default'
1990 else:
1990 else:
1991 branch = p1.branch()
1991 branch = p1.branch()
1992 store = patch.filestore()
1992 store = patch.filestore()
1993 try:
1993 try:
1994 files = set()
1994 files = set()
1995 try:
1995 try:
1996 patch.patchrepo(
1996 patch.patchrepo(
1997 ui,
1997 ui,
1998 repo,
1998 repo,
1999 p1,
1999 p1,
2000 store,
2000 store,
2001 tmpname,
2001 tmpname,
2002 strip,
2002 strip,
2003 prefix,
2003 prefix,
2004 files,
2004 files,
2005 eolmode=None,
2005 eolmode=None,
2006 )
2006 )
2007 except error.PatchError as e:
2007 except error.PatchError as e:
2008 raise error.Abort(stringutil.forcebytestr(e))
2008 raise error.Abort(stringutil.forcebytestr(e))
2009 if opts.get(b'exact'):
2009 if opts.get(b'exact'):
2010 editor = None
2010 editor = None
2011 else:
2011 else:
2012 editor = getcommiteditor(editform=b'import.bypass')
2012 editor = getcommiteditor(editform=b'import.bypass')
2013 memctx = context.memctx(
2013 memctx = context.memctx(
2014 repo,
2014 repo,
2015 (p1.node(), p2.node()),
2015 (p1.node(), p2.node()),
2016 message,
2016 message,
2017 files=files,
2017 files=files,
2018 filectxfn=store,
2018 filectxfn=store,
2019 user=user,
2019 user=user,
2020 date=date,
2020 date=date,
2021 branch=branch,
2021 branch=branch,
2022 editor=editor,
2022 editor=editor,
2023 )
2023 )
2024
2024
2025 overrides = {}
2025 overrides = {}
2026 if opts.get(b'secret'):
2026 if opts.get(b'secret'):
2027 overrides[(b'phases', b'new-commit')] = b'secret'
2027 overrides[(b'phases', b'new-commit')] = b'secret'
2028 with repo.ui.configoverride(overrides, b'import'):
2028 with repo.ui.configoverride(overrides, b'import'):
2029 n = memctx.commit()
2029 n = memctx.commit()
2030 finally:
2030 finally:
2031 store.close()
2031 store.close()
2032 if opts.get(b'exact') and nocommit:
2032 if opts.get(b'exact') and nocommit:
2033 # --exact with --no-commit is still useful in that it applies the
2033 # --exact with --no-commit is still useful in that it applies the
2034 # merge and branch metadata
2034 # merge and branch metadata
2035 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2035 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
2036 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2036 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
2037 raise error.Abort(_(b'patch is damaged or loses information'))
2037 raise error.Abort(_(b'patch is damaged or loses information'))
2038 msg = _(b'applied to working directory')
2038 msg = _(b'applied to working directory')
2039 if n:
2039 if n:
2040 # i18n: refers to a short changeset id
2040 # i18n: refers to a short changeset id
2041 msg = _(b'created %s') % short(n)
2041 msg = _(b'created %s') % short(n)
2042 return msg, n, rejects
2042 return msg, n, rejects
2043
2043
2044
2044
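# A minimal, hypothetical sketch of the extrapreimport/extrapostimport hooks
# invoked in the commit branch above (the registries are defined earlier in
# this file). The b'myext' identifier, the b'myext_source' extra key and the
# note message are illustrative assumptions, not part of core Mercurial.
def _example_registerimporthooks():
    def recordsource(repo, patchdata, extra, opts):
        # stash the original node id of the imported patch in the commit extras
        nodeid = patchdata.get(b'nodeid')
        if nodeid:
            extra[b'myext_source'] = nodeid

    def announce(ctx):
        ctx.repo().ui.note(b'imported %s\n' % ctx.hex()[:12])

    extrapreimport.append(b'myext')
    extrapreimportmap[b'myext'] = recordsource
    extrapostimport.append(b'myext')
    extrapostimportmap[b'myext'] = announce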
2045 # facility to let extensions include additional data in an exported patch
2045 # facility to let extensions include additional data in an exported patch
2046 # list of identifiers to be executed in order
2046 # list of identifiers to be executed in order
2047 extraexport = []
2047 extraexport = []
2048 # mapping from identifier to actual export function
2048 # mapping from identifier to actual export function
2049 # function has to return a string to be added to the header or None
2049 # function has to return a string to be added to the header or None
2050 # it is given two arguments (sequencenumber, changectx)
2050 # it is given two arguments (sequencenumber, changectx)
2051 extraexportmap = {}
2051 extraexportmap = {}
2052
2052
2053
2053
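# A minimal, hypothetical sketch of the registration described in the comments
# above: an extension adds one extra "# ..." header line per exported
# changeset. The b'myheader' identifier and the header text are illustrative
# assumptions.
def _example_registerexportheader():
    def myheader(seqno, ctx):
        # return bytes for an extra "# <text>" line, or None to add nothing
        if ctx.phase():  # say, only annotate non-public changesets
            return b'MyHeader patch %d, phase %d' % (seqno, ctx.phase())
        return None

    extraexport.append(b'myheader')
    extraexportmap[b'myheader'] = myheader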
2054 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
2054 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
2055 node = scmutil.binnode(ctx)
2055 node = scmutil.binnode(ctx)
2056 parents = [p.node() for p in ctx.parents() if p]
2056 parents = [p.node() for p in ctx.parents() if p]
2057 branch = ctx.branch()
2057 branch = ctx.branch()
2058 if switch_parent:
2058 if switch_parent:
2059 parents.reverse()
2059 parents.reverse()
2060
2060
2061 if parents:
2061 if parents:
2062 prev = parents[0]
2062 prev = parents[0]
2063 else:
2063 else:
2064 prev = nullid
2064 prev = nullid
2065
2065
2066 fm.context(ctx=ctx)
2066 fm.context(ctx=ctx)
2067 fm.plain(b'# HG changeset patch\n')
2067 fm.plain(b'# HG changeset patch\n')
2068 fm.write(b'user', b'# User %s\n', ctx.user())
2068 fm.write(b'user', b'# User %s\n', ctx.user())
2069 fm.plain(b'# Date %d %d\n' % ctx.date())
2069 fm.plain(b'# Date %d %d\n' % ctx.date())
2070 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
2070 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
2071 fm.condwrite(
2071 fm.condwrite(
2072 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
2072 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
2073 )
2073 )
2074 fm.write(b'node', b'# Node ID %s\n', hex(node))
2074 fm.write(b'node', b'# Node ID %s\n', hex(node))
2075 fm.plain(b'# Parent %s\n' % hex(prev))
2075 fm.plain(b'# Parent %s\n' % hex(prev))
2076 if len(parents) > 1:
2076 if len(parents) > 1:
2077 fm.plain(b'# Parent %s\n' % hex(parents[1]))
2077 fm.plain(b'# Parent %s\n' % hex(parents[1]))
2078 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
2078 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
2079
2079
2080 # TODO: redesign extraexportmap function to support formatter
2080 # TODO: redesign extraexportmap function to support formatter
2081 for headerid in extraexport:
2081 for headerid in extraexport:
2082 header = extraexportmap[headerid](seqno, ctx)
2082 header = extraexportmap[headerid](seqno, ctx)
2083 if header is not None:
2083 if header is not None:
2084 fm.plain(b'# %s\n' % header)
2084 fm.plain(b'# %s\n' % header)
2085
2085
2086 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
2086 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
2087 fm.plain(b'\n')
2087 fm.plain(b'\n')
2088
2088
2089 if fm.isplain():
2089 if fm.isplain():
2090 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
2090 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
2091 for chunk, label in chunkiter:
2091 for chunk, label in chunkiter:
2092 fm.plain(chunk, label=label)
2092 fm.plain(chunk, label=label)
2093 else:
2093 else:
2094 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
2094 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
2095 # TODO: make it structured?
2095 # TODO: make it structured?
2096 fm.data(diff=b''.join(chunkiter))
2096 fm.data(diff=b''.join(chunkiter))
2097
2097
2098
2098
2099 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
2099 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
2100 """Export changesets to stdout or a single file"""
2100 """Export changesets to stdout or a single file"""
2101 for seqno, rev in enumerate(revs, 1):
2101 for seqno, rev in enumerate(revs, 1):
2102 ctx = repo[rev]
2102 ctx = repo[rev]
2103 if not dest.startswith(b'<'):
2103 if not dest.startswith(b'<'):
2104 repo.ui.note(b"%s\n" % dest)
2104 repo.ui.note(b"%s\n" % dest)
2105 fm.startitem()
2105 fm.startitem()
2106 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2106 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
2107
2107
2108
2108
2109 def _exportfntemplate(
2109 def _exportfntemplate(
2110 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
2110 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
2111 ):
2111 ):
2112 """Export changesets to possibly multiple files"""
2112 """Export changesets to possibly multiple files"""
2113 total = len(revs)
2113 total = len(revs)
2114 revwidth = max(len(str(rev)) for rev in revs)
2114 revwidth = max(len(str(rev)) for rev in revs)
2115 filemap = util.sortdict() # filename: [(seqno, rev), ...]
2115 filemap = util.sortdict() # filename: [(seqno, rev), ...]
2116
2116
2117 for seqno, rev in enumerate(revs, 1):
2117 for seqno, rev in enumerate(revs, 1):
2118 ctx = repo[rev]
2118 ctx = repo[rev]
2119 dest = makefilename(
2119 dest = makefilename(
2120 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
2120 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
2121 )
2121 )
2122 filemap.setdefault(dest, []).append((seqno, rev))
2122 filemap.setdefault(dest, []).append((seqno, rev))
2123
2123
2124 for dest in filemap:
2124 for dest in filemap:
2125 with formatter.maybereopen(basefm, dest) as fm:
2125 with formatter.maybereopen(basefm, dest) as fm:
2126 repo.ui.note(b"%s\n" % dest)
2126 repo.ui.note(b"%s\n" % dest)
2127 for seqno, rev in filemap[dest]:
2127 for seqno, rev in filemap[dest]:
2128 fm.startitem()
2128 fm.startitem()
2129 ctx = repo[rev]
2129 ctx = repo[rev]
2130 _exportsingle(
2130 _exportsingle(
2131 repo, ctx, fm, match, switch_parent, seqno, diffopts
2131 repo, ctx, fm, match, switch_parent, seqno, diffopts
2132 )
2132 )
2133
2133
2134
2134
2135 def _prefetchchangedfiles(repo, revs, match):
2135 def _prefetchchangedfiles(repo, revs, match):
2136 allfiles = set()
2136 allfiles = set()
2137 for rev in revs:
2137 for rev in revs:
2138 for file in repo[rev].files():
2138 for file in repo[rev].files():
2139 if not match or match(file):
2139 if not match or match(file):
2140 allfiles.add(file)
2140 allfiles.add(file)
2141 match = scmutil.matchfiles(repo, allfiles)
2141 match = scmutil.matchfiles(repo, allfiles)
2142 revmatches = [(rev, match) for rev in revs]
2142 revmatches = [(rev, match) for rev in revs]
2143 scmutil.prefetchfiles(repo, revmatches)
2143 scmutil.prefetchfiles(repo, revmatches)
2144
2144
2145
2145
2146 def export(
2146 def export(
2147 repo,
2147 repo,
2148 revs,
2148 revs,
2149 basefm,
2149 basefm,
2150 fntemplate=b'hg-%h.patch',
2150 fntemplate=b'hg-%h.patch',
2151 switch_parent=False,
2151 switch_parent=False,
2152 opts=None,
2152 opts=None,
2153 match=None,
2153 match=None,
2154 ):
2154 ):
2155 '''export changesets as hg patches
2155 '''export changesets as hg patches
2156
2156
2157 Args:
2157 Args:
2158 repo: The repository from which we're exporting revisions.
2158 repo: The repository from which we're exporting revisions.
2159 revs: A list of revisions to export as revision numbers.
2159 revs: A list of revisions to export as revision numbers.
2160 basefm: A formatter to which patches should be written.
2160 basefm: A formatter to which patches should be written.
2161 fntemplate: An optional string to use for generating patch file names.
2161 fntemplate: An optional string to use for generating patch file names.
2162 switch_parent: If True, show diffs against second parent when not nullid.
2162 switch_parent: If True, show diffs against second parent when not nullid.
2163 Default is false, which always shows diff against p1.
2163 Default is false, which always shows diff against p1.
2164 opts: diff options to use for generating the patch.
2164 opts: diff options to use for generating the patch.
2165 match: If specified, only export changes to files matching this matcher.
2165 match: If specified, only export changes to files matching this matcher.
2166
2166
2167 Returns:
2167 Returns:
2168 Nothing.
2168 Nothing.
2169
2169
2170 Side Effect:
2170 Side Effect:
2171 "HG Changeset Patch" data is emitted to one of the following
2171 "HG Changeset Patch" data is emitted to one of the following
2172 destinations:
2172 destinations:
2173 fntemplate specified: Each rev is written to a unique file named using
2173 fntemplate specified: Each rev is written to a unique file named using
2174 the given template.
2174 the given template.
2175 Otherwise: All revs will be written to basefm.
2175 Otherwise: All revs will be written to basefm.
2176 '''
2176 '''
2177 _prefetchchangedfiles(repo, revs, match)
2177 _prefetchchangedfiles(repo, revs, match)
2178
2178
2179 if not fntemplate:
2179 if not fntemplate:
2180 _exportfile(
2180 _exportfile(
2181 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2181 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2182 )
2182 )
2183 else:
2183 else:
2184 _exportfntemplate(
2184 _exportfntemplate(
2185 repo, revs, basefm, fntemplate, switch_parent, opts, match
2185 repo, revs, basefm, fntemplate, switch_parent, opts, match
2186 )
2186 )
2187
2187
2188
2188
2189 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2189 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2190 """Export changesets to the given file stream"""
2190 """Export changesets to the given file stream"""
2191 _prefetchchangedfiles(repo, revs, match)
2191 _prefetchchangedfiles(repo, revs, match)
2192
2192
2193 dest = getattr(fp, 'name', b'<unnamed>')
2193 dest = getattr(fp, 'name', b'<unnamed>')
2194 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2194 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2195 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2195 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2196
2196
2197
2197
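# A minimal, hypothetical sketch of driving the export helpers above: dump two
# revisions into a single patch file. The b'out.patch' name is an illustrative
# assumption, and the diff options come from patch.diffallopts(); passing
# opts=None falls back to default diff options.
def _example_export_to_file(ui, repo):
    diffopts = patch.diffallopts(ui)
    with open(b'out.patch', 'wb') as fp:
        exportfile(repo, [0, 1], fp, opts=diffopts)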
2198 def showmarker(fm, marker, index=None):
2198 def showmarker(fm, marker, index=None):
2199 """utility function to display obsolescence marker in a readable way
2199 """utility function to display obsolescence marker in a readable way
2200
2200
2201 To be used by debug function."""
2201 To be used by debug function."""
2202 if index is not None:
2202 if index is not None:
2203 fm.write(b'index', b'%i ', index)
2203 fm.write(b'index', b'%i ', index)
2204 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2204 fm.write(b'prednode', b'%s ', hex(marker.prednode()))
2205 succs = marker.succnodes()
2205 succs = marker.succnodes()
2206 fm.condwrite(
2206 fm.condwrite(
2207 succs,
2207 succs,
2208 b'succnodes',
2208 b'succnodes',
2209 b'%s ',
2209 b'%s ',
2210 fm.formatlist(map(hex, succs), name=b'node'),
2210 fm.formatlist(map(hex, succs), name=b'node'),
2211 )
2211 )
2212 fm.write(b'flag', b'%X ', marker.flags())
2212 fm.write(b'flag', b'%X ', marker.flags())
2213 parents = marker.parentnodes()
2213 parents = marker.parentnodes()
2214 if parents is not None:
2214 if parents is not None:
2215 fm.write(
2215 fm.write(
2216 b'parentnodes',
2216 b'parentnodes',
2217 b'{%s} ',
2217 b'{%s} ',
2218 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2218 fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
2219 )
2219 )
2220 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2220 fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
2221 meta = marker.metadata().copy()
2221 meta = marker.metadata().copy()
2222 meta.pop(b'date', None)
2222 meta.pop(b'date', None)
2223 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2223 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
2224 fm.write(
2224 fm.write(
2225 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2225 b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
2226 )
2226 )
2227 fm.plain(b'\n')
2227 fm.plain(b'\n')
2228
2228
2229
2229
2230 def finddate(ui, repo, date):
2230 def finddate(ui, repo, date):
2231 """Find the tipmost changeset that matches the given date spec"""
2231 """Find the tipmost changeset that matches the given date spec"""
2232
2232
2233 df = dateutil.matchdate(date)
2233 df = dateutil.matchdate(date)
2234 m = scmutil.matchall(repo)
2234 m = scmutil.matchall(repo)
2235 results = {}
2235 results = {}
2236
2236
2237 def prep(ctx, fns):
2237 def prep(ctx, fns):
2238 d = ctx.date()
2238 d = ctx.date()
2239 if df(d[0]):
2239 if df(d[0]):
2240 results[ctx.rev()] = d
2240 results[ctx.rev()] = d
2241
2241
2242 for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
2242 for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
2243 rev = ctx.rev()
2243 rev = ctx.rev()
2244 if rev in results:
2244 if rev in results:
2245 ui.status(
2245 ui.status(
2246 _(b"found revision %d from %s\n")
2246 _(b"found revision %d from %s\n")
2247 % (rev, dateutil.datestr(results[rev]))
2247 % (rev, dateutil.datestr(results[rev]))
2248 )
2248 )
2249 return b'%d' % rev
2249 return b'%d' % rev
2250
2250
2251 raise error.Abort(_(b"revision matching date not found"))
2251 raise error.Abort(_(b"revision matching date not found"))
2252
2252
2253
2253
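# A minimal, hypothetical sketch of finddate(): resolve "last changeset before
# 2020" to a revision. The date spec is whatever dateutil.matchdate() accepts;
# the literal value is only an illustration, and finddate() aborts if nothing
# matches.
def _example_findrevbydate(ui, repo):
    revspec = finddate(ui, repo, b'<2020-01-01')  # e.g. b'42'
    return scmutil.revsingle(repo, revspec).rev()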
2254 def increasingwindows(windowsize=8, sizelimit=512):
2254 def increasingwindows(windowsize=8, sizelimit=512):
2255 while True:
2255 while True:
2256 yield windowsize
2256 yield windowsize
2257 if windowsize < sizelimit:
2257 if windowsize < sizelimit:
2258 windowsize *= 2
2258 windowsize *= 2
2259
2259
2260
2260
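# Illustration of the generator above: window sizes double from the default 8
# until they are capped at the 512 size limit.
def _example_windowsizes():
    import itertools

    return list(itertools.islice(increasingwindows(), 8))
    # -> [8, 16, 32, 64, 128, 256, 512, 512]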
2261 def _walkrevs(repo, opts):
2261 def _walkrevs(repo, opts):
2262 # Default --rev value depends on --follow but --follow behavior
2262 # Default --rev value depends on --follow but --follow behavior
2263 # depends on revisions resolved from --rev...
2263 # depends on revisions resolved from --rev...
2264 follow = opts.get(b'follow') or opts.get(b'follow_first')
2264 follow = opts.get(b'follow') or opts.get(b'follow_first')
2265 if opts.get(b'rev'):
2265 if opts.get(b'rev'):
2266 revs = scmutil.revrange(repo, opts[b'rev'])
2266 revs = scmutil.revrange(repo, opts[b'rev'])
2267 elif follow and repo.dirstate.p1() == nullid:
2267 elif follow and repo.dirstate.p1() == nullid:
2268 revs = smartset.baseset()
2268 revs = smartset.baseset()
2269 elif follow:
2269 elif follow:
2270 revs = repo.revs(b'reverse(:.)')
2270 revs = repo.revs(b'reverse(:.)')
2271 else:
2271 else:
2272 revs = smartset.spanset(repo)
2272 revs = smartset.spanset(repo)
2273 revs.reverse()
2273 revs.reverse()
2274 return revs
2274 return revs
2275
2275
2276
2276
2277 class FileWalkError(Exception):
2277 class FileWalkError(Exception):
2278 pass
2278 pass
2279
2279
2280
2280
2281 def walkfilerevs(repo, match, follow, revs, fncache):
2281 def walkfilerevs(repo, match, follow, revs, fncache):
2282 '''Walks the file history for the matched files.
2282 '''Walks the file history for the matched files.
2283
2283
2284 Returns the changeset revs that are involved in the file history.
2284 Returns the changeset revs that are involved in the file history.
2285
2285
2286 Throws FileWalkError if the file history can't be walked using
2286 Throws FileWalkError if the file history can't be walked using
2287 filelogs alone.
2287 filelogs alone.
2288 '''
2288 '''
2289 wanted = set()
2289 wanted = set()
2290 copies = []
2290 copies = []
2291 minrev, maxrev = min(revs), max(revs)
2291 minrev, maxrev = min(revs), max(revs)
2292
2292
2293 def filerevs(filelog, last):
2293 def filerevs(filelog, last):
2294 """
2294 """
2295 Only files, no patterns. Check the history of each file.
2295 Only files, no patterns. Check the history of each file.
2296
2296
2297 Examines filelog entries within the minrev, maxrev linkrev range.
2297 Examines filelog entries within the minrev, maxrev linkrev range.
2298 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2298 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2299 tuples in backwards order.
2299 tuples in backwards order.
2300 """
2300 """
2301 cl_count = len(repo)
2301 cl_count = len(repo)
2302 revs = []
2302 revs = []
2303 for j in pycompat.xrange(0, last + 1):
2303 for j in pycompat.xrange(0, last + 1):
2304 linkrev = filelog.linkrev(j)
2304 linkrev = filelog.linkrev(j)
2305 if linkrev < minrev:
2305 if linkrev < minrev:
2306 continue
2306 continue
2307 # only yield revs for which we have the changelog; this can
2307 # only yield revs for which we have the changelog; this can
2308 # happen while doing "hg log" during a pull or commit
2308 # happen while doing "hg log" during a pull or commit
2309 if linkrev >= cl_count:
2309 if linkrev >= cl_count:
2310 break
2310 break
2311
2311
2312 parentlinkrevs = []
2312 parentlinkrevs = []
2313 for p in filelog.parentrevs(j):
2313 for p in filelog.parentrevs(j):
2314 if p != nullrev:
2314 if p != nullrev:
2315 parentlinkrevs.append(filelog.linkrev(p))
2315 parentlinkrevs.append(filelog.linkrev(p))
2316 n = filelog.node(j)
2316 n = filelog.node(j)
2317 revs.append(
2317 revs.append(
2318 (linkrev, parentlinkrevs, follow and filelog.renamed(n))
2318 (linkrev, parentlinkrevs, follow and filelog.renamed(n))
2319 )
2319 )
2320
2320
2321 return reversed(revs)
2321 return reversed(revs)
2322
2322
2323 def iterfiles():
2323 def iterfiles():
2324 pctx = repo[b'.']
2324 pctx = repo[b'.']
2325 for filename in match.files():
2325 for filename in match.files():
2326 if follow:
2326 if follow:
2327 if filename not in pctx:
2327 if filename not in pctx:
2328 raise error.Abort(
2328 raise error.Abort(
2329 _(
2329 _(
2330 b'cannot follow file not in parent '
2330 b'cannot follow file not in parent '
2331 b'revision: "%s"'
2331 b'revision: "%s"'
2332 )
2332 )
2333 % filename
2333 % filename
2334 )
2334 )
2335 yield filename, pctx[filename].filenode()
2335 yield filename, pctx[filename].filenode()
2336 else:
2336 else:
2337 yield filename, None
2337 yield filename, None
2338 for filename_node in copies:
2338 for filename_node in copies:
2339 yield filename_node
2339 yield filename_node
2340
2340
2341 for file_, node in iterfiles():
2341 for file_, node in iterfiles():
2342 filelog = repo.file(file_)
2342 filelog = repo.file(file_)
2343 if not len(filelog):
2343 if not len(filelog):
2344 if node is None:
2344 if node is None:
2345 # A zero count may be a directory or deleted file, so
2345 # A zero count may be a directory or deleted file, so
2346 # try to find matching entries on the slow path.
2346 # try to find matching entries on the slow path.
2347 if follow:
2347 if follow:
2348 raise error.Abort(
2348 raise error.Abort(
2349 _(b'cannot follow nonexistent file: "%s"') % file_
2349 _(b'cannot follow nonexistent file: "%s"') % file_
2350 )
2350 )
2351 raise FileWalkError(b"Cannot walk via filelog")
2351 raise FileWalkError(b"Cannot walk via filelog")
2352 else:
2352 else:
2353 continue
2353 continue
2354
2354
2355 if node is None:
2355 if node is None:
2356 last = len(filelog) - 1
2356 last = len(filelog) - 1
2357 else:
2357 else:
2358 last = filelog.rev(node)
2358 last = filelog.rev(node)
2359
2359
2360 # keep track of all ancestors of the file
2360 # keep track of all ancestors of the file
2361 ancestors = {filelog.linkrev(last)}
2361 ancestors = {filelog.linkrev(last)}
2362
2362
2363 # iterate from latest to oldest revision
2363 # iterate from latest to oldest revision
2364 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
2364 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
2365 if not follow:
2365 if not follow:
2366 if rev > maxrev:
2366 if rev > maxrev:
2367 continue
2367 continue
2368 else:
2368 else:
2369 # Note that last might not be the first interesting
2369 # Note that last might not be the first interesting
2370 # rev to us:
2370 # rev to us:
2371 # if the file has been changed after maxrev, we'll
2371 # if the file has been changed after maxrev, we'll
2372 # have linkrev(last) > maxrev, and we still need
2372 # have linkrev(last) > maxrev, and we still need
2373 # to explore the file graph
2373 # to explore the file graph
2374 if rev not in ancestors:
2374 if rev not in ancestors:
2375 continue
2375 continue
2376 # XXX insert 1327 fix here
2376 # XXX insert 1327 fix here
2377 if flparentlinkrevs:
2377 if flparentlinkrevs:
2378 ancestors.update(flparentlinkrevs)
2378 ancestors.update(flparentlinkrevs)
2379
2379
2380 fncache.setdefault(rev, []).append(file_)
2380 fncache.setdefault(rev, []).append(file_)
2381 wanted.add(rev)
2381 wanted.add(rev)
2382 if copied:
2382 if copied:
2383 copies.append(copied)
2383 copies.append(copied)
2384
2384
2385 return wanted
2385 return wanted
2386
2386
2387
2387
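# A minimal, hypothetical sketch of the calling convention documented above:
# try the fast filelog-only walk and report when the caller has to fall back
# to scanning the changelog (walkchangerevs() below does exactly this). revs
# must be a non-empty collection of revision numbers.
def _example_fastfilewalk(repo, match, revs):
    fncache = {}
    try:
        return walkfilerevs(repo, match, follow=False, revs=revs, fncache=fncache)
    except FileWalkError:
        return None  # caller must use the slow, changelog-scanning path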
2388 class _followfilter(object):
2388 class _followfilter(object):
2389 def __init__(self, repo, onlyfirst=False):
2389 def __init__(self, repo, onlyfirst=False):
2390 self.repo = repo
2390 self.repo = repo
2391 self.startrev = nullrev
2391 self.startrev = nullrev
2392 self.roots = set()
2392 self.roots = set()
2393 self.onlyfirst = onlyfirst
2393 self.onlyfirst = onlyfirst
2394
2394
2395 def match(self, rev):
2395 def match(self, rev):
2396 def realparents(rev):
2396 def realparents(rev):
2397 if self.onlyfirst:
2397 if self.onlyfirst:
2398 return self.repo.changelog.parentrevs(rev)[0:1]
2398 return self.repo.changelog.parentrevs(rev)[0:1]
2399 else:
2399 else:
2400 return filter(
2400 return filter(
2401 lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
2401 lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
2402 )
2402 )
2403
2403
2404 if self.startrev == nullrev:
2404 if self.startrev == nullrev:
2405 self.startrev = rev
2405 self.startrev = rev
2406 return True
2406 return True
2407
2407
2408 if rev > self.startrev:
2408 if rev > self.startrev:
2409 # forward: all descendants
2409 # forward: all descendants
2410 if not self.roots:
2410 if not self.roots:
2411 self.roots.add(self.startrev)
2411 self.roots.add(self.startrev)
2412 for parent in realparents(rev):
2412 for parent in realparents(rev):
2413 if parent in self.roots:
2413 if parent in self.roots:
2414 self.roots.add(rev)
2414 self.roots.add(rev)
2415 return True
2415 return True
2416 else:
2416 else:
2417 # backwards: all parents
2417 # backwards: all parents
2418 if not self.roots:
2418 if not self.roots:
2419 self.roots.update(realparents(self.startrev))
2419 self.roots.update(realparents(self.startrev))
2420 if rev in self.roots:
2420 if rev in self.roots:
2421 self.roots.remove(rev)
2421 self.roots.remove(rev)
2422 self.roots.update(realparents(rev))
2422 self.roots.update(realparents(rev))
2423 return True
2423 return True
2424
2424
2425 return False
2425 return False
2426
2426
2427
2427
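# A minimal, hypothetical sketch mirroring the prune loop in walkchangerevs()
# below: collect a revision and its ancestors down to stoprev by feeding
# revisions to the filter in descending order, as the class expects.
def _example_collectancestors(repo, rev, stoprev):
    ff = _followfilter(repo)
    return [x for x in pycompat.xrange(rev, stoprev - 1, -1) if ff.match(x)]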
2428 def walkchangerevs(repo, match, opts, prepare):
2428 def walkchangerevs(repo, match, opts, prepare):
2429 '''Iterate over files and the revs in which they changed.
2429 '''Iterate over files and the revs in which they changed.
2430
2430
2431 Callers most commonly need to iterate backwards over the history
2431 Callers most commonly need to iterate backwards over the history
2432 in which they are interested. Doing so has awful (quadratic-looking)
2432 in which they are interested. Doing so has awful (quadratic-looking)
2433 performance, so we use iterators in a "windowed" way.
2433 performance, so we use iterators in a "windowed" way.
2434
2434
2435 We walk a window of revisions in the desired order. Within the
2435 We walk a window of revisions in the desired order. Within the
2436 window, we first walk forwards to gather data, then in the desired
2436 window, we first walk forwards to gather data, then in the desired
2437 order (usually backwards) to display it.
2437 order (usually backwards) to display it.
2438
2438
2439 This function returns an iterator yielding contexts. Before
2439 This function returns an iterator yielding contexts. Before
2440 yielding each context, the iterator will first call the prepare
2440 yielding each context, the iterator will first call the prepare
2441 function on each context in the window in forward order.'''
2441 function on each context in the window in forward order.'''
2442
2442
2443 allfiles = opts.get(b'all_files')
2443 allfiles = opts.get(b'all_files')
2444 follow = opts.get(b'follow') or opts.get(b'follow_first')
2444 follow = opts.get(b'follow') or opts.get(b'follow_first')
2445 revs = _walkrevs(repo, opts)
2445 revs = _walkrevs(repo, opts)
2446 if not revs:
2446 if not revs:
2447 return []
2447 return []
2448 wanted = set()
2448 wanted = set()
2449 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
2449 slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
2450 fncache = {}
2450 fncache = {}
2451 change = repo.__getitem__
2451 change = repo.__getitem__
2452
2452
2453 # First step is to fill wanted, the set of revisions that we want to yield.
2453 # First step is to fill wanted, the set of revisions that we want to yield.
2454 # When it does not induce extra cost, we also fill fncache for revisions in
2454 # When it does not induce extra cost, we also fill fncache for revisions in
2455 # wanted: a cache of filenames that were changed (ctx.files()) and that
2455 # wanted: a cache of filenames that were changed (ctx.files()) and that
2456 # match the file filtering conditions.
2456 # match the file filtering conditions.
2457
2457
2458 if match.always() or allfiles:
2458 if match.always() or allfiles:
2459 # No files, no patterns. Display all revs.
2459 # No files, no patterns. Display all revs.
2460 wanted = revs
2460 wanted = revs
2461 elif not slowpath:
2461 elif not slowpath:
2462 # We only have to read through the filelog to find wanted revisions
2462 # We only have to read through the filelog to find wanted revisions
2463
2463
2464 try:
2464 try:
2465 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2465 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2466 except FileWalkError:
2466 except FileWalkError:
2467 slowpath = True
2467 slowpath = True
2468
2468
2469 # We decided to fall back to the slowpath because at least one
2469 # We decided to fall back to the slowpath because at least one
2470 # of the paths was not a file. Check to see if at least one of them
2470 # of the paths was not a file. Check to see if at least one of them
2471 # existed in history, otherwise simply return
2471 # existed in history, otherwise simply return
2472 for path in match.files():
2472 for path in match.files():
2473 if path == b'.' or path in repo.store:
2473 if path == b'.' or path in repo.store:
2474 break
2474 break
2475 else:
2475 else:
2476 return []
2476 return []
2477
2477
2478 if slowpath:
2478 if slowpath:
2479 # We have to read the changelog to match filenames against
2479 # We have to read the changelog to match filenames against
2480 # changed files
2480 # changed files
2481
2481
2482 if follow:
2482 if follow:
2483 raise error.Abort(
2483 raise error.Abort(
2484 _(b'can only follow copies/renames for explicit filenames')
2484 _(b'can only follow copies/renames for explicit filenames')
2485 )
2485 )
2486
2486
2487 # The slow path checks files modified in every changeset.
2487 # The slow path checks files modified in every changeset.
2488 # This is really slow on large repos, so compute the set lazily.
2488 # This is really slow on large repos, so compute the set lazily.
2489 class lazywantedset(object):
2489 class lazywantedset(object):
2490 def __init__(self):
2490 def __init__(self):
2491 self.set = set()
2491 self.set = set()
2492 self.revs = set(revs)
2492 self.revs = set(revs)
2493
2493
2494 # No need to worry about locality here because it will be accessed
2494 # No need to worry about locality here because it will be accessed
2495 # in the same order as the increasing window below.
2495 # in the same order as the increasing window below.
2496 def __contains__(self, value):
2496 def __contains__(self, value):
2497 if value in self.set:
2497 if value in self.set:
2498 return True
2498 return True
2499 elif not value in self.revs:
2499 elif not value in self.revs:
2500 return False
2500 return False
2501 else:
2501 else:
2502 self.revs.discard(value)
2502 self.revs.discard(value)
2503 ctx = change(value)
2503 ctx = change(value)
2504 if allfiles:
2504 if allfiles:
2505 matches = list(ctx.manifest().walk(match))
2505 matches = list(ctx.manifest().walk(match))
2506 else:
2506 else:
2507 matches = [f for f in ctx.files() if match(f)]
2507 matches = [f for f in ctx.files() if match(f)]
2508 if matches:
2508 if matches:
2509 fncache[value] = matches
2509 fncache[value] = matches
2510 self.set.add(value)
2510 self.set.add(value)
2511 return True
2511 return True
2512 return False
2512 return False
2513
2513
2514 def discard(self, value):
2514 def discard(self, value):
2515 self.revs.discard(value)
2515 self.revs.discard(value)
2516 self.set.discard(value)
2516 self.set.discard(value)
2517
2517
2518 wanted = lazywantedset()
2518 wanted = lazywantedset()
2519
2519
2520 # it might be worthwhile to do this in the iterator if the rev range
2520 # it might be worthwhile to do this in the iterator if the rev range
2521 # is descending and the prune args are all within that range
2521 # is descending and the prune args are all within that range
2522 for rev in opts.get(b'prune', ()):
2522 for rev in opts.get(b'prune', ()):
2523 rev = repo[rev].rev()
2523 rev = repo[rev].rev()
2524 ff = _followfilter(repo)
2524 ff = _followfilter(repo)
2525 stop = min(revs[0], revs[-1])
2525 stop = min(revs[0], revs[-1])
2526 for x in pycompat.xrange(rev, stop - 1, -1):
2526 for x in pycompat.xrange(rev, stop - 1, -1):
2527 if ff.match(x):
2527 if ff.match(x):
2528 wanted = wanted - [x]
2528 wanted = wanted - [x]
2529
2529
2530 # Now that wanted is correctly initialized, we can iterate over the
2530 # Now that wanted is correctly initialized, we can iterate over the
2531 # revision range, yielding only revisions in wanted.
2531 # revision range, yielding only revisions in wanted.
2532 def iterate():
2532 def iterate():
2533 if follow and match.always():
2533 if follow and match.always():
2534 ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))
2534 ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))
2535
2535
2536 def want(rev):
2536 def want(rev):
2537 return ff.match(rev) and rev in wanted
2537 return ff.match(rev) and rev in wanted
2538
2538
2539 else:
2539 else:
2540
2540
2541 def want(rev):
2541 def want(rev):
2542 return rev in wanted
2542 return rev in wanted
2543
2543
2544 it = iter(revs)
2544 it = iter(revs)
2545 stopiteration = False
2545 stopiteration = False
2546 for windowsize in increasingwindows():
2546 for windowsize in increasingwindows():
2547 nrevs = []
2547 nrevs = []
2548 for i in pycompat.xrange(windowsize):
2548 for i in pycompat.xrange(windowsize):
2549 rev = next(it, None)
2549 rev = next(it, None)
2550 if rev is None:
2550 if rev is None:
2551 stopiteration = True
2551 stopiteration = True
2552 break
2552 break
2553 elif want(rev):
2553 elif want(rev):
2554 nrevs.append(rev)
2554 nrevs.append(rev)
2555 for rev in sorted(nrevs):
2555 for rev in sorted(nrevs):
2556 fns = fncache.get(rev)
2556 fns = fncache.get(rev)
2557 ctx = change(rev)
2557 ctx = change(rev)
2558 if not fns:
2558 if not fns:
2559
2559
2560 def fns_generator():
2560 def fns_generator():
2561 if allfiles:
2561 if allfiles:
2562
2562
2563 def bad(f, msg):
2563 def bad(f, msg):
2564 pass
2564 pass
2565
2565
2566 for f in ctx.matches(matchmod.badmatch(match, bad)):
2566 for f in ctx.matches(matchmod.badmatch(match, bad)):
2567 yield f
2567 yield f
2568 else:
2568 else:
2569 for f in ctx.files():
2569 for f in ctx.files():
2570 if match(f):
2570 if match(f):
2571 yield f
2571 yield f
2572
2572
2573 fns = fns_generator()
2573 fns = fns_generator()
2574 prepare(ctx, fns)
2574 prepare(ctx, fns)
2575 for rev in nrevs:
2575 for rev in nrevs:
2576 yield change(rev)
2576 yield change(rev)
2577
2577
2578 if stopiteration:
2578 if stopiteration:
2579 break
2579 break
2580
2580
2581 return iterate()
2581 return iterate()
2582
2582
2583
2583
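# A minimal, hypothetical sketch of the prepare-callback contract described in
# the docstring above: count the changesets that touched README (the file name
# is an illustrative assumption).
def _example_counttouches(repo):
    m = scmutil.matchfiles(repo, [b'README'])
    touched = []

    def prep(ctx, fns):
        # fns is limited to files matching ``m`` that changed in ``ctx``
        touched.append(ctx.rev())

    for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
        pass  # each context is yielded after prep() ran on its window
    return len(touched)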
2584 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2584 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2585 bad = []
2585 bad = []
2586
2586
2587 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2587 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2588 names = []
2588 names = []
2589 wctx = repo[None]
2589 wctx = repo[None]
2590 cca = None
2590 cca = None
2591 abort, warn = scmutil.checkportabilityalert(ui)
2591 abort, warn = scmutil.checkportabilityalert(ui)
2592 if abort or warn:
2592 if abort or warn:
2593 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2593 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2594
2594
2595 match = repo.narrowmatch(match, includeexact=True)
2595 match = repo.narrowmatch(match, includeexact=True)
2596 badmatch = matchmod.badmatch(match, badfn)
2596 badmatch = matchmod.badmatch(match, badfn)
2597 dirstate = repo.dirstate
2597 dirstate = repo.dirstate
2598 # We don't want to just call wctx.walk here, since it would return a lot of
2598 # We don't want to just call wctx.walk here, since it would return a lot of
2599 # clean files, which we aren't interested in, and doing so takes time.
2599 # clean files, which we aren't interested in, and doing so takes time.
2600 for f in sorted(
2600 for f in sorted(
2601 dirstate.walk(
2601 dirstate.walk(
2602 badmatch,
2602 badmatch,
2603 subrepos=sorted(wctx.substate),
2603 subrepos=sorted(wctx.substate),
2604 unknown=True,
2604 unknown=True,
2605 ignored=False,
2605 ignored=False,
2606 full=False,
2606 full=False,
2607 )
2607 )
2608 ):
2608 ):
2609 exact = match.exact(f)
2609 exact = match.exact(f)
2610 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2610 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2611 if cca:
2611 if cca:
2612 cca(f)
2612 cca(f)
2613 names.append(f)
2613 names.append(f)
2614 if ui.verbose or not exact:
2614 if ui.verbose or not exact:
2615 ui.status(
2615 ui.status(
2616 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2616 _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
2617 )
2617 )
2618
2618
2619 for subpath in sorted(wctx.substate):
2619 for subpath in sorted(wctx.substate):
2620 sub = wctx.sub(subpath)
2620 sub = wctx.sub(subpath)
2621 try:
2621 try:
2622 submatch = matchmod.subdirmatcher(subpath, match)
2622 submatch = matchmod.subdirmatcher(subpath, match)
2623 subprefix = repo.wvfs.reljoin(prefix, subpath)
2623 subprefix = repo.wvfs.reljoin(prefix, subpath)
2624 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2624 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2625 if opts.get('subrepos'):
2625 if opts.get('subrepos'):
2626 bad.extend(
2626 bad.extend(
2627 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2627 sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
2628 )
2628 )
2629 else:
2629 else:
2630 bad.extend(
2630 bad.extend(
2631 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2631 sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
2632 )
2632 )
2633 except error.LookupError:
2633 except error.LookupError:
2634 ui.status(
2634 ui.status(
2635 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2635 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2636 )
2636 )
2637
2637
2638 if not opts.get('dry_run'):
2638 if not opts.get('dry_run'):
2639 rejected = wctx.add(names, prefix)
2639 rejected = wctx.add(names, prefix)
2640 bad.extend(f for f in rejected if f in match.files())
2640 bad.extend(f for f in rejected if f in match.files())
2641 return bad
2641 return bad
2642
2642
2643
2643
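# A minimal, hypothetical sketch of driving the helper above the way a bare
# "hg add" front end might: stage every unknown file in the working copy.
# scmutil.getuipathfn() is assumed to be the source of the uipathfn argument
# used throughout this module.
def _example_addall(ui, repo):
    m = scmutil.matchall(repo)
    uipathfn = scmutil.getuipathfn(repo)
    return add(ui, repo, m, b'', uipathfn, False, dry_run=False, subrepos=False)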
2644 def addwebdirpath(repo, serverpath, webconf):
2644 def addwebdirpath(repo, serverpath, webconf):
2645 webconf[serverpath] = repo.root
2645 webconf[serverpath] = repo.root
2646 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2646 repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
2647
2647
2648 for r in repo.revs(b'filelog("path:.hgsub")'):
2648 for r in repo.revs(b'filelog("path:.hgsub")'):
2649 ctx = repo[r]
2649 ctx = repo[r]
2650 for subpath in ctx.substate:
2650 for subpath in ctx.substate:
2651 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2651 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2652
2652
2653
2653
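# A minimal, hypothetical sketch of building an hgweb-style configuration
# mapping with the helper above. The b'/projects/foo' mount point is an
# illustrative assumption.
def _example_webconf(repo):
    webconf = {}
    addwebdirpath(repo, b'/projects/foo', webconf)
    return webconf  # {b'/projects/foo': repo.root, plus any subrepositories}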
2654 def forget(
2654 def forget(
2655 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2655 ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
2656 ):
2656 ):
2657 if dryrun and interactive:
2657 if dryrun and interactive:
2658 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2658 raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
2659 bad = []
2659 bad = []
2660 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2660 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2661 wctx = repo[None]
2661 wctx = repo[None]
2662 forgot = []
2662 forgot = []
2663
2663
2664 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2664 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2665 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2665 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2666 if explicitonly:
2666 if explicitonly:
2667 forget = [f for f in forget if match.exact(f)]
2667 forget = [f for f in forget if match.exact(f)]
2668
2668
2669 for subpath in sorted(wctx.substate):
2669 for subpath in sorted(wctx.substate):
2670 sub = wctx.sub(subpath)
2670 sub = wctx.sub(subpath)
2671 submatch = matchmod.subdirmatcher(subpath, match)
2671 submatch = matchmod.subdirmatcher(subpath, match)
2672 subprefix = repo.wvfs.reljoin(prefix, subpath)
2672 subprefix = repo.wvfs.reljoin(prefix, subpath)
2673 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2673 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2674 try:
2674 try:
2675 subbad, subforgot = sub.forget(
2675 subbad, subforgot = sub.forget(
2676 submatch,
2676 submatch,
2677 subprefix,
2677 subprefix,
2678 subuipathfn,
2678 subuipathfn,
2679 dryrun=dryrun,
2679 dryrun=dryrun,
2680 interactive=interactive,
2680 interactive=interactive,
2681 )
2681 )
2682 bad.extend([subpath + b'/' + f for f in subbad])
2682 bad.extend([subpath + b'/' + f for f in subbad])
2683 forgot.extend([subpath + b'/' + f for f in subforgot])
2683 forgot.extend([subpath + b'/' + f for f in subforgot])
2684 except error.LookupError:
2684 except error.LookupError:
2685 ui.status(
2685 ui.status(
2686 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2686 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
2687 )
2687 )
2688
2688
2689 if not explicitonly:
2689 if not explicitonly:
2690 for f in match.files():
2690 for f in match.files():
2691 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2691 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2692 if f not in forgot:
2692 if f not in forgot:
2693 if repo.wvfs.exists(f):
2693 if repo.wvfs.exists(f):
2694 # Don't complain if the exact case match wasn't given.
2694 # Don't complain if the exact case match wasn't given.
2695 # But don't do this until after checking 'forgot', so
2695 # But don't do this until after checking 'forgot', so
2696 # that subrepo files aren't normalized, and this op is
2696 # that subrepo files aren't normalized, and this op is
2697 # purely from data cached by the status walk above.
2697 # purely from data cached by the status walk above.
2698 if repo.dirstate.normalize(f) in repo.dirstate:
2698 if repo.dirstate.normalize(f) in repo.dirstate:
2699 continue
2699 continue
2700 ui.warn(
2700 ui.warn(
2701 _(
2701 _(
2702 b'not removing %s: '
2702 b'not removing %s: '
2703 b'file is already untracked\n'
2703 b'file is already untracked\n'
2704 )
2704 )
2705 % uipathfn(f)
2705 % uipathfn(f)
2706 )
2706 )
2707 bad.append(f)
2707 bad.append(f)
2708
2708
2709 if interactive:
2709 if interactive:
2710 responses = _(
2710 responses = _(
2711 b'[Ynsa?]'
2711 b'[Ynsa?]'
2712 b'$$ &Yes, forget this file'
2712 b'$$ &Yes, forget this file'
2713 b'$$ &No, skip this file'
2713 b'$$ &No, skip this file'
2714 b'$$ &Skip remaining files'
2714 b'$$ &Skip remaining files'
2715 b'$$ Include &all remaining files'
2715 b'$$ Include &all remaining files'
2716 b'$$ &? (display help)'
2716 b'$$ &? (display help)'
2717 )
2717 )
2718 for filename in forget[:]:
2718 for filename in forget[:]:
2719 r = ui.promptchoice(
2719 r = ui.promptchoice(
2720 _(b'forget %s %s') % (uipathfn(filename), responses)
2720 _(b'forget %s %s') % (uipathfn(filename), responses)
2721 )
2721 )
2722 if r == 4: # ?
2722 if r == 4: # ?
2723 while r == 4:
2723 while r == 4:
2724 for c, t in ui.extractchoices(responses)[1]:
2724 for c, t in ui.extractchoices(responses)[1]:
2725 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2725 ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
2726 r = ui.promptchoice(
2726 r = ui.promptchoice(
2727 _(b'forget %s %s') % (uipathfn(filename), responses)
2727 _(b'forget %s %s') % (uipathfn(filename), responses)
2728 )
2728 )
2729 if r == 0: # yes
2729 if r == 0: # yes
2730 continue
2730 continue
2731 elif r == 1: # no
2731 elif r == 1: # no
2732 forget.remove(filename)
2732 forget.remove(filename)
2733 elif r == 2: # Skip
2733 elif r == 2: # Skip
2734 fnindex = forget.index(filename)
2734 fnindex = forget.index(filename)
2735 del forget[fnindex:]
2735 del forget[fnindex:]
2736 break
2736 break
2737 elif r == 3: # All
2737 elif r == 3: # All
2738 break
2738 break
2739
2739
2740 for f in forget:
2740 for f in forget:
2741 if ui.verbose or not match.exact(f) or interactive:
2741 if ui.verbose or not match.exact(f) or interactive:
2742 ui.status(
2742 ui.status(
2743 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2743 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2744 )
2744 )
2745
2745
2746 if not dryrun:
2746 if not dryrun:
2747 rejected = wctx.forget(forget, prefix)
2747 rejected = wctx.forget(forget, prefix)
2748 bad.extend(f for f in rejected if f in match.files())
2748 bad.extend(f for f in rejected if f in match.files())
2749 forgot.extend(f for f in forget if f not in rejected)
2749 forgot.extend(f for f in forget if f not in rejected)
2750 return bad, forgot
2750 return bad, forgot
2751
2751
2752
2752
2753 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2753 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2754 ret = 1
2754 ret = 1
2755
2755
2756 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2756 needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
2757 if fm.isplain() and not needsfctx:
2757 if fm.isplain() and not needsfctx:
2758 # Fast path. The speed-up comes from skipping the formatter, and batching
2758 # Fast path. The speed-up comes from skipping the formatter, and batching
2759 # calls to ui.write.
2759 # calls to ui.write.
2760 buf = []
2760 buf = []
2761 for f in ctx.matches(m):
2761 for f in ctx.matches(m):
2762 buf.append(fmt % uipathfn(f))
2762 buf.append(fmt % uipathfn(f))
2763 if len(buf) > 100:
2763 if len(buf) > 100:
2764 ui.write(b''.join(buf))
2764 ui.write(b''.join(buf))
2765 del buf[:]
2765 del buf[:]
2766 ret = 0
2766 ret = 0
2767 if buf:
2767 if buf:
2768 ui.write(b''.join(buf))
2768 ui.write(b''.join(buf))
2769 else:
2769 else:
2770 for f in ctx.matches(m):
2770 for f in ctx.matches(m):
2771 fm.startitem()
2771 fm.startitem()
2772 fm.context(ctx=ctx)
2772 fm.context(ctx=ctx)
2773 if needsfctx:
2773 if needsfctx:
2774 fc = ctx[f]
2774 fc = ctx[f]
2775 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2775 fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
2776 fm.data(path=f)
2776 fm.data(path=f)
2777 fm.plain(fmt % uipathfn(f))
2777 fm.plain(fmt % uipathfn(f))
2778 ret = 0
2778 ret = 0
2779
2779
2780 for subpath in sorted(ctx.substate):
2780 for subpath in sorted(ctx.substate):
2781 submatch = matchmod.subdirmatcher(subpath, m)
2781 submatch = matchmod.subdirmatcher(subpath, m)
2782 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2782 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2783 if subrepos or m.exact(subpath) or any(submatch.files()):
2783 if subrepos or m.exact(subpath) or any(submatch.files()):
2784 sub = ctx.sub(subpath)
2784 sub = ctx.sub(subpath)
2785 try:
2785 try:
2786 recurse = m.exact(subpath) or subrepos
2786 recurse = m.exact(subpath) or subrepos
2787 if (
2787 if (
2788 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2788 sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
2789 == 0
2789 == 0
2790 ):
2790 ):
2791 ret = 0
2791 ret = 0
2792 except error.LookupError:
2792 except error.LookupError:
2793 ui.status(
2793 ui.status(
2794 _(b"skipping missing subrepository: %s\n")
2794 _(b"skipping missing subrepository: %s\n")
2795 % uipathfn(subpath)
2795 % uipathfn(subpath)
2796 )
2796 )
2797
2797
2798 return ret
2798 return ret
2799
2799
2800
2800
2801 def remove(
2801 def remove(
2802 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2802 ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
2803 ):
2803 ):
2804 ret = 0
2804 ret = 0
2805 s = repo.status(match=m, clean=True)
2805 s = repo.status(match=m, clean=True)
2806 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2806 modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean
2807
2807
2808 wctx = repo[None]
2808 wctx = repo[None]
2809
2809
2810 if warnings is None:
2810 if warnings is None:
2811 warnings = []
2811 warnings = []
2812 warn = True
2812 warn = True
2813 else:
2813 else:
2814 warn = False
2814 warn = False
2815
2815
2816 subs = sorted(wctx.substate)
2816 subs = sorted(wctx.substate)
2817 progress = ui.makeprogress(
2817 progress = ui.makeprogress(
2818 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2818 _(b'searching'), total=len(subs), unit=_(b'subrepos')
2819 )
2819 )
2820 for subpath in subs:
2820 for subpath in subs:
2821 submatch = matchmod.subdirmatcher(subpath, m)
2821 submatch = matchmod.subdirmatcher(subpath, m)
2822 subprefix = repo.wvfs.reljoin(prefix, subpath)
2822 subprefix = repo.wvfs.reljoin(prefix, subpath)
2823 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2823 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2824 if subrepos or m.exact(subpath) or any(submatch.files()):
2824 if subrepos or m.exact(subpath) or any(submatch.files()):
2825 progress.increment()
2825 progress.increment()
2826 sub = wctx.sub(subpath)
2826 sub = wctx.sub(subpath)
2827 try:
2827 try:
2828 if sub.removefiles(
2828 if sub.removefiles(
2829 submatch,
2829 submatch,
2830 subprefix,
2830 subprefix,
2831 subuipathfn,
2831 subuipathfn,
2832 after,
2832 after,
2833 force,
2833 force,
2834 subrepos,
2834 subrepos,
2835 dryrun,
2835 dryrun,
2836 warnings,
2836 warnings,
2837 ):
2837 ):
2838 ret = 1
2838 ret = 1
2839 except error.LookupError:
2839 except error.LookupError:
2840 warnings.append(
2840 warnings.append(
2841 _(b"skipping missing subrepository: %s\n")
2841 _(b"skipping missing subrepository: %s\n")
2842 % uipathfn(subpath)
2842 % uipathfn(subpath)
2843 )
2843 )
2844 progress.complete()
2844 progress.complete()
2845
2845
2846 # warn about failure to delete explicit files/dirs
2846 # warn about failure to delete explicit files/dirs
2847 deleteddirs = pathutil.dirs(deleted)
2847 deleteddirs = pathutil.dirs(deleted)
2848 files = m.files()
2848 files = m.files()
2849 progress = ui.makeprogress(
2849 progress = ui.makeprogress(
2850 _(b'deleting'), total=len(files), unit=_(b'files')
2850 _(b'deleting'), total=len(files), unit=_(b'files')
2851 )
2851 )
2852 for f in files:
2852 for f in files:
2853
2853
2854 def insubrepo():
2854 def insubrepo():
2855 for subpath in wctx.substate:
2855 for subpath in wctx.substate:
2856 if f.startswith(subpath + b'/'):
2856 if f.startswith(subpath + b'/'):
2857 return True
2857 return True
2858 return False
2858 return False
2859
2859
2860 progress.increment()
2860 progress.increment()
2861 isdir = f in deleteddirs or wctx.hasdir(f)
2861 isdir = f in deleteddirs or wctx.hasdir(f)
2862 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2862 if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
2863 continue
2863 continue
2864
2864
2865 if repo.wvfs.exists(f):
2865 if repo.wvfs.exists(f):
2866 if repo.wvfs.isdir(f):
2866 if repo.wvfs.isdir(f):
2867 warnings.append(
2867 warnings.append(
2868 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2868 _(b'not removing %s: no tracked files\n') % uipathfn(f)
2869 )
2869 )
2870 else:
2870 else:
2871 warnings.append(
2871 warnings.append(
2872 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2872 _(b'not removing %s: file is untracked\n') % uipathfn(f)
2873 )
2873 )
2874 # missing files will generate a warning elsewhere
2874 # missing files will generate a warning elsewhere
2875 ret = 1
2875 ret = 1
2876 progress.complete()
2876 progress.complete()
2877
2877
2878 if force:
2878 if force:
2879 list = modified + deleted + clean + added
2879 list = modified + deleted + clean + added
2880 elif after:
2880 elif after:
2881 list = deleted
2881 list = deleted
2882 remaining = modified + added + clean
2882 remaining = modified + added + clean
2883 progress = ui.makeprogress(
2883 progress = ui.makeprogress(
2884 _(b'skipping'), total=len(remaining), unit=_(b'files')
2884 _(b'skipping'), total=len(remaining), unit=_(b'files')
2885 )
2885 )
2886 for f in remaining:
2886 for f in remaining:
2887 progress.increment()
2887 progress.increment()
2888 if ui.verbose or (f in files):
2888 if ui.verbose or (f in files):
2889 warnings.append(
2889 warnings.append(
2890 _(b'not removing %s: file still exists\n') % uipathfn(f)
2890 _(b'not removing %s: file still exists\n') % uipathfn(f)
2891 )
2891 )
2892 ret = 1
2892 ret = 1
2893 progress.complete()
2893 progress.complete()
2894 else:
2894 else:
2895 list = deleted + clean
2895 list = deleted + clean
2896 progress = ui.makeprogress(
2896 progress = ui.makeprogress(
2897 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2897 _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
2898 )
2898 )
2899 for f in modified:
2899 for f in modified:
2900 progress.increment()
2900 progress.increment()
2901 warnings.append(
2901 warnings.append(
2902 _(
2902 _(
2903 b'not removing %s: file is modified (use -f'
2903 b'not removing %s: file is modified (use -f'
2904 b' to force removal)\n'
2904 b' to force removal)\n'
2905 )
2905 )
2906 % uipathfn(f)
2906 % uipathfn(f)
2907 )
2907 )
2908 ret = 1
2908 ret = 1
2909 for f in added:
2909 for f in added:
2910 progress.increment()
2910 progress.increment()
2911 warnings.append(
2911 warnings.append(
2912 _(
2912 _(
2913 b"not removing %s: file has been marked for add"
2913 b"not removing %s: file has been marked for add"
2914 b" (use 'hg forget' to undo add)\n"
2914 b" (use 'hg forget' to undo add)\n"
2915 )
2915 )
2916 % uipathfn(f)
2916 % uipathfn(f)
2917 )
2917 )
2918 ret = 1
2918 ret = 1
2919 progress.complete()
2919 progress.complete()
2920
2920
2921 list = sorted(list)
2921 list = sorted(list)
2922 progress = ui.makeprogress(
2922 progress = ui.makeprogress(
2923 _(b'deleting'), total=len(list), unit=_(b'files')
2923 _(b'deleting'), total=len(list), unit=_(b'files')
2924 )
2924 )
2925 for f in list:
2925 for f in list:
2926 if ui.verbose or not m.exact(f):
2926 if ui.verbose or not m.exact(f):
2927 progress.increment()
2927 progress.increment()
2928 ui.status(
2928 ui.status(
2929 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2929 _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
2930 )
2930 )
2931 progress.complete()
2931 progress.complete()
2932
2932
2933 if not dryrun:
2933 if not dryrun:
2934 with repo.wlock():
2934 with repo.wlock():
2935 if not after:
2935 if not after:
2936 for f in list:
2936 for f in list:
2937 if f in added:
2937 if f in added:
2938 continue # we never unlink added files on remove
2938 continue # we never unlink added files on remove
2939 rmdir = repo.ui.configbool(
2939 rmdir = repo.ui.configbool(
2940 b'experimental', b'removeemptydirs'
2940 b'experimental', b'removeemptydirs'
2941 )
2941 )
2942 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2942 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2943 repo[None].forget(list)
2943 repo[None].forget(list)
2944
2944
2945 if warn:
2945 if warn:
2946 for warning in warnings:
2946 for warning in warnings:
2947 ui.warn(warning)
2947 ui.warn(warning)
2948
2948
2949 return ret
2949 return ret
2950
2950
2951
2951
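The --force/--after branches above decide which of the matched files are actually deleted; that selection rule boils down to the following standalone sketch (hypothetical helper, not part of Mercurial)::

    def select_removals(modified, added, removed, deleted, clean, force, after):
        # mirrors the branches above: --force removes everything that matched,
        # --after only drops files already missing from disk, and the default
        # case removes deleted and clean files while warning about the rest
        if force:
            return modified + deleted + clean + added
        if after:
            return deleted
        return deleted + clean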
2952 def _catfmtneedsdata(fm):
2952 def _catfmtneedsdata(fm):
2953 return not fm.datahint() or b'data' in fm.datahint()
2953 return not fm.datahint() or b'data' in fm.datahint()
2954
2954
2955
2955
2956 def _updatecatformatter(fm, ctx, matcher, path, decode):
2956 def _updatecatformatter(fm, ctx, matcher, path, decode):
2957 """Hook for adding data to the formatter used by ``hg cat``.
2957 """Hook for adding data to the formatter used by ``hg cat``.
2958
2958
2959 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2959 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2960 this method first."""
2960 this method first."""
2961
2961
2962 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2962 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2963 # wasn't requested.
2963 # wasn't requested.
2964 data = b''
2964 data = b''
2965 if _catfmtneedsdata(fm):
2965 if _catfmtneedsdata(fm):
2966 data = ctx[path].data()
2966 data = ctx[path].data()
2967 if decode:
2967 if decode:
2968 data = ctx.repo().wwritedata(path, data)
2968 data = ctx.repo().wwritedata(path, data)
2969 fm.startitem()
2969 fm.startitem()
2970 fm.context(ctx=ctx)
2970 fm.context(ctx=ctx)
2971 fm.write(b'data', b'%s', data)
2971 fm.write(b'data', b'%s', data)
2972 fm.data(path=path)
2972 fm.data(path=path)
2973
2973
2974
2974
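As the docstring of _updatecatformatter() notes, extensions may wrap it to inject extra keywords. A minimal sketch of such an extension, assuming the standard extensions.wrapfunction() API and an illustrative 'size' keyword::

    from mercurial import cmdutil, extensions

    def _catformatterwithsize(orig, fm, ctx, matcher, path, decode):
        # call the wrapped hook first, as its docstring requires
        orig(fm, ctx, matcher, path, decode)
        # then add an extra keyword for template output (illustrative only)
        fm.data(size=len(ctx[path].data()))

    def extsetup(ui):
        extensions.wrapfunction(cmdutil, '_updatecatformatter', _catformatterwithsize)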
2975 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2975 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2976 err = 1
2976 err = 1
2977 opts = pycompat.byteskwargs(opts)
2977 opts = pycompat.byteskwargs(opts)
2978
2978
2979 def write(path):
2979 def write(path):
2980 filename = None
2980 filename = None
2981 if fntemplate:
2981 if fntemplate:
2982 filename = makefilename(
2982 filename = makefilename(
2983 ctx, fntemplate, pathname=os.path.join(prefix, path)
2983 ctx, fntemplate, pathname=os.path.join(prefix, path)
2984 )
2984 )
2985 # attempt to create the directory if it does not already exist
2985 # attempt to create the directory if it does not already exist
2986 try:
2986 try:
2987 os.makedirs(os.path.dirname(filename))
2987 os.makedirs(os.path.dirname(filename))
2988 except OSError:
2988 except OSError:
2989 pass
2989 pass
2990 with formatter.maybereopen(basefm, filename) as fm:
2990 with formatter.maybereopen(basefm, filename) as fm:
2991 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2991 _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
2992
2992
2993 # Automation often uses hg cat on single files, so special case it
2993 # Automation often uses hg cat on single files, so special case it
2994 # for performance to avoid the cost of parsing the manifest.
2994 # for performance to avoid the cost of parsing the manifest.
2995 if len(matcher.files()) == 1 and not matcher.anypats():
2995 if len(matcher.files()) == 1 and not matcher.anypats():
2996 file = matcher.files()[0]
2996 file = matcher.files()[0]
2997 mfl = repo.manifestlog
2997 mfl = repo.manifestlog
2998 mfnode = ctx.manifestnode()
2998 mfnode = ctx.manifestnode()
2999 try:
2999 try:
3000 if mfnode and mfl[mfnode].find(file)[0]:
3000 if mfnode and mfl[mfnode].find(file)[0]:
3001 if _catfmtneedsdata(basefm):
3001 if _catfmtneedsdata(basefm):
3002 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3002 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3003 write(file)
3003 write(file)
3004 return 0
3004 return 0
3005 except KeyError:
3005 except KeyError:
3006 pass
3006 pass
3007
3007
3008 if _catfmtneedsdata(basefm):
3008 if _catfmtneedsdata(basefm):
3009 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3009 scmutil.prefetchfiles(repo, [(ctx.rev(), matcher)])
3010
3010
3011 for abs in ctx.walk(matcher):
3011 for abs in ctx.walk(matcher):
3012 write(abs)
3012 write(abs)
3013 err = 0
3013 err = 0
3014
3014
3015 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3015 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3016 for subpath in sorted(ctx.substate):
3016 for subpath in sorted(ctx.substate):
3017 sub = ctx.sub(subpath)
3017 sub = ctx.sub(subpath)
3018 try:
3018 try:
3019 submatch = matchmod.subdirmatcher(subpath, matcher)
3019 submatch = matchmod.subdirmatcher(subpath, matcher)
3020 subprefix = os.path.join(prefix, subpath)
3020 subprefix = os.path.join(prefix, subpath)
3021 if not sub.cat(
3021 if not sub.cat(
3022 submatch,
3022 submatch,
3023 basefm,
3023 basefm,
3024 fntemplate,
3024 fntemplate,
3025 subprefix,
3025 subprefix,
3026 **pycompat.strkwargs(opts)
3026 **pycompat.strkwargs(opts)
3027 ):
3027 ):
3028 err = 0
3028 err = 0
3029 except error.RepoLookupError:
3029 except error.RepoLookupError:
3030 ui.status(
3030 ui.status(
3031 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
3031 _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
3032 )
3032 )
3033
3033
3034 return err
3034 return err
3035
3035
3036
3036
3037 def commit(ui, repo, commitfunc, pats, opts):
3037 def commit(ui, repo, commitfunc, pats, opts):
3038 '''commit the specified files or all outstanding changes'''
3038 '''commit the specified files or all outstanding changes'''
3039 date = opts.get(b'date')
3039 date = opts.get(b'date')
3040 if date:
3040 if date:
3041 opts[b'date'] = dateutil.parsedate(date)
3041 opts[b'date'] = dateutil.parsedate(date)
3042 message = logmessage(ui, opts)
3042 message = logmessage(ui, opts)
3043 matcher = scmutil.match(repo[None], pats, opts)
3043 matcher = scmutil.match(repo[None], pats, opts)
3044
3044
3045 dsguard = None
3045 dsguard = None
3046 # extract addremove carefully -- this function can be called from a command
3046 # extract addremove carefully -- this function can be called from a command
3047 # that doesn't support addremove
3047 # that doesn't support addremove
3048 if opts.get(b'addremove'):
3048 if opts.get(b'addremove'):
3049 dsguard = dirstateguard.dirstateguard(repo, b'commit')
3049 dsguard = dirstateguard.dirstateguard(repo, b'commit')
3050 with dsguard or util.nullcontextmanager():
3050 with dsguard or util.nullcontextmanager():
3051 if dsguard:
3051 if dsguard:
3052 relative = scmutil.anypats(pats, opts)
3052 relative = scmutil.anypats(pats, opts)
3053 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3053 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3054 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
3054 if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
3055 raise error.Abort(
3055 raise error.Abort(
3056 _(b"failed to mark all new/missing files as added/removed")
3056 _(b"failed to mark all new/missing files as added/removed")
3057 )
3057 )
3058
3058
3059 return commitfunc(ui, repo, message, matcher, opts)
3059 return commitfunc(ui, repo, message, matcher, opts)
3060
3060
3061
3061
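The ``with dsguard or util.nullcontextmanager()`` idiom above enters the dirstate guard only when --addremove was requested; in plain Python the same optional-context pattern can be sketched with the standard library::

    import contextlib

    def run_with_optional_guard(task, guard=None):
        # enter the guard if one was provided, otherwise use a no-op context
        with guard or contextlib.nullcontext():
            return task()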
3062 def samefile(f, ctx1, ctx2):
3062 def samefile(f, ctx1, ctx2):
3063 if f in ctx1.manifest():
3063 if f in ctx1.manifest():
3064 a = ctx1.filectx(f)
3064 a = ctx1.filectx(f)
3065 if f in ctx2.manifest():
3065 if f in ctx2.manifest():
3066 b = ctx2.filectx(f)
3066 b = ctx2.filectx(f)
3067 return not a.cmp(b) and a.flags() == b.flags()
3067 return not a.cmp(b) and a.flags() == b.flags()
3068 else:
3068 else:
3069 return False
3069 return False
3070 else:
3070 else:
3071 return f not in ctx2.manifest()
3071 return f not in ctx2.manifest()
3072
3072
3073
3073
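samefile() below treats two revisions as agreeing on a file when it is present in both with identical contents and flags, or absent from both; the same rule on plain dicts (a hypothetical stand-in for manifests)::

    def same_entry(path, manifest1, manifest2):
        # each manifest maps path -> (data, flags)
        if path in manifest1:
            if path in manifest2:
                return manifest1[path] == manifest2[path]
            return False
        return path not in manifest2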
3074 def amend(ui, repo, old, extra, pats, opts):
3074 def amend(ui, repo, old, extra, pats, opts):
3075 # avoid cycle context -> subrepo -> cmdutil
3075 # avoid cycle context -> subrepo -> cmdutil
3076 from . import context
3076 from . import context
3077
3077
3078 # amend will reuse the existing user if not specified, but the obsolete
3078 # amend will reuse the existing user if not specified, but the obsolete
3079 # marker creation requires that the current user's name is specified.
3079 # marker creation requires that the current user's name is specified.
3080 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3080 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3081 ui.username() # raise exception if username not set
3081 ui.username() # raise exception if username not set
3082
3082
3083 ui.note(_(b'amending changeset %s\n') % old)
3083 ui.note(_(b'amending changeset %s\n') % old)
3084 base = old.p1()
3084 base = old.p1()
3085
3085
3086 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
3086 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
3087 # Participating changesets:
3088 #
3089 # wctx o - workingctx that contains changes from working copy
3090 # | to go into amending commit
3091 # |
3092 # old o - changeset to amend
3093 # |
3094 # base o - first parent of the changeset to amend
3095 wctx = repo[None]
3095 wctx = repo[None]
3096
3096
3097 # Copy to avoid mutating input
3097 # Copy to avoid mutating input
3098 extra = extra.copy()
3098 extra = extra.copy()
3099 # Update extra dict from amended commit (e.g. to preserve graft
3099 # Update extra dict from amended commit (e.g. to preserve graft
3100 # source)
3100 # source)
3101 extra.update(old.extra())
3101 extra.update(old.extra())
3102
3102
3103 # Also update it from the wctx
3104 extra.update(wctx.extra())
3104 extra.update(wctx.extra())
3105
3105
3106 # date-only change should be ignored?
3106 # date-only change should be ignored?
3107 datemaydiffer = resolvecommitoptions(ui, opts)
3107 datemaydiffer = resolvecommitoptions(ui, opts)
3108
3108
3109 date = old.date()
3109 date = old.date()
3110 if opts.get(b'date'):
3110 if opts.get(b'date'):
3111 date = dateutil.parsedate(opts.get(b'date'))
3111 date = dateutil.parsedate(opts.get(b'date'))
3112 user = opts.get(b'user') or old.user()
3112 user = opts.get(b'user') or old.user()
3113
3113
3114 if len(old.parents()) > 1:
3114 if len(old.parents()) > 1:
3115 # ctx.files() isn't reliable for merges, so fall back to the
3115 # ctx.files() isn't reliable for merges, so fall back to the
3116 # slower repo.status() method
3116 # slower repo.status() method
3117 st = base.status(old)
3117 st = base.status(old)
3118 files = set(st.modified) | set(st.added) | set(st.removed)
3118 files = set(st.modified) | set(st.added) | set(st.removed)
3119 else:
3119 else:
3120 files = set(old.files())
3120 files = set(old.files())
3121
3121
3122 # add/remove the files to the working copy if the "addremove" option
3122 # add/remove the files to the working copy if the "addremove" option
3123 # was specified.
3123 # was specified.
3124 matcher = scmutil.match(wctx, pats, opts)
3124 matcher = scmutil.match(wctx, pats, opts)
3125 relative = scmutil.anypats(pats, opts)
3125 relative = scmutil.anypats(pats, opts)
3126 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3126 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
3127 if opts.get(b'addremove') and scmutil.addremove(
3127 if opts.get(b'addremove') and scmutil.addremove(
3128 repo, matcher, b"", uipathfn, opts
3128 repo, matcher, b"", uipathfn, opts
3129 ):
3129 ):
3130 raise error.Abort(
3130 raise error.Abort(
3131 _(b"failed to mark all new/missing files as added/removed")
3131 _(b"failed to mark all new/missing files as added/removed")
3132 )
3132 )
3133
3133
3134 # Check subrepos. This depends on in-place wctx._status update in
3134 # Check subrepos. This depends on in-place wctx._status update in
3135 # subrepo.precommit(). To minimize the risk of this hack, we do
3135 # subrepo.precommit(). To minimize the risk of this hack, we do
3136 # nothing if .hgsub does not exist.
3136 # nothing if .hgsub does not exist.
3137 if b'.hgsub' in wctx or b'.hgsub' in old:
3137 if b'.hgsub' in wctx or b'.hgsub' in old:
3138 subs, commitsubs, newsubstate = subrepoutil.precommit(
3138 subs, commitsubs, newsubstate = subrepoutil.precommit(
3139 ui, wctx, wctx._status, matcher
3139 ui, wctx, wctx._status, matcher
3140 )
3140 )
3141 # amend should abort if commitsubrepos is enabled
3141 # amend should abort if commitsubrepos is enabled
3142 assert not commitsubs
3142 assert not commitsubs
3143 if subs:
3143 if subs:
3144 subrepoutil.writestate(repo, newsubstate)
3144 subrepoutil.writestate(repo, newsubstate)
3145
3145
3146 ms = mergestatemod.mergestate.read(repo)
3146 ms = mergestatemod.mergestate.read(repo)
3147 mergeutil.checkunresolved(ms)
3147 mergeutil.checkunresolved(ms)
3148
3148
3149 filestoamend = {f for f in wctx.files() if matcher(f)}
3149 filestoamend = {f for f in wctx.files() if matcher(f)}
3150
3150
3151 changes = len(filestoamend) > 0
3151 changes = len(filestoamend) > 0
3152 if changes:
3152 if changes:
3153 # Recompute copies (avoid recording a -> b -> a)
3153 # Recompute copies (avoid recording a -> b -> a)
3154 copied = copies.pathcopies(base, wctx, matcher)
3154 copied = copies.pathcopies(base, wctx, matcher)
3155 if old.p2:
3155 if old.p2:
3156 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3156 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3157
3157
3158 # Prune files which were reverted by the updates: if old
3158 # Prune files which were reverted by the updates: if old
3159 # introduced file X and the file was renamed in the working
3159 # introduced file X and the file was renamed in the working
3160 # copy, then those two files are the same and
3160 # copy, then those two files are the same and
3161 # we can discard X from our list of files. Likewise if X
3161 # we can discard X from our list of files. Likewise if X
3162 # was removed, it's no longer relevant. If X is missing (aka
3162 # was removed, it's no longer relevant. If X is missing (aka
3163 # deleted), old X must be preserved.
3163 # deleted), old X must be preserved.
3164 files.update(filestoamend)
3164 files.update(filestoamend)
3165 files = [
3165 files = [
3166 f
3166 f
3167 for f in files
3167 for f in files
3168 if (f not in filestoamend or not samefile(f, wctx, base))
3168 if (f not in filestoamend or not samefile(f, wctx, base))
3169 ]
3169 ]
3170
3170
3171 def filectxfn(repo, ctx_, path):
3171 def filectxfn(repo, ctx_, path):
3172 try:
3172 try:
3173 # If the file being considered is not amongst the files
3173 # If the file being considered is not amongst the files
3174 # to be amended, we should return the file context from the
3174 # to be amended, we should return the file context from the
3175 # old changeset. This avoids issues when only some files in
3175 # old changeset. This avoids issues when only some files in
3176 # the working copy are being amended but there are also
3176 # the working copy are being amended but there are also
3177 # changes to other files from the old changeset.
3177 # changes to other files from the old changeset.
3178 if path not in filestoamend:
3178 if path not in filestoamend:
3179 return old.filectx(path)
3179 return old.filectx(path)
3180
3180
3181 # Return None for removed files.
3181 # Return None for removed files.
3182 if path in wctx.removed():
3182 if path in wctx.removed():
3183 return None
3183 return None
3184
3184
3185 fctx = wctx[path]
3185 fctx = wctx[path]
3186 flags = fctx.flags()
3186 flags = fctx.flags()
3187 mctx = context.memfilectx(
3187 mctx = context.memfilectx(
3188 repo,
3188 repo,
3189 ctx_,
3189 ctx_,
3190 fctx.path(),
3190 fctx.path(),
3191 fctx.data(),
3191 fctx.data(),
3192 islink=b'l' in flags,
3192 islink=b'l' in flags,
3193 isexec=b'x' in flags,
3193 isexec=b'x' in flags,
3194 copysource=copied.get(path),
3194 copysource=copied.get(path),
3195 )
3195 )
3196 return mctx
3196 return mctx
3197 except KeyError:
3197 except KeyError:
3198 return None
3198 return None
3199
3199
3200 else:
3200 else:
3201 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3201 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3202
3202
3203 # Use version of files as in the old cset
3203 # Use version of files as in the old cset
3204 def filectxfn(repo, ctx_, path):
3204 def filectxfn(repo, ctx_, path):
3205 try:
3205 try:
3206 return old.filectx(path)
3206 return old.filectx(path)
3207 except KeyError:
3207 except KeyError:
3208 return None
3208 return None
3209
3209
3210 # See if we got a message from -m or -l, if not, open the editor with
3210 # See if we got a message from -m or -l, if not, open the editor with
3211 # the message of the changeset to amend.
3211 # the message of the changeset to amend.
3212 message = logmessage(ui, opts)
3212 message = logmessage(ui, opts)
3213
3213
3214 editform = mergeeditform(old, b'commit.amend')
3214 editform = mergeeditform(old, b'commit.amend')
3215
3215
3216 if not message:
3216 if not message:
3217 message = old.description()
3217 message = old.description()
3218 # Default if message isn't provided and --edit is not passed is to
3218 # Default if message isn't provided and --edit is not passed is to
3219 # invoke editor, but allow --no-edit. If somehow we don't have any
3219 # invoke editor, but allow --no-edit. If somehow we don't have any
3220 # description, let's always start the editor.
3220 # description, let's always start the editor.
3221 doedit = not message or opts.get(b'edit') in [True, None]
3221 doedit = not message or opts.get(b'edit') in [True, None]
3222 else:
3222 else:
3223 # Default if message is provided is to not invoke editor, but allow
3223 # Default if message is provided is to not invoke editor, but allow
3224 # --edit.
3224 # --edit.
3225 doedit = opts.get(b'edit') is True
3225 doedit = opts.get(b'edit') is True
3226 editor = getcommiteditor(edit=doedit, editform=editform)
3226 editor = getcommiteditor(edit=doedit, editform=editform)
3227
3227
3228 pureextra = extra.copy()
3228 pureextra = extra.copy()
3229 extra[b'amend_source'] = old.hex()
3229 extra[b'amend_source'] = old.hex()
3230
3230
3231 new = context.memctx(
3231 new = context.memctx(
3232 repo,
3232 repo,
3233 parents=[base.node(), old.p2().node()],
3233 parents=[base.node(), old.p2().node()],
3234 text=message,
3234 text=message,
3235 files=files,
3235 files=files,
3236 filectxfn=filectxfn,
3236 filectxfn=filectxfn,
3237 user=user,
3237 user=user,
3238 date=date,
3238 date=date,
3239 extra=extra,
3239 extra=extra,
3240 editor=editor,
3240 editor=editor,
3241 )
3241 )
3242
3242
3243 newdesc = changelog.stripdesc(new.description())
3243 newdesc = changelog.stripdesc(new.description())
3244 if (
3244 if (
3245 (not changes)
3245 (not changes)
3246 and newdesc == old.description()
3246 and newdesc == old.description()
3247 and user == old.user()
3247 and user == old.user()
3248 and (date == old.date() or datemaydiffer)
3248 and (date == old.date() or datemaydiffer)
3249 and pureextra == old.extra()
3249 and pureextra == old.extra()
3250 ):
3250 ):
3251 # nothing changed. continuing here would create a new node
3251 # nothing changed. continuing here would create a new node
3252 # anyway because of the amend_source noise.
3252 # anyway because of the amend_source noise.
3253 #
3253 #
3254 # This is not what we expect from amend.
3255 return old.node()
3255 return old.node()
3256
3256
3257 commitphase = None
3257 commitphase = None
3258 if opts.get(b'secret'):
3258 if opts.get(b'secret'):
3259 commitphase = phases.secret
3259 commitphase = phases.secret
3260 newid = repo.commitctx(new)
3260 newid = repo.commitctx(new)
3261
3261
3262 # Reroute the working copy parent to the new changeset
3262 # Reroute the working copy parent to the new changeset
3263 repo.setparents(newid, nullid)
3263 repo.setparents(newid, nullid)
3264 mapping = {old.node(): (newid,)}
3264 mapping = {old.node(): (newid,)}
3265 obsmetadata = None
3265 obsmetadata = None
3266 if opts.get(b'note'):
3266 if opts.get(b'note'):
3267 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3267 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3268 backup = ui.configbool(b'rewrite', b'backup-bundle')
3268 backup = ui.configbool(b'rewrite', b'backup-bundle')
3269 scmutil.cleanupnodes(
3269 scmutil.cleanupnodes(
3270 repo,
3270 repo,
3271 mapping,
3271 mapping,
3272 b'amend',
3272 b'amend',
3273 metadata=obsmetadata,
3273 metadata=obsmetadata,
3274 fixphase=True,
3274 fixphase=True,
3275 targetphase=commitphase,
3275 targetphase=commitphase,
3276 backup=backup,
3276 backup=backup,
3277 )
3277 )
3278
3278
3279 # Fixing the dirstate because localrepo.commitctx does not update
3279 # Fixing the dirstate because localrepo.commitctx does not update
3280 # it. This is rather convenient because we did not need to update
3280 # it. This is rather convenient because we did not need to update
3281 # the dirstate for all the files in the new commit which commitctx
3281 # the dirstate for all the files in the new commit which commitctx
3282 # could have done if it updated the dirstate. Now, we can
3282 # could have done if it updated the dirstate. Now, we can
3283 # selectively update the dirstate only for the amended files.
3283 # selectively update the dirstate only for the amended files.
3284 dirstate = repo.dirstate
3284 dirstate = repo.dirstate
3285
3285
3286 # Update the state of the files which were added and modified in the
3286 # Update the state of the files which were added and modified in the
3287 # amend to "normal" in the dirstate. We need to use "normallookup" since
3287 # amend to "normal" in the dirstate. We need to use "normallookup" since
3288 # the files may have changed since the command started; using "normal"
3288 # the files may have changed since the command started; using "normal"
3289 # would mark them as clean but with uncommitted contents.
3289 # would mark them as clean but with uncommitted contents.
3290 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3290 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3291 for f in normalfiles:
3291 for f in normalfiles:
3292 dirstate.normallookup(f)
3292 dirstate.normallookup(f)
3293
3293
3294 # Update the state of files which were removed in the amend
3294 # Update the state of files which were removed in the amend
3295 # to "removed" in the dirstate.
3295 # to "removed" in the dirstate.
3296 removedfiles = set(wctx.removed()) & filestoamend
3296 removedfiles = set(wctx.removed()) & filestoamend
3297 for f in removedfiles:
3297 for f in removedfiles:
3298 dirstate.drop(f)
3298 dirstate.drop(f)
3299
3299
3300 return newid
3300 return newid
3301
3301
3302
3302
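amend() above rebuilds the changeset through context.memctx(), whose filectxfn callback returns None for removed files and a memfilectx for everything else; that callback pattern can be sketched on its own (hypothetical helper, reusing only calls visible in this file)::

    from mercurial import context

    def makefilectxfn(wctx, removedpaths):
        # returns a callback suitable for context.memctx()
        def filectxfn(repo, memctx, path):
            if path in removedpaths:
                return None  # None marks the file as removed
            fctx = wctx[path]
            flags = fctx.flags()
            return context.memfilectx(
                repo,
                memctx,
                fctx.path(),
                fctx.data(),
                islink=b'l' in flags,
                isexec=b'x' in flags,
            )
        return filectxfn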
3303 def commiteditor(repo, ctx, subs, editform=b''):
3303 def commiteditor(repo, ctx, subs, editform=b''):
3304 if ctx.description():
3304 if ctx.description():
3305 return ctx.description()
3305 return ctx.description()
3306 return commitforceeditor(
3306 return commitforceeditor(
3307 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3307 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3308 )
3308 )
3309
3309
3310
3310
3311 def commitforceeditor(
3311 def commitforceeditor(
3312 repo,
3312 repo,
3313 ctx,
3313 ctx,
3314 subs,
3314 subs,
3315 finishdesc=None,
3315 finishdesc=None,
3316 extramsg=None,
3316 extramsg=None,
3317 editform=b'',
3317 editform=b'',
3318 unchangedmessagedetection=False,
3318 unchangedmessagedetection=False,
3319 ):
3319 ):
3320 if not extramsg:
3320 if not extramsg:
3321 extramsg = _(b"Leave message empty to abort commit.")
3321 extramsg = _(b"Leave message empty to abort commit.")
3322
3322
3323 forms = [e for e in editform.split(b'.') if e]
3323 forms = [e for e in editform.split(b'.') if e]
3324 forms.insert(0, b'changeset')
3324 forms.insert(0, b'changeset')
3325 templatetext = None
3325 templatetext = None
3326 while forms:
3326 while forms:
3327 ref = b'.'.join(forms)
3327 ref = b'.'.join(forms)
3328 if repo.ui.config(b'committemplate', ref):
3328 if repo.ui.config(b'committemplate', ref):
3329 templatetext = committext = buildcommittemplate(
3329 templatetext = committext = buildcommittemplate(
3330 repo, ctx, subs, extramsg, ref
3330 repo, ctx, subs, extramsg, ref
3331 )
3331 )
3332 break
3332 break
3333 forms.pop()
3333 forms.pop()
3334 else:
3334 else:
3335 committext = buildcommittext(repo, ctx, subs, extramsg)
3335 committext = buildcommittext(repo, ctx, subs, extramsg)
3336
3336
3337 # run editor in the repository root
3337 # run editor in the repository root
3338 olddir = encoding.getcwd()
3338 olddir = encoding.getcwd()
3339 os.chdir(repo.root)
3339 os.chdir(repo.root)
3340
3340
3341 # make in-memory changes visible to external process
3341 # make in-memory changes visible to external process
3342 tr = repo.currenttransaction()
3342 tr = repo.currenttransaction()
3343 repo.dirstate.write(tr)
3343 repo.dirstate.write(tr)
3344 pending = tr and tr.writepending() and repo.root
3344 pending = tr and tr.writepending() and repo.root
3345
3345
3346 editortext = repo.ui.edit(
3346 editortext = repo.ui.edit(
3347 committext,
3347 committext,
3348 ctx.user(),
3348 ctx.user(),
3349 ctx.extra(),
3349 ctx.extra(),
3350 editform=editform,
3350 editform=editform,
3351 pending=pending,
3351 pending=pending,
3352 repopath=repo.path,
3352 repopath=repo.path,
3353 action=b'commit',
3353 action=b'commit',
3354 )
3354 )
3355 text = editortext
3355 text = editortext
3356
3356
3357 # strip away anything below this special string (used for editors that want
3357 # strip away anything below this special string (used for editors that want
3358 # to display the diff)
3358 # to display the diff)
3359 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3359 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3360 if stripbelow:
3360 if stripbelow:
3361 text = text[: stripbelow.start()]
3361 text = text[: stripbelow.start()]
3362
3362
3363 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3363 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3364 os.chdir(olddir)
3364 os.chdir(olddir)
3365
3365
3366 if finishdesc:
3366 if finishdesc:
3367 text = finishdesc(text)
3367 text = finishdesc(text)
3368 if not text.strip():
3368 if not text.strip():
3369 raise error.Abort(_(b"empty commit message"))
3369 raise error.Abort(_(b"empty commit message"))
3370 if unchangedmessagedetection and editortext == templatetext:
3370 if unchangedmessagedetection and editortext == templatetext:
3371 raise error.Abort(_(b"commit message unchanged"))
3371 raise error.Abort(_(b"commit message unchanged"))
3372
3372
3373 return text
3373 return text
3374
3374
3375
3375
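The while loop above picks the most specific ``[committemplate]`` key by progressively dropping editform components; the lookup order alone can be expressed as a small generator (hypothetical helper)::

    def committemplate_candidates(editform):
        # mirrors the forms/pop() loop in the editor helper above, e.g.
        # 'commit.amend' -> changeset.commit.amend, changeset.commit, changeset
        forms = [e for e in editform.split('.') if e]
        forms.insert(0, 'changeset')
        while forms:
            yield '.'.join(forms)
            forms.pop()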
3376 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3376 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3377 ui = repo.ui
3377 ui = repo.ui
3378 spec = formatter.templatespec(ref, None, None)
3378 spec = formatter.reference_templatespec(ref)
3379 t = logcmdutil.changesettemplater(ui, repo, spec)
3379 t = logcmdutil.changesettemplater(ui, repo, spec)
3380 t.t.cache.update(
3380 t.t.cache.update(
3381 (k, templater.unquotestring(v))
3381 (k, templater.unquotestring(v))
3382 for k, v in repo.ui.configitems(b'committemplate')
3382 for k, v in repo.ui.configitems(b'committemplate')
3383 )
3383 )
3384
3384
3385 if not extramsg:
3385 if not extramsg:
3386 extramsg = b'' # ensure that extramsg is string
3386 extramsg = b'' # ensure that extramsg is string
3387
3387
3388 ui.pushbuffer()
3388 ui.pushbuffer()
3389 t.show(ctx, extramsg=extramsg)
3389 t.show(ctx, extramsg=extramsg)
3390 return ui.popbuffer()
3390 return ui.popbuffer()
3391
3391
3392
3392
3393 def hgprefix(msg):
3393 def hgprefix(msg):
3394 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3394 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3395
3395
3396
3396
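hgprefix() simply prefixes every non-empty line with ``HG:``, for example::

    >>> hgprefix(b"user: alice\nbranch 'default'\n")
    b"HG: user: alice\nHG: branch 'default'"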
3397 def buildcommittext(repo, ctx, subs, extramsg):
3397 def buildcommittext(repo, ctx, subs, extramsg):
3398 edittext = []
3398 edittext = []
3399 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3399 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3400 if ctx.description():
3400 if ctx.description():
3401 edittext.append(ctx.description())
3401 edittext.append(ctx.description())
3402 edittext.append(b"")
3402 edittext.append(b"")
3403 edittext.append(b"") # Empty line between message and comments.
3403 edittext.append(b"") # Empty line between message and comments.
3404 edittext.append(
3404 edittext.append(
3405 hgprefix(
3405 hgprefix(
3406 _(
3406 _(
3407 b"Enter commit message."
3407 b"Enter commit message."
3408 b" Lines beginning with 'HG:' are removed."
3408 b" Lines beginning with 'HG:' are removed."
3409 )
3409 )
3410 )
3410 )
3411 )
3411 )
3412 edittext.append(hgprefix(extramsg))
3412 edittext.append(hgprefix(extramsg))
3413 edittext.append(b"HG: --")
3413 edittext.append(b"HG: --")
3414 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3414 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3415 if ctx.p2():
3415 if ctx.p2():
3416 edittext.append(hgprefix(_(b"branch merge")))
3416 edittext.append(hgprefix(_(b"branch merge")))
3417 if ctx.branch():
3417 if ctx.branch():
3418 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3418 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3419 if bookmarks.isactivewdirparent(repo):
3419 if bookmarks.isactivewdirparent(repo):
3420 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3420 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3421 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3421 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3422 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3422 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3423 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3423 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3424 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3424 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3425 if not added and not modified and not removed:
3425 if not added and not modified and not removed:
3426 edittext.append(hgprefix(_(b"no files changed")))
3426 edittext.append(hgprefix(_(b"no files changed")))
3427 edittext.append(b"")
3427 edittext.append(b"")
3428
3428
3429 return b"\n".join(edittext)
3429 return b"\n".join(edittext)
3430
3430
3431
3431
3432 def commitstatus(repo, node, branch, bheads=None, opts=None):
3432 def commitstatus(repo, node, branch, bheads=None, opts=None):
3433 if opts is None:
3433 if opts is None:
3434 opts = {}
3434 opts = {}
3435 ctx = repo[node]
3435 ctx = repo[node]
3436 parents = ctx.parents()
3436 parents = ctx.parents()
3437
3437
3438 if (
3438 if (
3439 not opts.get(b'amend')
3439 not opts.get(b'amend')
3440 and bheads
3440 and bheads
3441 and node not in bheads
3441 and node not in bheads
3442 and not any(
3442 and not any(
3443 p.node() in bheads and p.branch() == branch for p in parents
3443 p.node() in bheads and p.branch() == branch for p in parents
3444 )
3444 )
3445 ):
3445 ):
3446 repo.ui.status(_(b'created new head\n'))
3446 repo.ui.status(_(b'created new head\n'))
3447 # The message is not printed for initial roots. For the other
3448 # changesets, it is printed in the following situations:
3449 #
3450 # Par column: for the 2 parents with ...
3451 # N: null or no parent
3452 # B: parent is on another named branch
3453 # C: parent is a regular non head changeset
3454 # H: parent was a branch head of the current branch
3455 # Msg column: whether we print "created new head" message
3456 # In the following, it is assumed that there already exists some
3457 # initial branch heads of the current branch, otherwise nothing is
3458 # printed anyway.
3459 #
3460 # Par Msg Comment
3461 # N N y additional topo root
3462 #
3463 # B N y additional branch root
3464 # C N y additional topo head
3465 # H N n usual case
3466 #
3467 # B B y weird additional branch root
3468 # C B y branch merge
3469 # H B n merge with named branch
3470 #
3471 # C C y additional head from merge
3472 # C H n merge with a head
3473 #
3474 # H H n head merge: head count decreases
3475
3475
3476 if not opts.get(b'close_branch'):
3476 if not opts.get(b'close_branch'):
3477 for r in parents:
3477 for r in parents:
3478 if r.closesbranch() and r.branch() == branch:
3478 if r.closesbranch() and r.branch() == branch:
3479 repo.ui.status(
3479 repo.ui.status(
3480 _(b'reopening closed branch head %d\n') % r.rev()
3480 _(b'reopening closed branch head %d\n') % r.rev()
3481 )
3481 )
3482
3482
3483 if repo.ui.debugflag:
3483 if repo.ui.debugflag:
3484 repo.ui.write(
3484 repo.ui.write(
3485 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3485 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3486 )
3486 )
3487 elif repo.ui.verbose:
3487 elif repo.ui.verbose:
3488 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3488 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3489
3489
3490
3490
3491 def postcommitstatus(repo, pats, opts):
3491 def postcommitstatus(repo, pats, opts):
3492 return repo.status(match=scmutil.match(repo[None], pats, opts))
3492 return repo.status(match=scmutil.match(repo[None], pats, opts))
3493
3493
3494
3494
3495 def revert(ui, repo, ctx, parents, *pats, **opts):
3495 def revert(ui, repo, ctx, parents, *pats, **opts):
3496 opts = pycompat.byteskwargs(opts)
3496 opts = pycompat.byteskwargs(opts)
3497 parent, p2 = parents
3497 parent, p2 = parents
3498 node = ctx.node()
3498 node = ctx.node()
3499
3499
3500 mf = ctx.manifest()
3500 mf = ctx.manifest()
3501 if node == p2:
3501 if node == p2:
3502 parent = p2
3502 parent = p2
3503
3503
3504 # need all matching names in dirstate and manifest of target rev,
3505 # so have to walk both. do not print errors if files exist in one
3506 # but not the other. in both cases, filesets should be evaluated against
3507 # workingctx to get a consistent result (issue4497). this means 'set:**'
3508 # cannot be used to select missing files from target rev.
3509
3509
3510 # `names` is a mapping for all elements in working copy and target revision
3510 # `names` is a mapping for all elements in working copy and target revision
3511 # The mapping is in the form:
3511 # The mapping is in the form:
3512 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3512 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3513 names = {}
3513 names = {}
3514 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3514 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3515
3515
3516 with repo.wlock():
3516 with repo.wlock():
3517 ## filling of the `names` mapping
3517 ## filling of the `names` mapping
3518 # walk dirstate to fill `names`
3518 # walk dirstate to fill `names`
3519
3519
3520 interactive = opts.get(b'interactive', False)
3520 interactive = opts.get(b'interactive', False)
3521 wctx = repo[None]
3521 wctx = repo[None]
3522 m = scmutil.match(wctx, pats, opts)
3522 m = scmutil.match(wctx, pats, opts)
3523
3523
3524 # we'll need this later
3524 # we'll need this later
3525 targetsubs = sorted(s for s in wctx.substate if m(s))
3525 targetsubs = sorted(s for s in wctx.substate if m(s))
3526
3526
3527 if not m.always():
3527 if not m.always():
3528 matcher = matchmod.badmatch(m, lambda x, y: False)
3528 matcher = matchmod.badmatch(m, lambda x, y: False)
3529 for abs in wctx.walk(matcher):
3529 for abs in wctx.walk(matcher):
3530 names[abs] = m.exact(abs)
3530 names[abs] = m.exact(abs)
3531
3531
3532 # walk target manifest to fill `names`
3532 # walk target manifest to fill `names`
3533
3533
3534 def badfn(path, msg):
3534 def badfn(path, msg):
3535 if path in names:
3535 if path in names:
3536 return
3536 return
3537 if path in ctx.substate:
3537 if path in ctx.substate:
3538 return
3538 return
3539 path_ = path + b'/'
3539 path_ = path + b'/'
3540 for f in names:
3540 for f in names:
3541 if f.startswith(path_):
3541 if f.startswith(path_):
3542 return
3542 return
3543 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3543 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3544
3544
3545 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3545 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3546 if abs not in names:
3546 if abs not in names:
3547 names[abs] = m.exact(abs)
3547 names[abs] = m.exact(abs)
3548
3548
3549 # Find the status of all files in `names`.
3550 m = scmutil.matchfiles(repo, names)
3550 m = scmutil.matchfiles(repo, names)
3551
3551
3552 changes = repo.status(
3552 changes = repo.status(
3553 node1=node, match=m, unknown=True, ignored=True, clean=True
3553 node1=node, match=m, unknown=True, ignored=True, clean=True
3554 )
3554 )
3555 else:
3555 else:
3556 changes = repo.status(node1=node, match=m)
3556 changes = repo.status(node1=node, match=m)
3557 for kind in changes:
3557 for kind in changes:
3558 for abs in kind:
3558 for abs in kind:
3559 names[abs] = m.exact(abs)
3559 names[abs] = m.exact(abs)
3560
3560
3561 m = scmutil.matchfiles(repo, names)
3561 m = scmutil.matchfiles(repo, names)
3562
3562
3563 modified = set(changes.modified)
3563 modified = set(changes.modified)
3564 added = set(changes.added)
3564 added = set(changes.added)
3565 removed = set(changes.removed)
3565 removed = set(changes.removed)
3566 _deleted = set(changes.deleted)
3566 _deleted = set(changes.deleted)
3567 unknown = set(changes.unknown)
3567 unknown = set(changes.unknown)
3568 unknown.update(changes.ignored)
3568 unknown.update(changes.ignored)
3569 clean = set(changes.clean)
3569 clean = set(changes.clean)
3570 modadded = set()
3570 modadded = set()
3571
3571
3572 # We need to account for the state of the file in the dirstate,
3573 # even when we revert against something other than the parent. This will
3574 # slightly alter the behavior of revert (doing a backup or not, delete
3575 # or just forget, etc.).
3576 if parent == node:
3576 if parent == node:
3577 dsmodified = modified
3577 dsmodified = modified
3578 dsadded = added
3578 dsadded = added
3579 dsremoved = removed
3579 dsremoved = removed
3580 # store all local modifications, useful later for rename detection
3580 # store all local modifications, useful later for rename detection
3581 localchanges = dsmodified | dsadded
3581 localchanges = dsmodified | dsadded
3582 modified, added, removed = set(), set(), set()
3582 modified, added, removed = set(), set(), set()
3583 else:
3583 else:
3584 changes = repo.status(node1=parent, match=m)
3584 changes = repo.status(node1=parent, match=m)
3585 dsmodified = set(changes.modified)
3585 dsmodified = set(changes.modified)
3586 dsadded = set(changes.added)
3586 dsadded = set(changes.added)
3587 dsremoved = set(changes.removed)
3587 dsremoved = set(changes.removed)
3588 # store all local modifications, useful later for rename detection
3588 # store all local modifications, useful later for rename detection
3589 localchanges = dsmodified | dsadded
3589 localchanges = dsmodified | dsadded
3590
3590
3591 # only take into account removes between wc and target
3592 clean |= dsremoved - removed
3592 clean |= dsremoved - removed
3593 dsremoved &= removed
3593 dsremoved &= removed
3594 # distinguish between dirstate removes and the others
3595 removed -= dsremoved
3595 removed -= dsremoved
3596
3596
3597 modadded = added & dsmodified
3597 modadded = added & dsmodified
3598 added -= modadded
3598 added -= modadded
3599
3599
3600 # tell newly modified files apart.
3601 dsmodified &= modified
3601 dsmodified &= modified
3602 dsmodified |= modified & dsadded # dirstate added may need backup
3602 dsmodified |= modified & dsadded # dirstate added may need backup
3603 modified -= dsmodified
3603 modified -= dsmodified
3604
3604
3605 # We need to wait for some post-processing to update this set
3605 # We need to wait for some post-processing to update this set
3606 # before making the distinction. The dirstate will be used for
3606 # before making the distinction. The dirstate will be used for
3607 # that purpose.
3607 # that purpose.
3608 dsadded = added
3608 dsadded = added
3609
3609
3610 # in case of merge, files that are actually added can be reported as
3611 # modified; we need to post-process the result
3612 if p2 != nullid:
3612 if p2 != nullid:
3613 mergeadd = set(dsmodified)
3613 mergeadd = set(dsmodified)
3614 for path in dsmodified:
3614 for path in dsmodified:
3615 if path in mf:
3615 if path in mf:
3616 mergeadd.remove(path)
3616 mergeadd.remove(path)
3617 dsadded |= mergeadd
3617 dsadded |= mergeadd
3618 dsmodified -= mergeadd
3618 dsmodified -= mergeadd
3619
3619
3620 # if f is a rename, update `names` to also revert the source
3620 # if f is a rename, update `names` to also revert the source
3621 for f in localchanges:
3621 for f in localchanges:
3622 src = repo.dirstate.copied(f)
3622 src = repo.dirstate.copied(f)
3623 # XXX should we check for rename down to target node?
3623 # XXX should we check for rename down to target node?
3624 if src and src not in names and repo.dirstate[src] == b'r':
3624 if src and src not in names and repo.dirstate[src] == b'r':
3625 dsremoved.add(src)
3625 dsremoved.add(src)
3626 names[src] = True
3626 names[src] = True
3627
3627
3628 # determine the exact nature of the deleted files
3629 deladded = set(_deleted)
3629 deladded = set(_deleted)
3630 for path in _deleted:
3630 for path in _deleted:
3631 if path in mf:
3631 if path in mf:
3632 deladded.remove(path)
3632 deladded.remove(path)
3633 deleted = _deleted - deladded
3633 deleted = _deleted - deladded
3634
3634
3635 # distinguish between files to forget and the others
3636 added = set()
3636 added = set()
3637 for abs in dsadded:
3637 for abs in dsadded:
3638 if repo.dirstate[abs] != b'a':
3638 if repo.dirstate[abs] != b'a':
3639 added.add(abs)
3639 added.add(abs)
3640 dsadded -= added
3640 dsadded -= added
3641
3641
3642 for abs in deladded:
3642 for abs in deladded:
3643 if repo.dirstate[abs] == b'a':
3643 if repo.dirstate[abs] == b'a':
3644 dsadded.add(abs)
3644 dsadded.add(abs)
3645 deladded -= dsadded
3645 deladded -= dsadded
3646
3646
3647 # For files marked as removed, we check if an unknown file is present at
3648 # the same path. If such a file exists it may need to be backed up.
3649 # Making the distinction at this stage helps keep the backup
3650 # logic simpler.
3651 removunk = set()
3651 removunk = set()
3652 for abs in removed:
3652 for abs in removed:
3653 target = repo.wjoin(abs)
3653 target = repo.wjoin(abs)
3654 if os.path.lexists(target):
3654 if os.path.lexists(target):
3655 removunk.add(abs)
3655 removunk.add(abs)
3656 removed -= removunk
3656 removed -= removunk
3657
3657
3658 dsremovunk = set()
3658 dsremovunk = set()
3659 for abs in dsremoved:
3659 for abs in dsremoved:
3660 target = repo.wjoin(abs)
3660 target = repo.wjoin(abs)
3661 if os.path.lexists(target):
3661 if os.path.lexists(target):
3662 dsremovunk.add(abs)
3662 dsremovunk.add(abs)
3663 dsremoved -= dsremovunk
3663 dsremoved -= dsremovunk
3664
3664
3665 # actions to actually be performed by revert
3666 # (<list of files>, <message>) tuple
3667 actions = {
3667 actions = {
3668 b'revert': ([], _(b'reverting %s\n')),
3668 b'revert': ([], _(b'reverting %s\n')),
3669 b'add': ([], _(b'adding %s\n')),
3669 b'add': ([], _(b'adding %s\n')),
3670 b'remove': ([], _(b'removing %s\n')),
3670 b'remove': ([], _(b'removing %s\n')),
3671 b'drop': ([], _(b'removing %s\n')),
3671 b'drop': ([], _(b'removing %s\n')),
3672 b'forget': ([], _(b'forgetting %s\n')),
3672 b'forget': ([], _(b'forgetting %s\n')),
3673 b'undelete': ([], _(b'undeleting %s\n')),
3673 b'undelete': ([], _(b'undeleting %s\n')),
3674 b'noop': (None, _(b'no changes needed to %s\n')),
3674 b'noop': (None, _(b'no changes needed to %s\n')),
3675 b'unknown': (None, _(b'file not managed: %s\n')),
3675 b'unknown': (None, _(b'file not managed: %s\n')),
3676 }
3676 }
3677
3677
3678 # "constant" that convey the backup strategy.
3678 # "constant" that convey the backup strategy.
3679 # All set to `discard` if `no-backup` is set do avoid checking
3679 # All set to `discard` if `no-backup` is set do avoid checking
3680 # no_backup lower in the code.
3680 # no_backup lower in the code.
3681 # These values are ordered for comparison purposes
3681 # These values are ordered for comparison purposes
3682 backupinteractive = 3 # do backup if interactively modified
3682 backupinteractive = 3 # do backup if interactively modified
3683 backup = 2 # unconditionally do backup
3683 backup = 2 # unconditionally do backup
3684 check = 1 # check if the existing file differs from target
3684 check = 1 # check if the existing file differs from target
3685 discard = 0 # never do backup
3685 discard = 0 # never do backup
3686 if opts.get(b'no_backup'):
3686 if opts.get(b'no_backup'):
3687 backupinteractive = backup = check = discard
3687 backupinteractive = backup = check = discard
3688 if interactive:
3688 if interactive:
3689 dsmodifiedbackup = backupinteractive
3689 dsmodifiedbackup = backupinteractive
3690 else:
3690 else:
3691 dsmodifiedbackup = backup
3691 dsmodifiedbackup = backup
3692 tobackup = set()
3692 tobackup = set()
3693
3693
3694 backupanddel = actions[b'remove']
3694 backupanddel = actions[b'remove']
3695 if not opts.get(b'no_backup'):
3695 if not opts.get(b'no_backup'):
3696 backupanddel = actions[b'drop']
3696 backupanddel = actions[b'drop']
3697
3697
3698 disptable = (
3698 disptable = (
3699 # dispatch table:
3699 # dispatch table:
3700 # file state
3700 # file state
3701 # action
3701 # action
3702 # make backup
3702 # make backup
3703 ## Sets whose results will change files on disk
3704 # Modified compared to target, no local change
3704 # Modified compared to target, no local change
3705 (modified, actions[b'revert'], discard),
3705 (modified, actions[b'revert'], discard),
3706 # Modified compared to target, but local file is deleted
3706 # Modified compared to target, but local file is deleted
3707 (deleted, actions[b'revert'], discard),
3707 (deleted, actions[b'revert'], discard),
3708 # Modified compared to target, local change
3708 # Modified compared to target, local change
3709 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3709 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3710 # Added since target
3710 # Added since target
3711 (added, actions[b'remove'], discard),
3711 (added, actions[b'remove'], discard),
3712 # Added in working directory
3712 # Added in working directory
3713 (dsadded, actions[b'forget'], discard),
3713 (dsadded, actions[b'forget'], discard),
3714 # Added since target, have local modification
3714 # Added since target, have local modification
3715 (modadded, backupanddel, backup),
3715 (modadded, backupanddel, backup),
3716 # Added since target but file is missing in working directory
3716 # Added since target but file is missing in working directory
3717 (deladded, actions[b'drop'], discard),
3717 (deladded, actions[b'drop'], discard),
3718 # Removed since target, before working copy parent
3718 # Removed since target, before working copy parent
3719 (removed, actions[b'add'], discard),
3719 (removed, actions[b'add'], discard),
3720 # Same as `removed` but an unknown file exists at the same path
3720 # Same as `removed` but an unknown file exists at the same path
3721 (removunk, actions[b'add'], check),
3721 (removunk, actions[b'add'], check),
3722 # Removed since target, marked as such in working copy parent
3723 (dsremoved, actions[b'undelete'], discard),
3723 (dsremoved, actions[b'undelete'], discard),
3724 # Same as `dsremoved` but an unknown file exists at the same path
3724 # Same as `dsremoved` but an unknown file exists at the same path
3725 (dsremovunk, actions[b'undelete'], check),
3725 (dsremovunk, actions[b'undelete'], check),
3726 ## the following sets do not result in any file changes
3727 # File with no modification
3727 # File with no modification
3728 (clean, actions[b'noop'], discard),
3728 (clean, actions[b'noop'], discard),
3729 # Existing file, not tracked anywhere
3729 # Existing file, not tracked anywhere
3730 (unknown, actions[b'unknown'], discard),
3730 (unknown, actions[b'unknown'], discard),
3731 )
3731 )
3732
3732
3733 for abs, exact in sorted(names.items()):
3733 for abs, exact in sorted(names.items()):
3734 # target file to be touched on disk (relative to cwd)
3735 target = repo.wjoin(abs)
3735 target = repo.wjoin(abs)
3736 # search the entry in the dispatch table.
3736 # search the entry in the dispatch table.
3737 # if the file is in any of these sets, it was touched in the working
3737 # if the file is in any of these sets, it was touched in the working
3738 # directory parent and we are sure it needs to be reverted.
3738 # directory parent and we are sure it needs to be reverted.
3739 for table, (xlist, msg), dobackup in disptable:
3739 for table, (xlist, msg), dobackup in disptable:
3740 if abs not in table:
3740 if abs not in table:
3741 continue
3741 continue
3742 if xlist is not None:
3742 if xlist is not None:
3743 xlist.append(abs)
3743 xlist.append(abs)
3744 if dobackup:
3744 if dobackup:
3745 # If in interactive mode, don't automatically create
3745 # If in interactive mode, don't automatically create
3746 # .orig files (issue4793)
3746 # .orig files (issue4793)
3747 if dobackup == backupinteractive:
3747 if dobackup == backupinteractive:
3748 tobackup.add(abs)
3748 tobackup.add(abs)
3749 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3749 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3750 absbakname = scmutil.backuppath(ui, repo, abs)
3750 absbakname = scmutil.backuppath(ui, repo, abs)
3751 bakname = os.path.relpath(
3751 bakname = os.path.relpath(
3752 absbakname, start=repo.root
3752 absbakname, start=repo.root
3753 )
3753 )
3754 ui.note(
3754 ui.note(
3755 _(b'saving current version of %s as %s\n')
3755 _(b'saving current version of %s as %s\n')
3756 % (uipathfn(abs), uipathfn(bakname))
3756 % (uipathfn(abs), uipathfn(bakname))
3757 )
3757 )
3758 if not opts.get(b'dry_run'):
3758 if not opts.get(b'dry_run'):
3759 if interactive:
3759 if interactive:
3760 util.copyfile(target, absbakname)
3760 util.copyfile(target, absbakname)
3761 else:
3761 else:
3762 util.rename(target, absbakname)
3762 util.rename(target, absbakname)
3763 if opts.get(b'dry_run'):
3763 if opts.get(b'dry_run'):
3764 if ui.verbose or not exact:
3764 if ui.verbose or not exact:
3765 ui.status(msg % uipathfn(abs))
3765 ui.status(msg % uipathfn(abs))
3766 elif exact:
3766 elif exact:
3767 ui.warn(msg % uipathfn(abs))
3767 ui.warn(msg % uipathfn(abs))
3768 break
3768 break
3769
3769
3770 if not opts.get(b'dry_run'):
3770 if not opts.get(b'dry_run'):
3771 needdata = (b'revert', b'add', b'undelete')
3771 needdata = (b'revert', b'add', b'undelete')
3772 oplist = [actions[name][0] for name in needdata]
3772 oplist = [actions[name][0] for name in needdata]
3773 prefetch = scmutil.prefetchfiles
3773 prefetch = scmutil.prefetchfiles
3774 matchfiles = scmutil.matchfiles(
3774 matchfiles = scmutil.matchfiles(
3775 repo, [f for sublist in oplist for f in sublist]
3775 repo, [f for sublist in oplist for f in sublist]
3776 )
3776 )
3777 prefetch(
3777 prefetch(
3778 repo, [(ctx.rev(), matchfiles)],
3778 repo, [(ctx.rev(), matchfiles)],
3779 )
3779 )
3780 match = scmutil.match(repo[None], pats)
3780 match = scmutil.match(repo[None], pats)
3781 _performrevert(
3781 _performrevert(
3782 repo,
3782 repo,
3783 parents,
3783 parents,
3784 ctx,
3784 ctx,
3785 names,
3785 names,
3786 uipathfn,
3786 uipathfn,
3787 actions,
3787 actions,
3788 match,
3788 match,
3789 interactive,
3789 interactive,
3790 tobackup,
3790 tobackup,
3791 )
3791 )
3792
3792
3793 if targetsubs:
3793 if targetsubs:
3794 # Revert the subrepos on the revert list
3794 # Revert the subrepos on the revert list
3795 for sub in targetsubs:
3795 for sub in targetsubs:
3796 try:
3796 try:
3797 wctx.sub(sub).revert(
3797 wctx.sub(sub).revert(
3798 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3798 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3799 )
3799 )
3800 except KeyError:
3800 except KeyError:
3801 raise error.Abort(
3801 raise error.Abort(
3802 b"subrepository '%s' does not exist in %s!"
3802 b"subrepository '%s' does not exist in %s!"
3803 % (sub, short(ctx.node()))
3803 % (sub, short(ctx.node()))
3804 )
3804 )
3805
3805
3806
3806
3807 def _performrevert(
3807 def _performrevert(
3808 repo,
3808 repo,
3809 parents,
3809 parents,
3810 ctx,
3810 ctx,
3811 names,
3811 names,
3812 uipathfn,
3812 uipathfn,
3813 actions,
3813 actions,
3814 match,
3814 match,
3815 interactive=False,
3815 interactive=False,
3816 tobackup=None,
3816 tobackup=None,
3817 ):
3817 ):
3818 """function that actually perform all the actions computed for revert
3818 """function that actually perform all the actions computed for revert
3819
3819
3820 This is an independent function so that extensions can plug in and react to
3820 This is an independent function so that extensions can plug in and react to
3821 the imminent revert.
3821 the imminent revert.
3822
3822
3823 Make sure you have the working directory locked when calling this function.
3823 Make sure you have the working directory locked when calling this function.
3824 """
3824 """
3825 parent, p2 = parents
3825 parent, p2 = parents
3826 node = ctx.node()
3826 node = ctx.node()
3827 excluded_files = []
3827 excluded_files = []
3828
3828
3829 def checkout(f):
3829 def checkout(f):
3830 fc = ctx[f]
3830 fc = ctx[f]
3831 repo.wwrite(f, fc.data(), fc.flags())
3831 repo.wwrite(f, fc.data(), fc.flags())
3832
3832
3833 def doremove(f):
3833 def doremove(f):
3834 try:
3834 try:
3835 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3835 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3836 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3836 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3837 except OSError:
3837 except OSError:
3838 pass
3838 pass
3839 repo.dirstate.remove(f)
3839 repo.dirstate.remove(f)
3840
3840
3841 def prntstatusmsg(action, f):
3841 def prntstatusmsg(action, f):
3842 exact = names[f]
3842 exact = names[f]
3843 if repo.ui.verbose or not exact:
3843 if repo.ui.verbose or not exact:
3844 repo.ui.status(actions[action][1] % uipathfn(f))
3844 repo.ui.status(actions[action][1] % uipathfn(f))
3845
3845
3846 audit_path = pathutil.pathauditor(repo.root, cached=True)
3846 audit_path = pathutil.pathauditor(repo.root, cached=True)
3847 for f in actions[b'forget'][0]:
3847 for f in actions[b'forget'][0]:
3848 if interactive:
3848 if interactive:
3849 choice = repo.ui.promptchoice(
3849 choice = repo.ui.promptchoice(
3850 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3850 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3851 )
3851 )
3852 if choice == 0:
3852 if choice == 0:
3853 prntstatusmsg(b'forget', f)
3853 prntstatusmsg(b'forget', f)
3854 repo.dirstate.drop(f)
3854 repo.dirstate.drop(f)
3855 else:
3855 else:
3856 excluded_files.append(f)
3856 excluded_files.append(f)
3857 else:
3857 else:
3858 prntstatusmsg(b'forget', f)
3858 prntstatusmsg(b'forget', f)
3859 repo.dirstate.drop(f)
3859 repo.dirstate.drop(f)
3860 for f in actions[b'remove'][0]:
3860 for f in actions[b'remove'][0]:
3861 audit_path(f)
3861 audit_path(f)
3862 if interactive:
3862 if interactive:
3863 choice = repo.ui.promptchoice(
3863 choice = repo.ui.promptchoice(
3864 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3864 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3865 )
3865 )
3866 if choice == 0:
3866 if choice == 0:
3867 prntstatusmsg(b'remove', f)
3867 prntstatusmsg(b'remove', f)
3868 doremove(f)
3868 doremove(f)
3869 else:
3869 else:
3870 excluded_files.append(f)
3870 excluded_files.append(f)
3871 else:
3871 else:
3872 prntstatusmsg(b'remove', f)
3872 prntstatusmsg(b'remove', f)
3873 doremove(f)
3873 doremove(f)
3874 for f in actions[b'drop'][0]:
3874 for f in actions[b'drop'][0]:
3875 audit_path(f)
3875 audit_path(f)
3876 prntstatusmsg(b'drop', f)
3876 prntstatusmsg(b'drop', f)
3877 repo.dirstate.remove(f)
3877 repo.dirstate.remove(f)
3878
3878
3879 normal = None
3879 normal = None
3880 if node == parent:
3880 if node == parent:
3881 # We're reverting to our parent. If possible, we'd like status
3881 # We're reverting to our parent. If possible, we'd like status
3882 # to report the file as clean. We have to use normallookup for
3882 # to report the file as clean. We have to use normallookup for
3883 # merges to avoid losing information about merged/dirty files.
3883 # merges to avoid losing information about merged/dirty files.
3884 if p2 != nullid:
3884 if p2 != nullid:
3885 normal = repo.dirstate.normallookup
3885 normal = repo.dirstate.normallookup
3886 else:
3886 else:
3887 normal = repo.dirstate.normal
3887 normal = repo.dirstate.normal
3888
3888
3889 newlyaddedandmodifiedfiles = set()
3889 newlyaddedandmodifiedfiles = set()
3890 if interactive:
3890 if interactive:
3891 # Prompt the user for changes to revert
3891 # Prompt the user for changes to revert
3892 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3892 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3893 m = scmutil.matchfiles(repo, torevert)
3893 m = scmutil.matchfiles(repo, torevert)
3894 diffopts = patch.difffeatureopts(
3894 diffopts = patch.difffeatureopts(
3895 repo.ui,
3895 repo.ui,
3896 whitespace=True,
3896 whitespace=True,
3897 section=b'commands',
3897 section=b'commands',
3898 configprefix=b'revert.interactive.',
3898 configprefix=b'revert.interactive.',
3899 )
3899 )
3900 diffopts.nodates = True
3900 diffopts.nodates = True
3901 diffopts.git = True
3901 diffopts.git = True
3902 operation = b'apply'
3902 operation = b'apply'
3903 if node == parent:
3903 if node == parent:
3904 if repo.ui.configbool(
3904 if repo.ui.configbool(
3905 b'experimental', b'revert.interactive.select-to-keep'
3905 b'experimental', b'revert.interactive.select-to-keep'
3906 ):
3906 ):
3907 operation = b'keep'
3907 operation = b'keep'
3908 else:
3908 else:
3909 operation = b'discard'
3909 operation = b'discard'
3910
3910
3911 if operation == b'apply':
3911 if operation == b'apply':
3912 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3912 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3913 else:
3913 else:
3914 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3914 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3915 originalchunks = patch.parsepatch(diff)
3915 originalchunks = patch.parsepatch(diff)
3916
3916
3917 try:
3917 try:
3918
3918
3919 chunks, opts = recordfilter(
3919 chunks, opts = recordfilter(
3920 repo.ui, originalchunks, match, operation=operation
3920 repo.ui, originalchunks, match, operation=operation
3921 )
3921 )
3922 if operation == b'discard':
3922 if operation == b'discard':
3923 chunks = patch.reversehunks(chunks)
3923 chunks = patch.reversehunks(chunks)
3924
3924
3925 except error.PatchError as err:
3925 except error.PatchError as err:
3926 raise error.Abort(_(b'error parsing patch: %s') % err)
3926 raise error.Abort(_(b'error parsing patch: %s') % err)
3927
3927
3928 # FIXME: when doing an interactive revert of a copy, there's no way of
3928 # FIXME: when doing an interactive revert of a copy, there's no way of
3929 # performing a partial revert of the added file, the only option is
3929 # performing a partial revert of the added file, the only option is
3930 # "remove added file <name> (Yn)?", so we don't need to worry about the
3930 # "remove added file <name> (Yn)?", so we don't need to worry about the
3931 # alsorestore value. Ideally we'd be able to partially revert
3931 # alsorestore value. Ideally we'd be able to partially revert
3932 # copied/renamed files.
3932 # copied/renamed files.
3933 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3933 newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
3934 chunks, originalchunks
3934 chunks, originalchunks
3935 )
3935 )
3936 if tobackup is None:
3936 if tobackup is None:
3937 tobackup = set()
3937 tobackup = set()
3938 # Apply changes
3938 # Apply changes
3939 fp = stringio()
3939 fp = stringio()
3940 # chunks are serialized per file, but files aren't sorted
3940 # chunks are serialized per file, but files aren't sorted
3941 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3941 for f in sorted({c.header.filename() for c in chunks if ishunk(c)}):
3942 prntstatusmsg(b'revert', f)
3942 prntstatusmsg(b'revert', f)
3943 files = set()
3943 files = set()
3944 for c in chunks:
3944 for c in chunks:
3945 if ishunk(c):
3945 if ishunk(c):
3946 abs = c.header.filename()
3946 abs = c.header.filename()
3947 # Create a backup file only if this hunk should be backed up
3947 # Create a backup file only if this hunk should be backed up
3948 if c.header.filename() in tobackup:
3948 if c.header.filename() in tobackup:
3949 target = repo.wjoin(abs)
3949 target = repo.wjoin(abs)
3950 bakname = scmutil.backuppath(repo.ui, repo, abs)
3950 bakname = scmutil.backuppath(repo.ui, repo, abs)
3951 util.copyfile(target, bakname)
3951 util.copyfile(target, bakname)
3952 tobackup.remove(abs)
3952 tobackup.remove(abs)
3953 if abs not in files:
3953 if abs not in files:
3954 files.add(abs)
3954 files.add(abs)
3955 if operation == b'keep':
3955 if operation == b'keep':
3956 checkout(abs)
3956 checkout(abs)
3957 c.write(fp)
3957 c.write(fp)
3958 dopatch = fp.tell()
3958 dopatch = fp.tell()
3959 fp.seek(0)
3959 fp.seek(0)
3960 if dopatch:
3960 if dopatch:
3961 try:
3961 try:
3962 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3962 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3963 except error.PatchError as err:
3963 except error.PatchError as err:
3964 raise error.Abort(pycompat.bytestr(err))
3964 raise error.Abort(pycompat.bytestr(err))
3965 del fp
3965 del fp
3966 else:
3966 else:
3967 for f in actions[b'revert'][0]:
3967 for f in actions[b'revert'][0]:
3968 prntstatusmsg(b'revert', f)
3968 prntstatusmsg(b'revert', f)
3969 checkout(f)
3969 checkout(f)
3970 if normal:
3970 if normal:
3971 normal(f)
3971 normal(f)
3972
3972
3973 for f in actions[b'add'][0]:
3973 for f in actions[b'add'][0]:
3974 # Don't check out modified files; they are already created by the diff
3974 # Don't check out modified files; they are already created by the diff
3975 if f not in newlyaddedandmodifiedfiles:
3975 if f not in newlyaddedandmodifiedfiles:
3976 prntstatusmsg(b'add', f)
3976 prntstatusmsg(b'add', f)
3977 checkout(f)
3977 checkout(f)
3978 repo.dirstate.add(f)
3978 repo.dirstate.add(f)
3979
3979
3980 normal = repo.dirstate.normallookup
3980 normal = repo.dirstate.normallookup
3981 if node == parent and p2 == nullid:
3981 if node == parent and p2 == nullid:
3982 normal = repo.dirstate.normal
3982 normal = repo.dirstate.normal
3983 for f in actions[b'undelete'][0]:
3983 for f in actions[b'undelete'][0]:
3984 if interactive:
3984 if interactive:
3985 choice = repo.ui.promptchoice(
3985 choice = repo.ui.promptchoice(
3986 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3986 _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
3987 )
3987 )
3988 if choice == 0:
3988 if choice == 0:
3989 prntstatusmsg(b'undelete', f)
3989 prntstatusmsg(b'undelete', f)
3990 checkout(f)
3990 checkout(f)
3991 normal(f)
3991 normal(f)
3992 else:
3992 else:
3993 excluded_files.append(f)
3993 excluded_files.append(f)
3994 else:
3994 else:
3995 prntstatusmsg(b'undelete', f)
3995 prntstatusmsg(b'undelete', f)
3996 checkout(f)
3996 checkout(f)
3997 normal(f)
3997 normal(f)
3998
3998
3999 copied = copies.pathcopies(repo[parent], ctx)
3999 copied = copies.pathcopies(repo[parent], ctx)
4000
4000
4001 for f in (
4001 for f in (
4002 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
4002 actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
4003 ):
4003 ):
4004 if f in copied:
4004 if f in copied:
4005 repo.dirstate.copy(copied[f], f)
4005 repo.dirstate.copy(copied[f], f)
4006
4006
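The docstring of _performrevert above notes that it is kept as a separate function so extensions can plug in and react to an imminent revert. A minimal, hypothetical sketch of such an extension using extensions.wrapfunction (the extension and wrapper names are assumptions, not part of this file):

from mercurial import cmdutil, extensions

def _notifyrevert(orig, repo, parents, ctx, names, uipathfn, actions, match,
                  interactive=False, tobackup=None):
    # announce which files are about to be touched, then defer to the original
    repo.ui.note(b'reverting %d file(s)\n' % len(names))
    return orig(repo, parents, ctx, names, uipathfn, actions, match,
                interactive, tobackup)

def uisetup(ui):
    extensions.wrapfunction(cmdutil, '_performrevert', _notifyrevert)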
4007
4007
4008 # a list of (ui, repo, otherpeer, opts, missing) functions called by
4008 # a list of (ui, repo, otherpeer, opts, missing) functions called by
4009 # commands.outgoing. "missing" is "missing" of the result of
4009 # commands.outgoing. "missing" is "missing" of the result of
4010 # "findcommonoutgoing()"
4010 # "findcommonoutgoing()"
4011 outgoinghooks = util.hooks()
4011 outgoinghooks = util.hooks()
4012
4012
4013 # a list of (ui, repo) functions called by commands.summary
4013 # a list of (ui, repo) functions called by commands.summary
4014 summaryhooks = util.hooks()
4014 summaryhooks = util.hooks()
4015
4015
4016 # a list of (ui, repo, opts, changes) functions called by commands.summary.
4016 # a list of (ui, repo, opts, changes) functions called by commands.summary.
4017 #
4017 #
4018 # functions should return tuple of booleans below, if 'changes' is None:
4018 # functions should return tuple of booleans below, if 'changes' is None:
4019 # (whether-incomings-are-needed, whether-outgoings-are-needed)
4019 # (whether-incomings-are-needed, whether-outgoings-are-needed)
4020 #
4020 #
4021 # otherwise, 'changes' is a tuple of tuples below:
4021 # otherwise, 'changes' is a tuple of tuples below:
4022 # - (sourceurl, sourcebranch, sourcepeer, incoming)
4022 # - (sourceurl, sourcebranch, sourcepeer, incoming)
4023 # - (desturl, destbranch, destpeer, outgoing)
4023 # - (desturl, destbranch, destpeer, outgoing)
4024 summaryremotehooks = util.hooks()
4024 summaryremotehooks = util.hooks()
4025
4025
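The comments above document the expected signatures for these hook lists. As a hedged illustration (the extension name and hook body are hypothetical), an extension would typically register one of them from its uisetup:

from mercurial import cmdutil

def _reportoutgoing(ui, repo, other, opts, missing):
    # 'missing' is the "missing" list from findcommonoutgoing(), as noted above
    ui.status(b'would push %d changeset(s)\n' % len(missing))

def uisetup(ui):
    cmdutil.outgoinghooks.add(b'myext', _reportoutgoing)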
4026
4026
4027 def checkunfinished(repo, commit=False, skipmerge=False):
4027 def checkunfinished(repo, commit=False, skipmerge=False):
4028 '''Look for an unfinished multistep operation, like graft, and abort
4028 '''Look for an unfinished multistep operation, like graft, and abort
4029 if found. It's probably good to check this right before
4029 if found. It's probably good to check this right before
4030 bailifchanged().
4030 bailifchanged().
4031 '''
4031 '''
4032 # Check for non-clearable states first, so things like rebase will take
4032 # Check for non-clearable states first, so things like rebase will take
4033 # precedence over update.
4033 # precedence over update.
4034 for state in statemod._unfinishedstates:
4034 for state in statemod._unfinishedstates:
4035 if (
4035 if (
4036 state._clearable
4036 state._clearable
4037 or (commit and state._allowcommit)
4037 or (commit and state._allowcommit)
4038 or state._reportonly
4038 or state._reportonly
4039 ):
4039 ):
4040 continue
4040 continue
4041 if state.isunfinished(repo):
4041 if state.isunfinished(repo):
4042 raise error.Abort(state.msg(), hint=state.hint())
4042 raise error.Abort(state.msg(), hint=state.hint())
4043
4043
4044 for s in statemod._unfinishedstates:
4044 for s in statemod._unfinishedstates:
4045 if (
4045 if (
4046 not s._clearable
4046 not s._clearable
4047 or (commit and s._allowcommit)
4047 or (commit and s._allowcommit)
4048 or (s._opname == b'merge' and skipmerge)
4048 or (s._opname == b'merge' and skipmerge)
4049 or s._reportonly
4049 or s._reportonly
4050 ):
4050 ):
4051 continue
4051 continue
4052 if s.isunfinished(repo):
4052 if s.isunfinished(repo):
4053 raise error.Abort(s.msg(), hint=s.hint())
4053 raise error.Abort(s.msg(), hint=s.hint())
4054
4054
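A short, hypothetical command body showing the pattern recommended by the docstring above, i.e. checking for an unfinished operation right before bailifchanged():

from mercurial import cmdutil

def mycommand(ui, repo, **opts):
    cmdutil.checkunfinished(repo)  # abort if e.g. a graft or rebase is pending
    cmdutil.bailifchanged(repo)    # then require a clean working directory
    ui.status(b'nothing unfinished; safe to proceed\n')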
4055
4055
4056 def clearunfinished(repo):
4056 def clearunfinished(repo):
4057 '''Check for unfinished operations (as above), and clear the ones
4057 '''Check for unfinished operations (as above), and clear the ones
4058 that are clearable.
4058 that are clearable.
4059 '''
4059 '''
4060 for state in statemod._unfinishedstates:
4060 for state in statemod._unfinishedstates:
4061 if state._reportonly:
4061 if state._reportonly:
4062 continue
4062 continue
4063 if not state._clearable and state.isunfinished(repo):
4063 if not state._clearable and state.isunfinished(repo):
4064 raise error.Abort(state.msg(), hint=state.hint())
4064 raise error.Abort(state.msg(), hint=state.hint())
4065
4065
4066 for s in statemod._unfinishedstates:
4066 for s in statemod._unfinishedstates:
4067 if s._opname == b'merge' or s._reportonly:
4067 if s._opname == b'merge' or s._reportonly:
4068 continue
4068 continue
4069 if s._clearable and s.isunfinished(repo):
4069 if s._clearable and s.isunfinished(repo):
4070 util.unlink(repo.vfs.join(s._fname))
4070 util.unlink(repo.vfs.join(s._fname))
4071
4071
4072
4072
4073 def getunfinishedstate(repo):
4073 def getunfinishedstate(repo):
4074 '''Check for unfinished operations and return the statecheck object
4074 '''Check for unfinished operations and return the statecheck object
4075 for it'''
4075 for it'''
4076 for state in statemod._unfinishedstates:
4076 for state in statemod._unfinishedstates:
4077 if state.isunfinished(repo):
4077 if state.isunfinished(repo):
4078 return state
4078 return state
4079 return None
4079 return None
4080
4080
4081
4081
4082 def howtocontinue(repo):
4082 def howtocontinue(repo):
4083 '''Check for an unfinished operation and return the command to finish
4083 '''Check for an unfinished operation and return the command to finish
4084 it.
4084 it.
4085
4085
4086 statemod._unfinishedstates list is checked for an unfinished operation
4086 statemod._unfinishedstates list is checked for an unfinished operation
4087 and the corresponding message to finish it is generated if a method to
4087 and the corresponding message to finish it is generated if a method to
4088 continue is supported by the operation.
4088 continue is supported by the operation.
4089
4089
4090 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
4090 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
4091 a boolean.
4091 a boolean.
4092 '''
4092 '''
4093 contmsg = _(b"continue: %s")
4093 contmsg = _(b"continue: %s")
4094 for state in statemod._unfinishedstates:
4094 for state in statemod._unfinishedstates:
4095 if not state._continueflag:
4095 if not state._continueflag:
4096 continue
4096 continue
4097 if state.isunfinished(repo):
4097 if state.isunfinished(repo):
4098 return contmsg % state.continuemsg(), True
4098 return contmsg % state.continuemsg(), True
4099 if repo[None].dirty(missing=True, merge=False, branch=False):
4099 if repo[None].dirty(missing=True, merge=False, branch=False):
4100 return contmsg % _(b"hg commit"), False
4100 return contmsg % _(b"hg commit"), False
4101 return None, None
4101 return None, None
4102
4102
4103
4103
4104 def checkafterresolved(repo):
4104 def checkafterresolved(repo):
4105 '''Inform the user about the next action after completing hg resolve
4105 '''Inform the user about the next action after completing hg resolve
4106
4106
4107 If there's an unfinished operation that supports the continue flag,
4107 If there's an unfinished operation that supports the continue flag,
4108 howtocontinue will yield repo.ui.warn as the reporter.
4108 howtocontinue will yield repo.ui.warn as the reporter.
4109
4109
4110 Otherwise, it will yield repo.ui.note.
4110 Otherwise, it will yield repo.ui.note.
4111 '''
4111 '''
4112 msg, warning = howtocontinue(repo)
4112 msg, warning = howtocontinue(repo)
4113 if msg is not None:
4113 if msg is not None:
4114 if warning:
4114 if warning:
4115 repo.ui.warn(b"%s\n" % msg)
4115 repo.ui.warn(b"%s\n" % msg)
4116 else:
4116 else:
4117 repo.ui.note(b"%s\n" % msg)
4117 repo.ui.note(b"%s\n" % msg)
4118
4118
4119
4119
4120 def wrongtooltocontinue(repo, task):
4120 def wrongtooltocontinue(repo, task):
4121 '''Raise an abort suggesting how to properly continue if there is an
4121 '''Raise an abort suggesting how to properly continue if there is an
4122 active task.
4122 active task.
4123
4123
4124 Uses howtocontinue() to find the active task.
4124 Uses howtocontinue() to find the active task.
4125
4125
4126 If there's no task (repo.ui.note for 'hg commit'), it does not offer
4126 If there's no task (repo.ui.note for 'hg commit'), it does not offer
4127 a hint.
4127 a hint.
4128 '''
4128 '''
4129 after = howtocontinue(repo)
4129 after = howtocontinue(repo)
4130 hint = None
4130 hint = None
4131 if after[1]:
4131 if after[1]:
4132 hint = after[0]
4132 hint = after[0]
4133 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4133 raise error.Abort(_(b'no %s in progress') % task, hint=hint)
4134
4134
4135
4135
4136 def abortgraft(ui, repo, graftstate):
4136 def abortgraft(ui, repo, graftstate):
4137 """abort the interrupted graft and rollbacks to the state before interrupted
4137 """abort the interrupted graft and rollbacks to the state before interrupted
4138 graft"""
4138 graft"""
4139 if not graftstate.exists():
4139 if not graftstate.exists():
4140 raise error.Abort(_(b"no interrupted graft to abort"))
4140 raise error.Abort(_(b"no interrupted graft to abort"))
4141 statedata = readgraftstate(repo, graftstate)
4141 statedata = readgraftstate(repo, graftstate)
4142 newnodes = statedata.get(b'newnodes')
4142 newnodes = statedata.get(b'newnodes')
4143 if newnodes is None:
4143 if newnodes is None:
4144 # an old graft state which does not have all the data required to abort
4144 # an old graft state which does not have all the data required to abort
4145 # the graft
4145 # the graft
4146 raise error.Abort(_(b"cannot abort using an old graftstate"))
4146 raise error.Abort(_(b"cannot abort using an old graftstate"))
4147
4147
4148 # changeset from which graft operation was started
4148 # changeset from which graft operation was started
4149 if len(newnodes) > 0:
4149 if len(newnodes) > 0:
4150 startctx = repo[newnodes[0]].p1()
4150 startctx = repo[newnodes[0]].p1()
4151 else:
4151 else:
4152 startctx = repo[b'.']
4152 startctx = repo[b'.']
4153 # whether to strip or not
4153 # whether to strip or not
4154 cleanup = False
4154 cleanup = False
4155 from . import hg
4155 from . import hg
4156
4156
4157 if newnodes:
4157 if newnodes:
4158 newnodes = [repo[r].rev() for r in newnodes]
4158 newnodes = [repo[r].rev() for r in newnodes]
4159 cleanup = True
4159 cleanup = True
4160 # checking that none of the newnodes turned public or is public
4160 # checking that none of the newnodes turned public or is public
4161 immutable = [c for c in newnodes if not repo[c].mutable()]
4161 immutable = [c for c in newnodes if not repo[c].mutable()]
4162 if immutable:
4162 if immutable:
4163 repo.ui.warn(
4163 repo.ui.warn(
4164 _(b"cannot clean up public changesets %s\n")
4164 _(b"cannot clean up public changesets %s\n")
4165 % b', '.join(bytes(repo[r]) for r in immutable),
4165 % b', '.join(bytes(repo[r]) for r in immutable),
4166 hint=_(b"see 'hg help phases' for details"),
4166 hint=_(b"see 'hg help phases' for details"),
4167 )
4167 )
4168 cleanup = False
4168 cleanup = False
4169
4169
4170 # checking that no new nodes are created on top of grafted revs
4170 # checking that no new nodes are created on top of grafted revs
4171 desc = set(repo.changelog.descendants(newnodes))
4171 desc = set(repo.changelog.descendants(newnodes))
4172 if desc - set(newnodes):
4172 if desc - set(newnodes):
4173 repo.ui.warn(
4173 repo.ui.warn(
4174 _(
4174 _(
4175 b"new changesets detected on destination "
4175 b"new changesets detected on destination "
4176 b"branch, can't strip\n"
4176 b"branch, can't strip\n"
4177 )
4177 )
4178 )
4178 )
4179 cleanup = False
4179 cleanup = False
4180
4180
4181 if cleanup:
4181 if cleanup:
4182 with repo.wlock(), repo.lock():
4182 with repo.wlock(), repo.lock():
4183 hg.updaterepo(repo, startctx.node(), overwrite=True)
4183 hg.updaterepo(repo, startctx.node(), overwrite=True)
4184 # stripping the new nodes created
4184 # stripping the new nodes created
4185 strippoints = [
4185 strippoints = [
4186 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4186 c.node() for c in repo.set(b"roots(%ld)", newnodes)
4187 ]
4187 ]
4188 repair.strip(repo.ui, repo, strippoints, backup=False)
4188 repair.strip(repo.ui, repo, strippoints, backup=False)
4189
4189
4190 if not cleanup:
4190 if not cleanup:
4191 # we don't update to the startnode if we can't strip
4191 # we don't update to the startnode if we can't strip
4192 startctx = repo[b'.']
4192 startctx = repo[b'.']
4193 hg.updaterepo(repo, startctx.node(), overwrite=True)
4193 hg.updaterepo(repo, startctx.node(), overwrite=True)
4194
4194
4195 ui.status(_(b"graft aborted\n"))
4195 ui.status(_(b"graft aborted\n"))
4196 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4196 ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
4197 graftstate.delete()
4197 graftstate.delete()
4198 return 0
4198 return 0
4199
4199
4200
4200
4201 def readgraftstate(repo, graftstate):
4201 def readgraftstate(repo, graftstate):
4202 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4202 # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
4203 """read the graft state file and return a dict of the data stored in it"""
4203 """read the graft state file and return a dict of the data stored in it"""
4204 try:
4204 try:
4205 return graftstate.read()
4205 return graftstate.read()
4206 except error.CorruptedState:
4206 except error.CorruptedState:
4207 nodes = repo.vfs.read(b'graftstate').splitlines()
4207 nodes = repo.vfs.read(b'graftstate').splitlines()
4208 return {b'nodes': nodes}
4208 return {b'nodes': nodes}
4209
4209
4210
4210
4211 def hgabortgraft(ui, repo):
4211 def hgabortgraft(ui, repo):
4212 """ abort logic for aborting graft using 'hg abort'"""
4212 """ abort logic for aborting graft using 'hg abort'"""
4213 with repo.wlock():
4213 with repo.wlock():
4214 graftstate = statemod.cmdstate(repo, b'graftstate')
4214 graftstate = statemod.cmdstate(repo, b'graftstate')
4215 return abortgraft(ui, repo, graftstate)
4215 return abortgraft(ui, repo, graftstate)
@@ -1,842 +1,858 b''
1 # formatter.py - generic output formatting for mercurial
1 # formatter.py - generic output formatting for mercurial
2 #
2 #
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Generic output formatting for Mercurial
8 """Generic output formatting for Mercurial
9
9
10 The formatter provides an API to show data in various ways. The following
10 The formatter provides an API to show data in various ways. The following
11 functions should be used in place of ui.write():
11 functions should be used in place of ui.write():
12
12
13 - fm.write() for unconditional output
13 - fm.write() for unconditional output
14 - fm.condwrite() to show some extra data conditionally in plain output
14 - fm.condwrite() to show some extra data conditionally in plain output
15 - fm.context() to provide changectx to template output
15 - fm.context() to provide changectx to template output
16 - fm.data() to provide extra data to JSON or template output
16 - fm.data() to provide extra data to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
18
18
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 beforehand so the data is converted to the appropriate data type. Use
20 beforehand so the data is converted to the appropriate data type. Use
21 fm.isplain() if you need to convert or format data conditionally which isn't
21 fm.isplain() if you need to convert or format data conditionally which isn't
22 supported by the formatter API.
22 supported by the formatter API.
23
23
24 To build nested structure (i.e. a list of dicts), use fm.nested().
24 To build nested structure (i.e. a list of dicts), use fm.nested().
25
25
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27
27
28 fm.condwrite() vs 'if cond:':
28 fm.condwrite() vs 'if cond:':
29
29
30 In most cases, use fm.condwrite() so users can selectively show the data
30 In most cases, use fm.condwrite() so users can selectively show the data
31 in template output. If it's costly to build data, use plain 'if cond:' with
31 in template output. If it's costly to build data, use plain 'if cond:' with
32 fm.write().
32 fm.write().
33
33
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35
35
36 fm.nested() should be used to form a tree structure (a list of dicts of
36 fm.nested() should be used to form a tree structure (a list of dicts of
37 lists of dicts...) which can be accessed through template keywords, e.g.
37 lists of dicts...) which can be accessed through template keywords, e.g.
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 exports a dict-type object to template, which can be accessed by e.g.
39 exports a dict-type object to template, which can be accessed by e.g.
40 "{get(foo, key)}" function.
40 "{get(foo, key)}" function.
41
41
42 Doctest helper:
42 Doctest helper:
43
43
44 >>> def show(fn, verbose=False, **opts):
44 >>> def show(fn, verbose=False, **opts):
45 ... import sys
45 ... import sys
46 ... from . import ui as uimod
46 ... from . import ui as uimod
47 ... ui = uimod.ui()
47 ... ui = uimod.ui()
48 ... ui.verbose = verbose
48 ... ui.verbose = verbose
49 ... ui.pushbuffer()
49 ... ui.pushbuffer()
50 ... try:
50 ... try:
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 ... pycompat.byteskwargs(opts)))
52 ... pycompat.byteskwargs(opts)))
53 ... finally:
53 ... finally:
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55
55
56 Basic example:
56 Basic example:
57
57
58 >>> def files(ui, fm):
58 >>> def files(ui, fm):
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 ... for f in files:
60 ... for f in files:
61 ... fm.startitem()
61 ... fm.startitem()
62 ... fm.write(b'path', b'%s', f[0])
62 ... fm.write(b'path', b'%s', f[0])
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 ... fm.data(size=f[1])
65 ... fm.data(size=f[1])
66 ... fm.plain(b'\\n')
66 ... fm.plain(b'\\n')
67 ... fm.end()
67 ... fm.end()
68 >>> show(files)
68 >>> show(files)
69 foo
69 foo
70 bar
70 bar
71 >>> show(files, verbose=True)
71 >>> show(files, verbose=True)
72 foo 1970-01-01 00:00:00
72 foo 1970-01-01 00:00:00
73 bar 1970-01-01 00:00:01
73 bar 1970-01-01 00:00:01
74 >>> show(files, template=b'json')
74 >>> show(files, template=b'json')
75 [
75 [
76 {
76 {
77 "date": [0, 0],
77 "date": [0, 0],
78 "path": "foo",
78 "path": "foo",
79 "size": 123
79 "size": 123
80 },
80 },
81 {
81 {
82 "date": [1, 0],
82 "date": [1, 0],
83 "path": "bar",
83 "path": "bar",
84 "size": 456
84 "size": 456
85 }
85 }
86 ]
86 ]
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 path: foo
88 path: foo
89 date: 1970-01-01T00:00:00+00:00
89 date: 1970-01-01T00:00:00+00:00
90 path: bar
90 path: bar
91 date: 1970-01-01T00:00:01+00:00
91 date: 1970-01-01T00:00:01+00:00
92
92
93 Nested example:
93 Nested example:
94
94
95 >>> def subrepos(ui, fm):
95 >>> def subrepos(ui, fm):
96 ... fm.startitem()
96 ... fm.startitem()
97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
97 ... fm.write(b'reponame', b'[%s]\\n', b'baz')
98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
98 ... files(ui, fm.nested(b'files', tmpl=b'{reponame}'))
99 ... fm.end()
99 ... fm.end()
100 >>> show(subrepos)
100 >>> show(subrepos)
101 [baz]
101 [baz]
102 foo
102 foo
103 bar
103 bar
104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
104 >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n')
105 baz: foo, bar
105 baz: foo, bar
106 """
106 """
107
107
108 from __future__ import absolute_import, print_function
108 from __future__ import absolute_import, print_function
109
109
110 import contextlib
110 import contextlib
111 import itertools
111 import itertools
112 import os
112 import os
113
113
114 from .i18n import _
114 from .i18n import _
115 from .node import (
115 from .node import (
116 hex,
116 hex,
117 short,
117 short,
118 )
118 )
119 from .thirdparty import attr
119 from .thirdparty import attr
120
120
121 from . import (
121 from . import (
122 error,
122 error,
123 pycompat,
123 pycompat,
124 templatefilters,
124 templatefilters,
125 templatekw,
125 templatekw,
126 templater,
126 templater,
127 templateutil,
127 templateutil,
128 util,
128 util,
129 )
129 )
130 from .utils import (
130 from .utils import (
131 cborutil,
131 cborutil,
132 dateutil,
132 dateutil,
133 stringutil,
133 stringutil,
134 )
134 )
135
135
136 pickle = util.pickle
136 pickle = util.pickle
137
137
138
138
139 def isprintable(obj):
139 def isprintable(obj):
140 """Check if the given object can be directly passed in to formatter's
140 """Check if the given object can be directly passed in to formatter's
141 write() and data() functions
141 write() and data() functions
142
142
143 Returns False if the object is unsupported or must be pre-processed by
143 Returns False if the object is unsupported or must be pre-processed by
144 formatdate(), formatdict(), or formatlist().
144 formatdate(), formatdict(), or formatlist().
145 """
145 """
146 return isinstance(obj, (type(None), bool, int, pycompat.long, float, bytes))
146 return isinstance(obj, (type(None), bool, int, pycompat.long, float, bytes))
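A quick sketch of the contract described above (assuming Mercurial is importable; the values are arbitrary examples):

from mercurial import formatter

assert formatter.isprintable(b'text') and formatter.isprintable(42)
assert formatter.isprintable(None)
assert not formatter.isprintable((0, 0))        # a date tuple needs fm.formatdate()
assert not formatter.isprintable({b'k': b'v'})  # a dict needs fm.formatdict()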
147
147
148
148
149 class _nullconverter(object):
149 class _nullconverter(object):
150 '''convert non-primitive data types to be processed by formatter'''
150 '''convert non-primitive data types to be processed by formatter'''
151
151
152 # set to True if context object should be stored as item
152 # set to True if context object should be stored as item
153 storecontext = False
153 storecontext = False
154
154
155 @staticmethod
155 @staticmethod
156 def wrapnested(data, tmpl, sep):
156 def wrapnested(data, tmpl, sep):
157 '''wrap nested data by appropriate type'''
157 '''wrap nested data by appropriate type'''
158 return data
158 return data
159
159
160 @staticmethod
160 @staticmethod
161 def formatdate(date, fmt):
161 def formatdate(date, fmt):
162 '''convert date tuple to appropriate format'''
162 '''convert date tuple to appropriate format'''
163 # timestamp can be float, but the canonical form should be int
163 # timestamp can be float, but the canonical form should be int
164 ts, tz = date
164 ts, tz = date
165 return (int(ts), tz)
165 return (int(ts), tz)
166
166
167 @staticmethod
167 @staticmethod
168 def formatdict(data, key, value, fmt, sep):
168 def formatdict(data, key, value, fmt, sep):
169 '''convert dict or key-value pairs to appropriate dict format'''
169 '''convert dict or key-value pairs to appropriate dict format'''
170 # use plain dict instead of util.sortdict so that data can be
170 # use plain dict instead of util.sortdict so that data can be
171 # serialized as a builtin dict in pickle output
171 # serialized as a builtin dict in pickle output
172 return dict(data)
172 return dict(data)
173
173
174 @staticmethod
174 @staticmethod
175 def formatlist(data, name, fmt, sep):
175 def formatlist(data, name, fmt, sep):
176 '''convert iterable to appropriate list format'''
176 '''convert iterable to appropriate list format'''
177 return list(data)
177 return list(data)
178
178
179
179
180 class baseformatter(object):
180 class baseformatter(object):
181 def __init__(self, ui, topic, opts, converter):
181 def __init__(self, ui, topic, opts, converter):
182 self._ui = ui
182 self._ui = ui
183 self._topic = topic
183 self._topic = topic
184 self._opts = opts
184 self._opts = opts
185 self._converter = converter
185 self._converter = converter
186 self._item = None
186 self._item = None
187 # function to convert node to string suitable for this output
187 # function to convert node to string suitable for this output
188 self.hexfunc = hex
188 self.hexfunc = hex
189
189
190 def __enter__(self):
190 def __enter__(self):
191 return self
191 return self
192
192
193 def __exit__(self, exctype, excvalue, traceback):
193 def __exit__(self, exctype, excvalue, traceback):
194 if exctype is None:
194 if exctype is None:
195 self.end()
195 self.end()
196
196
197 def _showitem(self):
197 def _showitem(self):
198 '''show a formatted item once all data is collected'''
198 '''show a formatted item once all data is collected'''
199
199
200 def startitem(self):
200 def startitem(self):
201 '''begin an item in the format list'''
201 '''begin an item in the format list'''
202 if self._item is not None:
202 if self._item is not None:
203 self._showitem()
203 self._showitem()
204 self._item = {}
204 self._item = {}
205
205
206 def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
206 def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
207 '''convert date tuple to appropriate format'''
207 '''convert date tuple to appropriate format'''
208 return self._converter.formatdate(date, fmt)
208 return self._converter.formatdate(date, fmt)
209
209
210 def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
210 def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
211 '''convert dict or key-value pairs to appropriate dict format'''
211 '''convert dict or key-value pairs to appropriate dict format'''
212 return self._converter.formatdict(data, key, value, fmt, sep)
212 return self._converter.formatdict(data, key, value, fmt, sep)
213
213
214 def formatlist(self, data, name, fmt=None, sep=b' '):
214 def formatlist(self, data, name, fmt=None, sep=b' '):
215 '''convert iterable to appropriate list format'''
215 '''convert iterable to appropriate list format'''
216 # name is mandatory argument for now, but it could be optional if
216 # name is a mandatory argument for now, but it could be optional if
216 # name is a mandatory argument for now, but it could be optional if
217 # we have default template keyword, e.g. {item}
218 return self._converter.formatlist(data, name, fmt, sep)
218 return self._converter.formatlist(data, name, fmt, sep)
219
219
220 def context(self, **ctxs):
220 def context(self, **ctxs):
221 '''insert context objects to be used to render template keywords'''
221 '''insert context objects to be used to render template keywords'''
222 ctxs = pycompat.byteskwargs(ctxs)
222 ctxs = pycompat.byteskwargs(ctxs)
223 assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
223 assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
224 if self._converter.storecontext:
224 if self._converter.storecontext:
225 # populate missing resources in fctx -> ctx -> repo order
225 # populate missing resources in fctx -> ctx -> repo order
226 if b'fctx' in ctxs and b'ctx' not in ctxs:
226 if b'fctx' in ctxs and b'ctx' not in ctxs:
227 ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
227 ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
228 if b'ctx' in ctxs and b'repo' not in ctxs:
228 if b'ctx' in ctxs and b'repo' not in ctxs:
229 ctxs[b'repo'] = ctxs[b'ctx'].repo()
229 ctxs[b'repo'] = ctxs[b'ctx'].repo()
230 self._item.update(ctxs)
230 self._item.update(ctxs)
231
231
232 def datahint(self):
232 def datahint(self):
233 '''set of field names to be referenced'''
233 '''set of field names to be referenced'''
234 return set()
234 return set()
235
235
236 def data(self, **data):
236 def data(self, **data):
237 '''insert data into item that's not shown in default output'''
237 '''insert data into item that's not shown in default output'''
238 data = pycompat.byteskwargs(data)
238 data = pycompat.byteskwargs(data)
239 self._item.update(data)
239 self._item.update(data)
240
240
241 def write(self, fields, deftext, *fielddata, **opts):
241 def write(self, fields, deftext, *fielddata, **opts):
242 '''do default text output while assigning data to item'''
242 '''do default text output while assigning data to item'''
243 fieldkeys = fields.split()
243 fieldkeys = fields.split()
244 assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
244 assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
245 self._item.update(zip(fieldkeys, fielddata))
245 self._item.update(zip(fieldkeys, fielddata))
246
246
247 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
247 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
248 '''do conditional write (primarily for plain formatter)'''
248 '''do conditional write (primarily for plain formatter)'''
249 fieldkeys = fields.split()
249 fieldkeys = fields.split()
250 assert len(fieldkeys) == len(fielddata)
250 assert len(fieldkeys) == len(fielddata)
251 self._item.update(zip(fieldkeys, fielddata))
251 self._item.update(zip(fieldkeys, fielddata))
252
252
253 def plain(self, text, **opts):
253 def plain(self, text, **opts):
254 '''show raw text for non-templated mode'''
254 '''show raw text for non-templated mode'''
255
255
256 def isplain(self):
256 def isplain(self):
257 '''check for plain formatter usage'''
257 '''check for plain formatter usage'''
258 return False
258 return False
259
259
260 def nested(self, field, tmpl=None, sep=b''):
260 def nested(self, field, tmpl=None, sep=b''):
261 '''sub formatter to store nested data in the specified field'''
261 '''sub formatter to store nested data in the specified field'''
262 data = []
262 data = []
263 self._item[field] = self._converter.wrapnested(data, tmpl, sep)
263 self._item[field] = self._converter.wrapnested(data, tmpl, sep)
264 return _nestedformatter(self._ui, self._converter, data)
264 return _nestedformatter(self._ui, self._converter, data)
265
265
266 def end(self):
266 def end(self):
267 '''end output for the formatter'''
267 '''end output for the formatter'''
268 if self._item is not None:
268 if self._item is not None:
269 self._showitem()
269 self._showitem()
270
270
271
271
272 def nullformatter(ui, topic, opts):
272 def nullformatter(ui, topic, opts):
273 '''formatter that prints nothing'''
273 '''formatter that prints nothing'''
274 return baseformatter(ui, topic, opts, converter=_nullconverter)
274 return baseformatter(ui, topic, opts, converter=_nullconverter)
275
275
276
276
277 class _nestedformatter(baseformatter):
277 class _nestedformatter(baseformatter):
278 '''build sub items and store them in the parent formatter'''
278 '''build sub items and store them in the parent formatter'''
279
279
280 def __init__(self, ui, converter, data):
280 def __init__(self, ui, converter, data):
281 baseformatter.__init__(
281 baseformatter.__init__(
282 self, ui, topic=b'', opts={}, converter=converter
282 self, ui, topic=b'', opts={}, converter=converter
283 )
283 )
284 self._data = data
284 self._data = data
285
285
286 def _showitem(self):
286 def _showitem(self):
287 self._data.append(self._item)
287 self._data.append(self._item)
288
288
289
289
290 def _iteritems(data):
290 def _iteritems(data):
291 '''iterate key-value pairs in stable order'''
291 '''iterate key-value pairs in stable order'''
292 if isinstance(data, dict):
292 if isinstance(data, dict):
293 return sorted(pycompat.iteritems(data))
293 return sorted(pycompat.iteritems(data))
294 return data
294 return data
295
295
296
296
297 class _plainconverter(object):
297 class _plainconverter(object):
298 '''convert non-primitive data types to text'''
298 '''convert non-primitive data types to text'''
299
299
300 storecontext = False
300 storecontext = False
301
301
302 @staticmethod
302 @staticmethod
303 def wrapnested(data, tmpl, sep):
303 def wrapnested(data, tmpl, sep):
304 raise error.ProgrammingError(b'plainformatter should never be nested')
304 raise error.ProgrammingError(b'plainformatter should never be nested')
305
305
306 @staticmethod
306 @staticmethod
307 def formatdate(date, fmt):
307 def formatdate(date, fmt):
308 '''stringify date tuple in the given format'''
308 '''stringify date tuple in the given format'''
309 return dateutil.datestr(date, fmt)
309 return dateutil.datestr(date, fmt)
310
310
311 @staticmethod
311 @staticmethod
312 def formatdict(data, key, value, fmt, sep):
312 def formatdict(data, key, value, fmt, sep):
313 '''stringify key-value pairs separated by sep'''
313 '''stringify key-value pairs separated by sep'''
314 prefmt = pycompat.identity
314 prefmt = pycompat.identity
315 if fmt is None:
315 if fmt is None:
316 fmt = b'%s=%s'
316 fmt = b'%s=%s'
317 prefmt = pycompat.bytestr
317 prefmt = pycompat.bytestr
318 return sep.join(
318 return sep.join(
319 fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)
319 fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)
320 )
320 )
321
321
322 @staticmethod
322 @staticmethod
323 def formatlist(data, name, fmt, sep):
323 def formatlist(data, name, fmt, sep):
324 '''stringify iterable separated by sep'''
324 '''stringify iterable separated by sep'''
325 prefmt = pycompat.identity
325 prefmt = pycompat.identity
326 if fmt is None:
326 if fmt is None:
327 fmt = b'%s'
327 fmt = b'%s'
328 prefmt = pycompat.bytestr
328 prefmt = pycompat.bytestr
329 return sep.join(fmt % prefmt(e) for e in data)
329 return sep.join(fmt % prefmt(e) for e in data)
330
330
331
331
332 class plainformatter(baseformatter):
332 class plainformatter(baseformatter):
333 '''the default text output scheme'''
333 '''the default text output scheme'''
334
334
335 def __init__(self, ui, out, topic, opts):
335 def __init__(self, ui, out, topic, opts):
336 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
336 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
337 if ui.debugflag:
337 if ui.debugflag:
338 self.hexfunc = hex
338 self.hexfunc = hex
339 else:
339 else:
340 self.hexfunc = short
340 self.hexfunc = short
341 if ui is out:
341 if ui is out:
342 self._write = ui.write
342 self._write = ui.write
343 else:
343 else:
344 self._write = lambda s, **opts: out.write(s)
344 self._write = lambda s, **opts: out.write(s)
345
345
346 def startitem(self):
346 def startitem(self):
347 pass
347 pass
348
348
349 def data(self, **data):
349 def data(self, **data):
350 pass
350 pass
351
351
352 def write(self, fields, deftext, *fielddata, **opts):
352 def write(self, fields, deftext, *fielddata, **opts):
353 self._write(deftext % fielddata, **opts)
353 self._write(deftext % fielddata, **opts)
354
354
355 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
355 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
356 '''do conditional write'''
356 '''do conditional write'''
357 if cond:
357 if cond:
358 self._write(deftext % fielddata, **opts)
358 self._write(deftext % fielddata, **opts)
359
359
360 def plain(self, text, **opts):
360 def plain(self, text, **opts):
361 self._write(text, **opts)
361 self._write(text, **opts)
362
362
363 def isplain(self):
363 def isplain(self):
364 return True
364 return True
365
365
366 def nested(self, field, tmpl=None, sep=b''):
366 def nested(self, field, tmpl=None, sep=b''):
367 # nested data will be directly written to ui
367 # nested data will be directly written to ui
368 return self
368 return self
369
369
370 def end(self):
370 def end(self):
371 pass
371 pass
372
372
373
373
374 class debugformatter(baseformatter):
374 class debugformatter(baseformatter):
375 def __init__(self, ui, out, topic, opts):
375 def __init__(self, ui, out, topic, opts):
376 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
376 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
377 self._out = out
377 self._out = out
378 self._out.write(b"%s = [\n" % self._topic)
378 self._out.write(b"%s = [\n" % self._topic)
379
379
380 def _showitem(self):
380 def _showitem(self):
381 self._out.write(
381 self._out.write(
382 b' %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
382 b' %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
383 )
383 )
384
384
385 def end(self):
385 def end(self):
386 baseformatter.end(self)
386 baseformatter.end(self)
387 self._out.write(b"]\n")
387 self._out.write(b"]\n")
388
388
389
389
390 class pickleformatter(baseformatter):
390 class pickleformatter(baseformatter):
391 def __init__(self, ui, out, topic, opts):
391 def __init__(self, ui, out, topic, opts):
392 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
392 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
393 self._out = out
393 self._out = out
394 self._data = []
394 self._data = []
395
395
396 def _showitem(self):
396 def _showitem(self):
397 self._data.append(self._item)
397 self._data.append(self._item)
398
398
399 def end(self):
399 def end(self):
400 baseformatter.end(self)
400 baseformatter.end(self)
401 self._out.write(pickle.dumps(self._data))
401 self._out.write(pickle.dumps(self._data))
402
402
403
403
404 class cborformatter(baseformatter):
404 class cborformatter(baseformatter):
405 '''serialize items as an indefinite-length CBOR array'''
405 '''serialize items as an indefinite-length CBOR array'''
406
406
407 def __init__(self, ui, out, topic, opts):
407 def __init__(self, ui, out, topic, opts):
408 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
408 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
409 self._out = out
409 self._out = out
410 self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)
410 self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)
411
411
412 def _showitem(self):
412 def _showitem(self):
413 self._out.write(b''.join(cborutil.streamencode(self._item)))
413 self._out.write(b''.join(cborutil.streamencode(self._item)))
414
414
415 def end(self):
415 def end(self):
416 baseformatter.end(self)
416 baseformatter.end(self)
417 self._out.write(cborutil.BREAK)
417 self._out.write(cborutil.BREAK)
418
418
419
419
420 class jsonformatter(baseformatter):
420 class jsonformatter(baseformatter):
421 def __init__(self, ui, out, topic, opts):
421 def __init__(self, ui, out, topic, opts):
422 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
422 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
423 self._out = out
423 self._out = out
424 self._out.write(b"[")
424 self._out.write(b"[")
425 self._first = True
425 self._first = True
426
426
427 def _showitem(self):
427 def _showitem(self):
428 if self._first:
428 if self._first:
429 self._first = False
429 self._first = False
430 else:
430 else:
431 self._out.write(b",")
431 self._out.write(b",")
432
432
433 self._out.write(b"\n {\n")
433 self._out.write(b"\n {\n")
434 first = True
434 first = True
435 for k, v in sorted(self._item.items()):
435 for k, v in sorted(self._item.items()):
436 if first:
436 if first:
437 first = False
437 first = False
438 else:
438 else:
439 self._out.write(b",\n")
439 self._out.write(b",\n")
440 u = templatefilters.json(v, paranoid=False)
440 u = templatefilters.json(v, paranoid=False)
441 self._out.write(b' "%s": %s' % (k, u))
441 self._out.write(b' "%s": %s' % (k, u))
442 self._out.write(b"\n }")
442 self._out.write(b"\n }")
443
443
444 def end(self):
444 def end(self):
445 baseformatter.end(self)
445 baseformatter.end(self)
446 self._out.write(b"\n]\n")
446 self._out.write(b"\n]\n")
447
447
448
448
449 class _templateconverter(object):
449 class _templateconverter(object):
450 '''convert non-primitive data types to be processed by templater'''
450 '''convert non-primitive data types to be processed by templater'''
451
451
452 storecontext = True
452 storecontext = True
453
453
454 @staticmethod
454 @staticmethod
455 def wrapnested(data, tmpl, sep):
455 def wrapnested(data, tmpl, sep):
456 '''wrap nested data by templatable type'''
456 '''wrap nested data by templatable type'''
457 return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)
457 return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)
458
458
459 @staticmethod
459 @staticmethod
460 def formatdate(date, fmt):
460 def formatdate(date, fmt):
461 '''return date tuple'''
461 '''return date tuple'''
462 return templateutil.date(date)
462 return templateutil.date(date)
463
463
464 @staticmethod
464 @staticmethod
465 def formatdict(data, key, value, fmt, sep):
465 def formatdict(data, key, value, fmt, sep):
466 '''build object that can be evaluated as either plain string or dict'''
466 '''build object that can be evaluated as either plain string or dict'''
467 data = util.sortdict(_iteritems(data))
467 data = util.sortdict(_iteritems(data))
468
468
469 def f():
469 def f():
470 yield _plainconverter.formatdict(data, key, value, fmt, sep)
470 yield _plainconverter.formatdict(data, key, value, fmt, sep)
471
471
472 return templateutil.hybriddict(
472 return templateutil.hybriddict(
473 data, key=key, value=value, fmt=fmt, gen=f
473 data, key=key, value=value, fmt=fmt, gen=f
474 )
474 )
475
475
476 @staticmethod
476 @staticmethod
477 def formatlist(data, name, fmt, sep):
477 def formatlist(data, name, fmt, sep):
478 '''build object that can be evaluated as either plain string or list'''
478 '''build object that can be evaluated as either plain string or list'''
479 data = list(data)
479 data = list(data)
480
480
481 def f():
481 def f():
482 yield _plainconverter.formatlist(data, name, fmt, sep)
482 yield _plainconverter.formatlist(data, name, fmt, sep)
483
483
484 return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
484 return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
485
485
486
486
487 class templateformatter(baseformatter):
487 class templateformatter(baseformatter):
488 def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
488 def __init__(self, ui, out, topic, opts, spec, overridetemplates=None):
489 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
489 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
490 self._out = out
490 self._out = out
491 self._tref = spec.ref
491 self._tref = spec.ref
492 self._t = loadtemplater(
492 self._t = loadtemplater(
493 ui,
493 ui,
494 spec,
494 spec,
495 defaults=templatekw.keywords,
495 defaults=templatekw.keywords,
496 resources=templateresources(ui),
496 resources=templateresources(ui),
497 cache=templatekw.defaulttempl,
497 cache=templatekw.defaulttempl,
498 )
498 )
499 if overridetemplates:
499 if overridetemplates:
500 self._t.cache.update(overridetemplates)
500 self._t.cache.update(overridetemplates)
501 self._parts = templatepartsmap(
501 self._parts = templatepartsmap(
502 spec, self._t, [b'docheader', b'docfooter', b'separator']
502 spec, self._t, [b'docheader', b'docfooter', b'separator']
503 )
503 )
504 self._counter = itertools.count()
504 self._counter = itertools.count()
505 self._renderitem(b'docheader', {})
505 self._renderitem(b'docheader', {})
506
506
507 def _showitem(self):
507 def _showitem(self):
508 item = self._item.copy()
508 item = self._item.copy()
509 item[b'index'] = index = next(self._counter)
509 item[b'index'] = index = next(self._counter)
510 if index > 0:
510 if index > 0:
511 self._renderitem(b'separator', {})
511 self._renderitem(b'separator', {})
512 self._renderitem(self._tref, item)
512 self._renderitem(self._tref, item)
513
513
514 def _renderitem(self, part, item):
514 def _renderitem(self, part, item):
515 if part not in self._parts:
515 if part not in self._parts:
516 return
516 return
517 ref = self._parts[part]
517 ref = self._parts[part]
518 # None can't be put in the mapping dict since it means <unset>
518 # None can't be put in the mapping dict since it means <unset>
519 for k, v in item.items():
519 for k, v in item.items():
520 if v is None:
520 if v is None:
521 item[k] = templateutil.wrappedvalue(v)
521 item[k] = templateutil.wrappedvalue(v)
522 self._out.write(self._t.render(ref, item))
522 self._out.write(self._t.render(ref, item))
523
523
524 @util.propertycache
524 @util.propertycache
525 def _symbolsused(self):
525 def _symbolsused(self):
526 return self._t.symbolsused(self._tref)
526 return self._t.symbolsused(self._tref)
527
527
528 def datahint(self):
528 def datahint(self):
529 '''set of field names to be referenced from the template'''
529 '''set of field names to be referenced from the template'''
530 return self._symbolsused[0]
530 return self._symbolsused[0]
531
531
532 def end(self):
532 def end(self):
533 baseformatter.end(self)
533 baseformatter.end(self)
534 self._renderitem(b'docfooter', {})
534 self._renderitem(b'docfooter', {})
535
535
536
536
@attr.s(frozen=True)
class templatespec(object):
    ref = attr.ib()
    tmpl = attr.ib()
    mapfile = attr.ib()
    refargs = attr.ib(default=None)


+def empty_templatespec():
+    return templatespec(None, None, None)
+
+
+def reference_templatespec(ref, refargs=None):
+    return templatespec(ref, None, None, refargs)
+
+
+def literal_templatespec(tmpl):
+    return templatespec(b'', tmpl, None)
+
+
+def mapfile_templatespec(topic, mapfile):
+    return templatespec(topic, None, mapfile)
+
+
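For orientation, a minimal sketch (not part of the changeset) of what each factory above returns, read straight off the definitions:

    # illustrative only -- each factory is shorthand for one templatespec shape
    empty_templatespec()                  # templatespec(None, None, None)
    reference_templatespec(b'json')       # templatespec(b'json', None, None)
    literal_templatespec(b'{rev}\n')      # templatespec(b'', b'{rev}\n', None)
    mapfile_templatespec(b'log', b'map')  # templatespec(b'log', None, b'map')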
def lookuptemplate(ui, topic, tmpl):
    """Find the template matching the given -T/--template spec 'tmpl'

    'tmpl' can be any of the following:

    - a literal template (e.g. '{rev}')
    - a reference to a built-in template (i.e. formatter)
    - a map-file name or path (e.g. 'changelog')
    - a reference to [templates] in config file
    - a path to a raw template file

    A map file defines a stand-alone template environment. If a map file is
    selected, all templates defined in the file will be loaded, and the
    template matching the given topic will be rendered. Aliases won't be
    loaded from user config, but from the map file.

    If no map file is selected, all templates in the [templates] section will
    be available, as well as aliases in [templatealias].
    """

    if not tmpl:
-        return templatespec(None, None, None)
+        return empty_templatespec()

    # looks like a literal template?
    if b'{' in tmpl:
-        return templatespec(b'', tmpl, None)
+        return literal_templatespec(tmpl)

    # a reference to built-in (formatter) template
    if tmpl in {b'cbor', b'json', b'pickle', b'debug'}:
-        return templatespec(tmpl, None, None)
+        return reference_templatespec(tmpl)

    # a function-style reference to built-in template
    func, fsep, ftail = tmpl.partition(b'(')
    if func in {b'cbor', b'json'} and fsep and ftail.endswith(b')'):
        templater.parseexpr(tmpl)  # make sure syntax errors are confined
-        return templatespec(func, None, None, refargs=ftail[:-1])
+        return reference_templatespec(func, refargs=ftail[:-1])

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = templater.templatepath(
            b'map-cmdline.' + tmpl
        ) or templater.templatepath(tmpl)
        if mapname:
-            return templatespec(topic, None, mapname)
+            return mapfile_templatespec(topic, mapname)

    # perhaps it's a reference to [templates]
    if ui.config(b'templates', tmpl):
-        return templatespec(tmpl, None, None)
+        return reference_templatespec(tmpl)

    if tmpl == b'list':
        ui.write(_(b"available styles: %s\n") % templater.stylelist())
        raise error.Abort(_(b"specify a template"))

    # perhaps it's a path to a map or a template
    if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith(b"map-"):
-            return templatespec(topic, None, os.path.realpath(tmpl))
+            return mapfile_templatespec(topic, os.path.realpath(tmpl))
        with util.posixfile(tmpl, b'rb') as f:
            tmpl = f.read()
-        return templatespec(b'', tmpl, None)
+        return literal_templatespec(tmpl)

    # constant string?
-    return templatespec(b'', tmpl, None)
+    return literal_templatespec(tmpl)


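A rough sketch of how a few common -T/--template values resolve through the branches above (illustrative expectations only; ui and the map-file path are assumed):

    lookuptemplate(ui, b'log', b'')           # -> empty_templatespec()
    lookuptemplate(ui, b'log', b'{rev}\n')    # -> literal_templatespec(b'{rev}\n')
    lookuptemplate(ui, b'log', b'json')       # -> reference_templatespec(b'json')
    lookuptemplate(ui, b'log', b'json(rev)')  # -> reference_templatespec(b'json', refargs=b'rev')
    lookuptemplate(ui, b'log', b'changelog')  # -> mapfile_templatespec(b'log', <map-cmdline.changelog path>)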
def templatepartsmap(spec, t, partnames):
    """Create a mapping of {part: ref}"""
    partsmap = {spec.ref: spec.ref}  # initial ref must exist in t
    if spec.mapfile:
        partsmap.update((p, p) for p in partnames if p in t)
    elif spec.ref:
        for part in partnames:
            ref = b'%s:%s' % (spec.ref, part)  # select config sub-section
            if ref in t:
                partsmap[part] = ref
    return partsmap


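Because templatepartsmap also resolves 'ref:part' names, a [templates] reference can carry its own docheader/docfooter/separator sub-templates. A hedged hgrc sketch (the template bodies and the myjson name are invented):

    [templates]
    myjson = ' {dict(rev, node|short)|json}'
    myjson:docheader = '[\n'
    myjson:docfooter = '\n]\n'
    myjson:separator = ',\n'

With that in place, something like :hg:`log -Tmyjson` would be expected to pick up the sub-parts; treat the exact quoting as an assumption rather than a recipe.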
def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
    """Create a templater either from a literal template or by loading from
    a map file"""
    assert not (spec.tmpl and spec.mapfile)
    if spec.mapfile:
        return templater.templater.frommapfile(
            spec.mapfile, defaults=defaults, resources=resources, cache=cache
        )
    return maketemplater(
        ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
    )


def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
    """Create a templater from a string template 'tmpl'"""
    aliases = ui.configitems(b'templatealias')
    t = templater.templater(
        defaults=defaults, resources=resources, cache=cache, aliases=aliases
    )
    t.cache.update(
        (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
    )
    if tmpl:
        t.cache[b''] = tmpl
    return t


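A minimal sketch of driving maketemplater directly (assuming a ui object is at hand; the keyword name 'word' is made up):

    # hypothetical usage, not from the changeset
    t = maketemplater(ui, b'hello, {word}\n')
    rendered = t.renderdefault({b'word': b'world'})  # expected: b'hello, world\n'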
# marker to denote a resource to be loaded on demand based on mapping values
# (e.g. (ctx, path) -> fctx)
_placeholder = object()


class templateresources(templater.resourcemapper):
    """Resource mapper designed for the default templatekw and function"""

    def __init__(self, ui, repo=None):
        self._resmap = {
            b'cache': {},  # for templatekw/funcs to store reusable data
            b'repo': repo,
            b'ui': ui,
        }

    def availablekeys(self, mapping):
        return {
            k for k in self.knownkeys() if self._getsome(mapping, k) is not None
        }

    def knownkeys(self):
        return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}

    def lookup(self, mapping, key):
        if key not in self.knownkeys():
            return None
        v = self._getsome(mapping, key)
        if v is _placeholder:
            v = mapping[key] = self._loadermap[key](self, mapping)
        return v

    def populatemap(self, context, origmapping, newmapping):
        mapping = {}
        if self._hasnodespec(newmapping):
            mapping[b'revcache'] = {}  # per-ctx cache
        if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
            orignode = templateutil.runsymbol(context, origmapping, b'node')
            mapping[b'originalnode'] = orignode
        # put marker to override 'ctx'/'fctx' in mapping if any, and flag
        # its existence to be reported by availablekeys()
        if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
            mapping[b'ctx'] = _placeholder
        if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
            mapping[b'fctx'] = _placeholder
        return mapping

    def _getsome(self, mapping, key):
        v = mapping.get(key)
        if v is not None:
            return v
        return self._resmap.get(key)

    def _hasliteral(self, mapping, key):
        """Test if a literal value is set or unset in the given mapping"""
        return key in mapping and not callable(mapping[key])

    def _getliteral(self, mapping, key):
        """Return value of the given name if it is a literal"""
        v = mapping.get(key)
        if callable(v):
            return None
        return v

    def _hasnodespec(self, mapping):
        """Test if context revision is set or unset in the given mapping"""
        return b'node' in mapping or b'ctx' in mapping

    def _loadctx(self, mapping):
        repo = self._getsome(mapping, b'repo')
        node = self._getliteral(mapping, b'node')
        if repo is None or node is None:
            return
        try:
            return repo[node]
        except error.RepoLookupError:
            return None  # maybe hidden/non-existent node

    def _loadfctx(self, mapping):
        ctx = self._getsome(mapping, b'ctx')
        path = self._getliteral(mapping, b'path')
        if ctx is None or path is None:
            return None
        try:
            return ctx[path]
        except error.LookupError:
            return None  # maybe removed file?

    _loadermap = {
        b'ctx': _loadctx,
        b'fctx': _loadfctx,
    }


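The _placeholder dance above is easier to see in isolation. A generic sketch of the same on-demand pattern (hypothetical names, not part of formatter.py): populatemap() plants a placeholder, and lookup() swaps it for the real object the first time a template actually asks for it.

    _PLACEHOLDER = object()

    def populate(mapping):
        # plan to derive 'ctx' later, but only if a keyword actually needs it
        if 'node' in mapping and 'ctx' not in mapping:
            mapping['ctx'] = _PLACEHOLDER

    def lookup(mapping, key, loaders):
        value = mapping.get(key)
        if value is _PLACEHOLDER:
            value = mapping[key] = loaders[key](mapping)  # load once, then cache
        return value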
def _internaltemplateformatter(
    ui,
    out,
    topic,
    opts,
    spec,
    tmpl,
    docheader=b'',
    docfooter=b'',
    separator=b'',
):
    """Build template formatter that handles customizable built-in templates
    such as -Tjson(...)"""
    templates = {spec.ref: tmpl}
    if docheader:
        templates[b'%s:docheader' % spec.ref] = docheader
    if docfooter:
        templates[b'%s:docfooter' % spec.ref] = docfooter
    if separator:
        templates[b'%s:separator' % spec.ref] = separator
    return templateformatter(
        ui, out, topic, opts, spec, overridetemplates=templates
    )


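On the command line this is what backs the function-style specs; a hedged example (output shape described rather than reproduced verbatim):

    $ hg log -r . -T 'json(rev, node)'   # expected: a JSON list of {"rev": ..., "node": ...} objects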
def formatter(ui, out, topic, opts):
    spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
    if spec.ref == b"cbor" and spec.refargs is not None:
        return _internaltemplateformatter(
            ui,
            out,
            topic,
            opts,
            spec,
            tmpl=b'{dict(%s)|cbor}' % spec.refargs,
            docheader=cborutil.BEGIN_INDEFINITE_ARRAY,
            docfooter=cborutil.BREAK,
        )
    elif spec.ref == b"cbor":
        return cborformatter(ui, out, topic, opts)
    elif spec.ref == b"json" and spec.refargs is not None:
        return _internaltemplateformatter(
            ui,
            out,
            topic,
            opts,
            spec,
            tmpl=b'{dict(%s)|json}' % spec.refargs,
            docheader=b'[\n ',
            docfooter=b'\n]\n',
            separator=b',\n ',
        )
    elif spec.ref == b"json":
        return jsonformatter(ui, out, topic, opts)
    elif spec.ref == b"pickle":
        assert spec.refargs is None, r'function-style not supported'
        return pickleformatter(ui, out, topic, opts)
    elif spec.ref == b"debug":
        assert spec.refargs is None, r'function-style not supported'
        return debugformatter(ui, out, topic, opts)
    elif spec.ref or spec.tmpl or spec.mapfile:
        assert spec.refargs is None, r'function-style not supported'
        return templateformatter(ui, out, topic, opts, spec)
    # developer config: ui.formatdebug
    elif ui.configbool(b'ui', b'formatdebug'):
        return debugformatter(ui, out, topic, opts)
    # deprecated config: ui.formatjson
    elif ui.configbool(b'ui', b'formatjson'):
        return jsonformatter(ui, out, topic, opts)
    return plainformatter(ui, out, topic, opts)


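A hedged sketch of how a command-side caller typically consumes this entry point (the topic and field names here are invented; ui conventionally doubles as the output channel):

    fm = formatter(ui, ui, b'widgets', opts)
    for name, size in items:
        fm.startitem()
        fm.write(b'name', b'%s ', name)   # keyed field in json/cbor, formatted text in plain mode
        fm.write(b'size', b'%d\n', size)
    fm.end()                              # lets template-based formatters emit their docfooter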
@contextlib.contextmanager
def openformatter(ui, filename, topic, opts):
    """Create a formatter that writes output to the specified file

    Must be invoked using the 'with' statement.
    """
    with util.posixfile(filename, b'wb') as out:
        with formatter(ui, out, topic, opts) as fm:
            yield fm


@contextlib.contextmanager
def _neverending(fm):
    yield fm


def maybereopen(fm, filename):
    """Create a formatter backed by a file if a filename is specified, else
    return the given formatter

    Must be invoked using the 'with' statement. This will never call fm.end()
    on the given formatter.
    """
    if filename:
        return openformatter(fm._ui, filename, fm._topic, fm._opts)
    else:
        return _neverending(fm)
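A small usage sketch for maybereopen (the filename and field are placeholders):

    # hypothetical caller: write to a file when an --output-style option is set,
    # otherwise keep using the existing formatter without ending it
    with maybereopen(fm, filename) as fm2:
        fm2.startitem()
        fm2.write(b'status', b'%s\n', b'ok')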