grep: speed up `hg grep --all-files some/path` by using ctx.matches(match)...
Martin von Zweigbergk
r44501:6cfaebb6 default
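The commit message above summarizes the idea: when `hg grep --all-files` is limited to a path, asking each changeset context only for the files selected by the matcher is cheaper than walking every file in the manifest and filtering afterwards. A minimal sketch of that idea, assuming the standard `basectx` API (`ctx` is a changeset context, `match` the matcher built from the command-line patterns; `process` is a hypothetical placeholder, and this snippet is an illustration rather than part of the hunk below, which only shows unchanged context from mercurial/cmdutil.py):

    # restrict the manifest walk to the matching files up front
    for fn in ctx.matches(match):
        process(fn)  # hypothetical per-file work

    # rather than visiting every file and filtering afterwards
    for fn in ctx:
        if match(fn):
            process(fn)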
@@ -1,4068 +1,4072 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import copy as copymod
import errno
import os
import re

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
    setattr,
)
from .thirdparty import attr

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    dirstateguard,
    encoding,
    error,
    formatter,
    logcmdutil,
    match as matchmod,
    merge as mergemod,
    mergeutil,
    obsolete,
    patch,
    pathutil,
    phases,
    pycompat,
    repair,
    revlog,
    rewriteutil,
    scmutil,
    smartset,
    state as statemod,
    subrepoutil,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)

from .utils import (
    dateutil,
    stringutil,
)

if pycompat.TYPE_CHECKING:
    from typing import (
        Any,
        Dict,
    )

    for t in (Any, Dict):
        assert t

stringio = util.stringio

# templates of common command options

dryrunopts = [
    (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
]

confirmopts = [
    (b'', b'confirm', None, _(b'ask before applying actions')),
]

remoteopts = [
    (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
    (
        b'',
        b'remotecmd',
        b'',
        _(b'specify hg command to run on the remote side'),
        _(b'CMD'),
    ),
    (
        b'',
        b'insecure',
        None,
        _(b'do not verify server certificate (ignoring web.cacerts config)'),
    ),
]

walkopts = [
    (
        b'I',
        b'include',
        [],
        _(b'include names matching the given patterns'),
        _(b'PATTERN'),
    ),
    (
        b'X',
        b'exclude',
        [],
        _(b'exclude names matching the given patterns'),
        _(b'PATTERN'),
    ),
]

commitopts = [
    (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
    (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
]

commitopts2 = [
    (
        b'd',
        b'date',
        b'',
        _(b'record the specified date as commit date'),
        _(b'DATE'),
    ),
    (
        b'u',
        b'user',
        b'',
        _(b'record the specified user as committer'),
        _(b'USER'),
    ),
]

commitopts3 = [
    (b'D', b'currentdate', None, _(b'record the current date as commit date')),
    (b'U', b'currentuser', None, _(b'record the current user as committer')),
]

formatteropts = [
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

templateopts = [
    (
        b'',
        b'style',
        b'',
        _(b'display using template map file (DEPRECATED)'),
        _(b'STYLE'),
    ),
    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
]

logopts = [
    (b'p', b'patch', None, _(b'show patch')),
    (b'g', b'git', None, _(b'use git extended diff format')),
    (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
    (b'M', b'no-merges', None, _(b'do not show merges')),
    (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
    (b'G', b'graph', None, _(b"show the revision DAG")),
] + templateopts

diffopts = [
    (b'a', b'text', None, _(b'treat all files as text')),
    (b'g', b'git', None, _(b'use git extended diff format')),
    (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
    (b'', b'nodates', None, _(b'omit dates from diff headers')),
]

diffwsopts = [
    (
        b'w',
        b'ignore-all-space',
        None,
        _(b'ignore white space when comparing lines'),
    ),
    (
        b'b',
        b'ignore-space-change',
        None,
        _(b'ignore changes in the amount of white space'),
    ),
    (
        b'B',
        b'ignore-blank-lines',
        None,
        _(b'ignore changes whose lines are all blank'),
    ),
    (
        b'Z',
        b'ignore-space-at-eol',
        None,
        _(b'ignore changes in whitespace at EOL'),
    ),
]

diffopts2 = (
    [
        (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
        (
            b'p',
            b'show-function',
            None,
            _(b'show which function each change is in'),
        ),
        (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
    ]
    + diffwsopts
    + [
        (
            b'U',
            b'unified',
            b'',
            _(b'number of lines of context to show'),
            _(b'NUM'),
        ),
        (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
        (
            b'',
            b'root',
            b'',
            _(b'produce diffs relative to subdirectory'),
            _(b'DIR'),
        ),
    ]
)

mergetoolopts = [
    (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
]

similarityopts = [
    (
        b's',
        b'similarity',
        b'',
        _(b'guess renamed files by similarity (0<=s<=100)'),
        _(b'SIMILARITY'),
    )
]

subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]

debugrevlogopts = [
    (b'c', b'changelog', False, _(b'open changelog')),
    (b'm', b'manifest', False, _(b'open manifest')),
    (b'', b'dir', b'', _(b'open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = b"^HG: ------------------------ >8 ------------------------$"


def check_at_most_one_arg(opts, *args):
    """abort if more than one of the arguments are in opts

    Returns the unique argument or None if none of them were specified.
    """

    def to_display(name):
        return pycompat.sysbytes(name).replace(b'_', b'-')

    previous = None
    for x in args:
        if opts.get(x):
            if previous:
                raise error.Abort(
                    _(b'cannot specify both --%s and --%s')
                    % (to_display(previous), to_display(x))
                )
            previous = x
    return previous


def check_incompatible_arguments(opts, first, *others):
    """abort if the first argument is given along with any of the others

    Unlike check_at_most_one_arg(), `others` are not mutually exclusive
    among themselves.
    """
    for other in others:
        check_at_most_one_arg(opts, first, other)


def resolvecommitoptions(ui, opts):
    """modify commit options dict to handle related options

    The return value indicates that ``rewrite.update-timestamp`` is the reason
    the ``date`` option is set.
    """
    check_at_most_one_arg(opts, b'date', b'currentdate')
    check_at_most_one_arg(opts, b'user', b'currentuser')

    datemaydiffer = False  # date-only change should be ignored?

    if opts.get(b'currentdate'):
        opts[b'date'] = b'%d %d' % dateutil.makedate()
    elif (
        not opts.get(b'date')
        and ui.configbool(b'rewrite', b'update-timestamp')
        and opts.get(b'currentdate') is None
    ):
        opts[b'date'] = b'%d %d' % dateutil.makedate()
        datemaydiffer = True

    if opts.get(b'currentuser'):
        opts[b'user'] = ui.username()

    return datemaydiffer


def checknotesize(ui, opts):
    """ make sure note is of valid format """

    note = opts.get(b'note')
    if not note:
        return

    if len(note) > 255:
        raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
    if b'\n' in note:
        raise error.Abort(_(b"note cannot contain a newline"))


def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)


def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    alsorestore = set()
    for chunk in chunks:
        if (
            ishunk(chunk)
            and chunk.header.isnewfile()
            and chunk not in originalchunks
        ):
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
            alsorestore.update(
                set(chunk.header.files()) - {chunk.header.filename()}
            )
    return newlyaddedandmodifiedfiles, alsorestore


def parsealiases(cmd):
    return cmd.split(b"|")


def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', b'')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write

    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)

    setattr(ui, 'write', wrap)
    return oldwrite


def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
    try:
        if usecurses:
            if testfile:
                recordfn = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector
                )
            else:
                recordfn = crecordmod.chunkselector

            return crecordmod.filterpatch(
                ui, originalhunks, recordfn, operation
            )
    except crecordmod.fallbackerror as e:
        ui.warn(b'%s\n' % e.message)  # pytype: disable=attribute-error
        ui.warn(_(b'falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)


def recordfilter(ui, originalhunks, match, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages to indicate to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config(b'experimental', b'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(
            ui, originalhunks, usecurses, testfile, match, operation
        )
    finally:
        ui.write = oldwrite
    return newchunks, newopts

def dorecord(
    ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
):
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _(b'running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _(b'running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get(b'user'):
        ui.username()  # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is a generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """
        if not opts.get(b'interactive-unshelve'):
            checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(
                _(
                    b'cannot partially commit a merge '
                    b'(use "hg commit" instead)'
                )
            )

        def fail(f, msg):
            raise error.Abort(b'%s: %s' % (f, msg))

        force = opts.get(b'force')
        if not force:
            match = matchmod.badmatch(match, fail)

        status = repo.status(match=match)

        overrides = {(b'ui', b'commitsubrepos'): True}

        with repo.ui.configoverride(overrides, b'record'):
            # subrepoutil.precommit() modifies the status
            tmpstatus = scmutil.status(
                copymod.copy(status.modified),
                copymod.copy(status.added),
                copymod.copy(status.removed),
                copymod.copy(status.deleted),
                copymod.copy(status.unknown),
                copymod.copy(status.ignored),
                copymod.copy(status.clean),  # pytype: disable=wrong-arg-count
            )

            # Force allows -X subrepo to skip the subrepo.
            subs, commitsubs, newstate = subrepoutil.precommit(
                repo.ui, wctx, tmpstatus, match, force=True
            )
            for s in subs:
                if s in commitsubs:
                    dirtyreason = wctx.sub(s).dirtyreason(True)
                    raise error.Abort(dirtyreason)

        if not force:
            repo.checkcommitpatterns(wctx, match, status, fail)
        diffopts = patch.difffeatureopts(
            ui,
            opts=opts,
            whitespace=True,
            section=b'commands',
            configprefix=b'commit.interactive.',
        )
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)
        match = scmutil.match(repo[None], pats)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks, match)
        except error.PatchError as err:
            raise error.Abort(_(b'error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir. We will also need to restore
        # files that were the sources of renames so that the patch application
        # works.
        newlyaddedandmodifiedfiles, alsorestore = newandmodified(
            chunks, originalchunks
        )
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_(b'no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [
                f
                for f in newfiles
                if f in modified or f in newlyaddedandmodifiedfiles
            ]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join(b'record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(
                    prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
                )
                os.close(fd)
                ui.debug(b'backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get(b'review', False):
                patchtext = (
                    crecordmod.diffhelptext
                    + crecordmod.patchhelptext
                    + fp.read()
                )
                reviewedpatch = ui.edit(
                    patchtext, b"", action=b"diff", repopath=repo.path
                )
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
                mergemod.update(
                    repo,
                    repo.dirstate.p1(),
                    branchmerge=False,
                    force=True,
                    matcher=m,
                )

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug(b'applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in pycompat.iteritems(backups):
                    ui.debug(b'restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == b'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open into thinking we haven't
                    # modified them.
                    #
                    # Also note that this is racy, as an editor could notice
                    # the file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)


class dirnode(object):
    """
    Represent a directory in the user's working copy with information required
    for the purpose of tersing its status.

    path is the path to the directory, without a trailing '/'

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs is a dictionary with the sub-directory name as the key and its own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not a direct child of this directory, we traverse to the
        directory of which this file is a direct child and add the file
        there.
        """

        # the filename contains a path separator, which means it's not a direct
        # child of this directory
        if b'/' in filename:
            subdir, filep = filename.split(b'/', 1)

            # does the dirnode object for subdir exist
            if subdir not in self.subdirs:
                subdirpath = pathutil.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, pathutil.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to terse
        that status. -> yield (status, dirpath). dirpath will end in '/'.

        2) Otherwise, we do the following:

        a) Yield (status, filepath) for all the files which are in this
        directory (only the ones in this directory, not the subdirs)

        b) Recurse the function on all the subdirectories of this
        directory
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + b'/'
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath


def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is a scmutil.status() object which contains a list of files for
    each status.
    terseargs is the string which is passed by the user as the argument to
    `--terse` flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.
    """
    # the order matters here as that is used to produce final list
    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_(b"'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode(b'')
    pstatus = (
        b'modified',
        b'added',
        b'deleted',
        b'clean',
        b'unknown',
        b'ignored',
        b'removed',
    )

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return scmutil.status(*tersedlist)


def _commentlines(raw):
    '''Surround lines with a comment char and a new line'''
    lines = raw.splitlines()
    commentedlines = [b'# %s' % line for line in lines]
    return b'\n'.join(commentedlines) + b'\n'


@attr.s(frozen=True)
class morestatus(object):
    reporoot = attr.ib()
    unfinishedop = attr.ib()
    unfinishedmsg = attr.ib()
    activemerge = attr.ib()
    unresolvedpaths = attr.ib()
    _formattedpaths = attr.ib(init=False, default=set())
    _label = b'status.morestatus'

    def formatfile(self, path, fm):
        self._formattedpaths.add(path)
        if self.activemerge and path in self.unresolvedpaths:
            fm.data(unresolved=True)

    def formatfooter(self, fm):
        if self.unfinishedop or self.unfinishedmsg:
            fm.startitem()
            fm.data(itemtype=b'morestatus')

        if self.unfinishedop:
            fm.data(unfinished=self.unfinishedop)
            statemsg = (
                _(b'The repository is in an unfinished *%s* state.')
                % self.unfinishedop
            )
            fm.plain(b'%s\n' % _commentlines(statemsg), label=self._label)
        if self.unfinishedmsg:
            fm.data(unfinishedmsg=self.unfinishedmsg)

        # May also start new data items.
        self._formatconflicts(fm)

        if self.unfinishedmsg:
            fm.plain(
                b'%s\n' % _commentlines(self.unfinishedmsg), label=self._label
            )

    def _formatconflicts(self, fm):
        if not self.activemerge:
            return

        if self.unresolvedpaths:
            mergeliststr = b'\n'.join(
                [
                    b' %s'
                    % util.pathto(self.reporoot, encoding.getcwd(), path)
                    for path in self.unresolvedpaths
                ]
            )
            msg = (
                _(
                    '''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE'''
                )
                % mergeliststr
            )

            # If any paths with unresolved conflicts were not previously
            # formatted, output them now.
            for f in self.unresolvedpaths:
                if f in self._formattedpaths:
                    # Already output.
                    continue
                fm.startitem()
                # We can't claim to know the status of the file - it may just
                # have been in one of the states that were not requested for
                # display, so it could be anything.
                fm.data(itemtype=b'file', path=f, unresolved=True)

        else:
            msg = _(b'No unresolved merge conflicts.')

        fm.plain(b'%s\n' % _commentlines(msg), label=self._label)


def readmorestatus(repo):
    """Returns a morestatus object if the repo has unfinished state."""
    statetuple = statemod.getrepostate(repo)
    mergestate = mergemod.mergestate.read(repo)
    activemerge = mergestate.active()
    if not statetuple and not activemerge:
        return None

    unfinishedop = unfinishedmsg = unresolved = None
    if statetuple:
        unfinishedop, unfinishedmsg = statetuple
    if activemerge:
        unresolved = sorted(mergestate.unresolved())
    return morestatus(
        repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
    )
906
906
907
907
908 def findpossible(cmd, table, strict=False):
908 def findpossible(cmd, table, strict=False):
909 """
909 """
910 Return cmd -> (aliases, command table entry)
910 Return cmd -> (aliases, command table entry)
911 for each matching command.
911 for each matching command.
912 Return debug commands (or their aliases) only if no normal command matches.
912 Return debug commands (or their aliases) only if no normal command matches.
913 """
913 """
914 choice = {}
914 choice = {}
915 debugchoice = {}
915 debugchoice = {}
916
916
917 if cmd in table:
917 if cmd in table:
918 # short-circuit exact matches, "log" alias beats "log|history"
918 # short-circuit exact matches, "log" alias beats "log|history"
919 keys = [cmd]
919 keys = [cmd]
920 else:
920 else:
921 keys = table.keys()
921 keys = table.keys()
922
922
923 allcmds = []
923 allcmds = []
924 for e in keys:
924 for e in keys:
925 aliases = parsealiases(e)
925 aliases = parsealiases(e)
926 allcmds.extend(aliases)
926 allcmds.extend(aliases)
927 found = None
927 found = None
928 if cmd in aliases:
928 if cmd in aliases:
929 found = cmd
929 found = cmd
930 elif not strict:
930 elif not strict:
931 for a in aliases:
931 for a in aliases:
932 if a.startswith(cmd):
932 if a.startswith(cmd):
933 found = a
933 found = a
934 break
934 break
935 if found is not None:
935 if found is not None:
936 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
936 if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
937 debugchoice[found] = (aliases, table[e])
937 debugchoice[found] = (aliases, table[e])
938 else:
938 else:
939 choice[found] = (aliases, table[e])
939 choice[found] = (aliases, table[e])
940
940
941 if not choice and debugchoice:
941 if not choice and debugchoice:
942 choice = debugchoice
942 choice = debugchoice
943
943
944 return choice, allcmds
944 return choice, allcmds
945
945
946
946
947 def findcmd(cmd, table, strict=True):
947 def findcmd(cmd, table, strict=True):
948 """Return (aliases, command table entry) for command string."""
948 """Return (aliases, command table entry) for command string."""
949 choice, allcmds = findpossible(cmd, table, strict)
949 choice, allcmds = findpossible(cmd, table, strict)
950
950
951 if cmd in choice:
951 if cmd in choice:
952 return choice[cmd]
952 return choice[cmd]
953
953
954 if len(choice) > 1:
954 if len(choice) > 1:
955 clist = sorted(choice)
955 clist = sorted(choice)
956 raise error.AmbiguousCommand(cmd, clist)
956 raise error.AmbiguousCommand(cmd, clist)
957
957
958 if choice:
958 if choice:
959 return list(choice.values())[0]
959 return list(choice.values())[0]
960
960
961 raise error.UnknownCommand(cmd, allcmds)
961 raise error.UnknownCommand(cmd, allcmds)
962
962
963
963
964 def changebranch(ui, repo, revs, label):
964 def changebranch(ui, repo, revs, label):
965 """ Change the branch name of given revs to label """
965 """ Change the branch name of given revs to label """
966
966
967 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
967 with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
968 # abort in case of uncommitted merge or dirty wdir
968 # abort in case of uncommitted merge or dirty wdir
969 bailifchanged(repo)
969 bailifchanged(repo)
970 revs = scmutil.revrange(repo, revs)
970 revs = scmutil.revrange(repo, revs)
971 if not revs:
971 if not revs:
972 raise error.Abort(b"empty revision set")
972 raise error.Abort(b"empty revision set")
973 roots = repo.revs(b'roots(%ld)', revs)
973 roots = repo.revs(b'roots(%ld)', revs)
974 if len(roots) > 1:
974 if len(roots) > 1:
975 raise error.Abort(
975 raise error.Abort(
976 _(b"cannot change branch of non-linear revisions")
976 _(b"cannot change branch of non-linear revisions")
977 )
977 )
978 rewriteutil.precheck(repo, revs, b'change branch of')
978 rewriteutil.precheck(repo, revs, b'change branch of')
979
979
980 root = repo[roots.first()]
980 root = repo[roots.first()]
981 rpb = {parent.branch() for parent in root.parents()}
981 rpb = {parent.branch() for parent in root.parents()}
982 if label not in rpb and label in repo.branchmap():
982 if label not in rpb and label in repo.branchmap():
983 raise error.Abort(_(b"a branch of the same name already exists"))
983 raise error.Abort(_(b"a branch of the same name already exists"))
984
984
985 if repo.revs(b'obsolete() and %ld', revs):
985 if repo.revs(b'obsolete() and %ld', revs):
986 raise error.Abort(
986 raise error.Abort(
987 _(b"cannot change branch of a obsolete changeset")
987 _(b"cannot change branch of a obsolete changeset")
988 )
988 )
989
989
990 # make sure only topological heads
990 # make sure only topological heads
991 if repo.revs(b'heads(%ld) - head()', revs):
991 if repo.revs(b'heads(%ld) - head()', revs):
992 raise error.Abort(_(b"cannot change branch in middle of a stack"))
992 raise error.Abort(_(b"cannot change branch in middle of a stack"))
993
993
994 replacements = {}
994 replacements = {}
995 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
995 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
996 # mercurial.subrepo -> mercurial.cmdutil
996 # mercurial.subrepo -> mercurial.cmdutil
997 from . import context
997 from . import context
998
998
999 for rev in revs:
999 for rev in revs:
1000 ctx = repo[rev]
1000 ctx = repo[rev]
1001 oldbranch = ctx.branch()
1001 oldbranch = ctx.branch()
1002 # check if ctx has same branch
1002 # check if ctx has same branch
1003 if oldbranch == label:
1003 if oldbranch == label:
1004 continue
1004 continue
1005
1005
1006 def filectxfn(repo, newctx, path):
1006 def filectxfn(repo, newctx, path):
1007 try:
1007 try:
1008 return ctx[path]
1008 return ctx[path]
1009 except error.ManifestLookupError:
1009 except error.ManifestLookupError:
1010 return None
1010 return None
1011
1011
1012 ui.debug(
1012 ui.debug(
1013 b"changing branch of '%s' from '%s' to '%s'\n"
1013 b"changing branch of '%s' from '%s' to '%s'\n"
1014 % (hex(ctx.node()), oldbranch, label)
1014 % (hex(ctx.node()), oldbranch, label)
1015 )
1015 )
1016 extra = ctx.extra()
1016 extra = ctx.extra()
1017 extra[b'branch_change'] = hex(ctx.node())
1017 extra[b'branch_change'] = hex(ctx.node())
1018 # While changing branch of set of linear commits, make sure that
1018 # While changing branch of set of linear commits, make sure that
1019 # we base our commits on new parent rather than old parent which
1019 # we base our commits on new parent rather than old parent which
1020 # was obsoleted while changing the branch
1020 # was obsoleted while changing the branch
1021 p1 = ctx.p1().node()
1021 p1 = ctx.p1().node()
1022 p2 = ctx.p2().node()
1022 p2 = ctx.p2().node()
1023 if p1 in replacements:
1023 if p1 in replacements:
1024 p1 = replacements[p1][0]
1024 p1 = replacements[p1][0]
1025 if p2 in replacements:
1025 if p2 in replacements:
1026 p2 = replacements[p2][0]
1026 p2 = replacements[p2][0]
1027
1027
1028 mc = context.memctx(
1028 mc = context.memctx(
1029 repo,
1029 repo,
1030 (p1, p2),
1030 (p1, p2),
1031 ctx.description(),
1031 ctx.description(),
1032 ctx.files(),
1032 ctx.files(),
1033 filectxfn,
1033 filectxfn,
1034 user=ctx.user(),
1034 user=ctx.user(),
1035 date=ctx.date(),
1035 date=ctx.date(),
1036 extra=extra,
1036 extra=extra,
1037 branch=label,
1037 branch=label,
1038 )
1038 )
1039
1039
1040 newnode = repo.commitctx(mc)
1040 newnode = repo.commitctx(mc)
1041 replacements[ctx.node()] = (newnode,)
1041 replacements[ctx.node()] = (newnode,)
1042 ui.debug(b'new node id is %s\n' % hex(newnode))
1042 ui.debug(b'new node id is %s\n' % hex(newnode))
1043
1043
1044 # create obsmarkers and move bookmarks
1044 # create obsmarkers and move bookmarks
1045 scmutil.cleanupnodes(
1045 scmutil.cleanupnodes(
1046 repo, replacements, b'branch-change', fixphase=True
1046 repo, replacements, b'branch-change', fixphase=True
1047 )
1047 )
1048
1048
1049 # move the working copy too
1049 # move the working copy too
1050 wctx = repo[None]
1050 wctx = repo[None]
1051 # in-progress merge is a bit too complex for now.
1051 # in-progress merge is a bit too complex for now.
1052 if len(wctx.parents()) == 1:
1052 if len(wctx.parents()) == 1:
1053 newid = replacements.get(wctx.p1().node())
1053 newid = replacements.get(wctx.p1().node())
1054 if newid is not None:
1054 if newid is not None:
1055 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1055 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
1056 # mercurial.cmdutil
1056 # mercurial.cmdutil
1057 from . import hg
1057 from . import hg
1058
1058
1059 hg.update(repo, newid[0], quietempty=True)
1059 hg.update(repo, newid[0], quietempty=True)
1060
1060
1061 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1061 ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
1062
1062
1063
1063
1064 def findrepo(p):
1064 def findrepo(p):
1065 while not os.path.isdir(os.path.join(p, b".hg")):
1065 while not os.path.isdir(os.path.join(p, b".hg")):
1066 oldp, p = p, os.path.dirname(p)
1066 oldp, p = p, os.path.dirname(p)
1067 if p == oldp:
1067 if p == oldp:
1068 return None
1068 return None
1069
1069
1070 return p
1070 return p
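# Illustrative sketch (not part of the original file): given a working
# directory somewhere below a repository root, e.g. b'/src/repo/sub/dir',
#
#   root = findrepo(encoding.getcwd())
#
# walks upward and returns the first ancestor containing a '.hg' directory
# (b'/src/repo' in this hypothetical layout), or None if it reaches the
# filesystem root without finding one.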
1071
1071
1072
1072
1073 def bailifchanged(repo, merge=True, hint=None):
1073 def bailifchanged(repo, merge=True, hint=None):
1074 """ enforce the precondition that working directory must be clean.
1074 """ enforce the precondition that working directory must be clean.
1075
1075
1076 'merge' can be set to false if a pending uncommitted merge should be
1076 'merge' can be set to false if a pending uncommitted merge should be
1077 ignored (such as when 'update --check' runs).
1077 ignored (such as when 'update --check' runs).
1078
1078
1079 'hint' is the usual hint given to Abort exception.
1079 'hint' is the usual hint given to Abort exception.
1080 """
1080 """
1081
1081
1082 if merge and repo.dirstate.p2() != nullid:
1082 if merge and repo.dirstate.p2() != nullid:
1083 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1083 raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
1084 st = repo.status()
1084 st = repo.status()
1085 if st.modified or st.added or st.removed or st.deleted:
1085 if st.modified or st.added or st.removed or st.deleted:
1086 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1086 raise error.Abort(_(b'uncommitted changes'), hint=hint)
1087 ctx = repo[None]
1087 ctx = repo[None]
1088 for s in sorted(ctx.substate):
1088 for s in sorted(ctx.substate):
1089 ctx.sub(s).bailifchanged(hint=hint)
1089 ctx.sub(s).bailifchanged(hint=hint)
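# Illustrative sketch (not part of the original file): commands that refuse to
# run on a dirty working directory call this helper up front, e.g.
#
#   bailifchanged(repo)
#   bailifchanged(repo, merge=False, hint=_(b'commit or shelve your changes'))
#
# The hint text above is only an example; any bytes message may be passed.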
1090
1090
1091
1091
1092 def logmessage(ui, opts):
1092 def logmessage(ui, opts):
1093 """ get the log message according to -m and -l option """
1093 """ get the log message according to -m and -l option """
1094
1094
1095 check_at_most_one_arg(opts, b'message', b'logfile')
1095 check_at_most_one_arg(opts, b'message', b'logfile')
1096
1096
1097 message = opts.get(b'message')
1097 message = opts.get(b'message')
1098 logfile = opts.get(b'logfile')
1098 logfile = opts.get(b'logfile')
1099
1099
1100 if not message and logfile:
1100 if not message and logfile:
1101 try:
1101 try:
1102 if isstdiofilename(logfile):
1102 if isstdiofilename(logfile):
1103 message = ui.fin.read()
1103 message = ui.fin.read()
1104 else:
1104 else:
1105 message = b'\n'.join(util.readfile(logfile).splitlines())
1105 message = b'\n'.join(util.readfile(logfile).splitlines())
1106 except IOError as inst:
1106 except IOError as inst:
1107 raise error.Abort(
1107 raise error.Abort(
1108 _(b"can't read commit message '%s': %s")
1108 _(b"can't read commit message '%s': %s")
1109 % (logfile, encoding.strtolocal(inst.strerror))
1109 % (logfile, encoding.strtolocal(inst.strerror))
1110 )
1110 )
1111 return message
1111 return message
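# Illustrative examples (not part of the original file), assuming a populated
# option dict as produced by command parsing:
#
#   logmessage(ui, {b'message': b'fix crash', b'logfile': None})  # -> b'fix crash'
#   logmessage(ui, {b'message': b'', b'logfile': b'msg.txt'})     # reads msg.txt
#
# Supplying both options triggers an Abort via check_at_most_one_arg().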
1112
1112
1113
1113
1114 def mergeeditform(ctxorbool, baseformname):
1114 def mergeeditform(ctxorbool, baseformname):
1115 """return appropriate editform name (referencing a committemplate)
1115 """return appropriate editform name (referencing a committemplate)
1116
1116
1117 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1117 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
1118 a merge is being committed.
1118 a merge is being committed.
1119
1119
1120 This returns baseformname with '.merge' appended if it is a merge,
1120 This returns baseformname with '.merge' appended if it is a merge,
1121 otherwise '.normal' is appended.
1121 otherwise '.normal' is appended.
1122 """
1122 """
1123 if isinstance(ctxorbool, bool):
1123 if isinstance(ctxorbool, bool):
1124 if ctxorbool:
1124 if ctxorbool:
1125 return baseformname + b".merge"
1125 return baseformname + b".merge"
1126 elif len(ctxorbool.parents()) > 1:
1126 elif len(ctxorbool.parents()) > 1:
1127 return baseformname + b".merge"
1127 return baseformname + b".merge"
1128
1128
1129 return baseformname + b".normal"
1129 return baseformname + b".normal"
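# Illustrative examples (not part of the original file), following directly
# from the logic above:
#
#   mergeeditform(True, b'import.normal')    # -> b'import.normal.merge'
#   mergeeditform(False, b'import.normal')   # -> b'import.normal.normal'
#
# With a changectx argument, '.merge' is chosen when the ctx has two parents.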
1130
1130
1131
1131
1132 def getcommiteditor(
1132 def getcommiteditor(
1133 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1133 edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
1134 ):
1134 ):
1135 """get appropriate commit message editor according to '--edit' option
1135 """get appropriate commit message editor according to '--edit' option
1136
1136
1137 'finishdesc' is a function to be called with edited commit message
1137 'finishdesc' is a function to be called with edited commit message
1138 (= 'description' of the new changeset) just after editing, but
1138 (= 'description' of the new changeset) just after editing, but
1139 before checking emptiness. It should return the actual text to be
1139 before checking emptiness. It should return the actual text to be
1140 stored into history. This allows the description to be changed before
1140 stored into history. This allows the description to be changed before
1141 it is stored.
1141 it is stored.
1142
1142
1143 'extramsg' is an extra message to be shown in the editor instead of
1143 'extramsg' is an extra message to be shown in the editor instead of
1144 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
1144 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
1145 are automatically added.
1145 are automatically added.
1146
1146
1147 'editform' is a dot-separated list of names, to distinguish
1147 'editform' is a dot-separated list of names, to distinguish
1148 the purpose of commit text editing.
1148 the purpose of commit text editing.
1149
1149
1150 'getcommiteditor' returns 'commitforceeditor' regardless of
1150 'getcommiteditor' returns 'commitforceeditor' regardless of
1151 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1151 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
1152 they are specific to MQ usage.
1152 they are specific to MQ usage.
1153 """
1153 """
1154 if edit or finishdesc or extramsg:
1154 if edit or finishdesc or extramsg:
1155 return lambda r, c, s: commitforceeditor(
1155 return lambda r, c, s: commitforceeditor(
1156 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1156 r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
1157 )
1157 )
1158 elif editform:
1158 elif editform:
1159 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1159 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
1160 else:
1160 else:
1161 return commiteditor
1161 return commiteditor
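# Illustrative sketch (not part of the original file): callers typically build
# the editor once and hand it to repo.commit(), roughly
#
#   editor = getcommiteditor(editform=editform, **pycompat.strkwargs(opts))
#   node = repo.commit(message, user, date, match=m, editor=editor)
#
# where 'opts' is the command's option dict; the returned callable is later
# invoked as editor(repo, ctx, subs).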
1162
1162
1163
1163
1164 def _escapecommandtemplate(tmpl):
1164 def _escapecommandtemplate(tmpl):
1165 parts = []
1165 parts = []
1166 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1166 for typ, start, end in templater.scantemplate(tmpl, raw=True):
1167 if typ == b'string':
1167 if typ == b'string':
1168 parts.append(stringutil.escapestr(tmpl[start:end]))
1168 parts.append(stringutil.escapestr(tmpl[start:end]))
1169 else:
1169 else:
1170 parts.append(tmpl[start:end])
1170 parts.append(tmpl[start:end])
1171 return b''.join(parts)
1171 return b''.join(parts)
1172
1172
1173
1173
1174 def rendercommandtemplate(ui, tmpl, props):
1174 def rendercommandtemplate(ui, tmpl, props):
1175 r"""Expand a literal template 'tmpl' in a way suitable for command line
1175 r"""Expand a literal template 'tmpl' in a way suitable for command line
1176
1176
1177 '\' in outermost string is not taken as an escape character because it
1177 '\' in outermost string is not taken as an escape character because it
1178 is a directory separator on Windows.
1178 is a directory separator on Windows.
1179
1179
1180 >>> from . import ui as uimod
1180 >>> from . import ui as uimod
1181 >>> ui = uimod.ui()
1181 >>> ui = uimod.ui()
1182 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1182 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
1183 'c:\\foo'
1183 'c:\\foo'
1184 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1184 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
1185 'c:{path}'
1185 'c:{path}'
1186 """
1186 """
1187 if not tmpl:
1187 if not tmpl:
1188 return tmpl
1188 return tmpl
1189 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1189 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
1190 return t.renderdefault(props)
1190 return t.renderdefault(props)
1191
1191
1192
1192
1193 def rendertemplate(ctx, tmpl, props=None):
1193 def rendertemplate(ctx, tmpl, props=None):
1194 """Expand a literal template 'tmpl' byte-string against one changeset
1194 """Expand a literal template 'tmpl' byte-string against one changeset
1195
1195
1196 Each props item must be a stringify-able value or a callable returning
1196 Each props item must be a stringify-able value or a callable returning
1197 such a value, i.e. no bare list or dict should be passed.
1197 such a value, i.e. no bare list or dict should be passed.
1198 """
1198 """
1199 repo = ctx.repo()
1199 repo = ctx.repo()
1200 tres = formatter.templateresources(repo.ui, repo)
1200 tres = formatter.templateresources(repo.ui, repo)
1201 t = formatter.maketemplater(
1201 t = formatter.maketemplater(
1202 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1202 repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
1203 )
1203 )
1204 mapping = {b'ctx': ctx}
1204 mapping = {b'ctx': ctx}
1205 if props:
1205 if props:
1206 mapping.update(props)
1206 mapping.update(props)
1207 return t.renderdefault(mapping)
1207 return t.renderdefault(mapping)
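# Illustrative sketch (not part of the original file): with a changectx in
# hand, a literal template string can be expanded against it, e.g.
#
#   text = rendertemplate(ctx, b'{rev}:{node|short} {desc|firstline}\n')
#
# 'rev', 'node' and 'desc' are standard template keywords supplied through
# templatekw.keywords.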
1208
1208
1209
1209
1210 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1210 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
1211 r"""Convert old-style filename format string to template string
1211 r"""Convert old-style filename format string to template string
1212
1212
1213 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1213 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
1214 'foo-{reporoot|basename}-{seqno}.patch'
1214 'foo-{reporoot|basename}-{seqno}.patch'
1215 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1215 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
1216 '{rev}{tags % "{tag}"}{node}'
1216 '{rev}{tags % "{tag}"}{node}'
1217
1217
1218 '\' in outermost strings has to be escaped because it is a directory
1218 '\' in outermost strings has to be escaped because it is a directory
1219 separator on Windows:
1219 separator on Windows:
1220
1220
1221 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1221 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
1222 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1222 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
1223 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1223 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
1224 '\\\\\\\\foo\\\\bar.patch'
1224 '\\\\\\\\foo\\\\bar.patch'
1225 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1225 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
1226 '\\\\{tags % "{tag}"}'
1226 '\\\\{tags % "{tag}"}'
1227
1227
1228 but inner strings follow the template rules (i.e. '\' is taken as an
1228 but inner strings follow the template rules (i.e. '\' is taken as an
1229 escape character):
1229 escape character):
1230
1230
1231 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1231 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
1232 '{"c:\\tmp"}'
1232 '{"c:\\tmp"}'
1233 """
1233 """
1234 expander = {
1234 expander = {
1235 b'H': b'{node}',
1235 b'H': b'{node}',
1236 b'R': b'{rev}',
1236 b'R': b'{rev}',
1237 b'h': b'{node|short}',
1237 b'h': b'{node|short}',
1238 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1238 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
1239 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1239 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
1240 b'%': b'%',
1240 b'%': b'%',
1241 b'b': b'{reporoot|basename}',
1241 b'b': b'{reporoot|basename}',
1242 }
1242 }
1243 if total is not None:
1243 if total is not None:
1244 expander[b'N'] = b'{total}'
1244 expander[b'N'] = b'{total}'
1245 if seqno is not None:
1245 if seqno is not None:
1246 expander[b'n'] = b'{seqno}'
1246 expander[b'n'] = b'{seqno}'
1247 if total is not None and seqno is not None:
1247 if total is not None and seqno is not None:
1248 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1248 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1249 if pathname is not None:
1249 if pathname is not None:
1250 expander[b's'] = b'{pathname|basename}'
1250 expander[b's'] = b'{pathname|basename}'
1251 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1251 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1252 expander[b'p'] = b'{pathname}'
1252 expander[b'p'] = b'{pathname}'
1253
1253
1254 newname = []
1254 newname = []
1255 for typ, start, end in templater.scantemplate(pat, raw=True):
1255 for typ, start, end in templater.scantemplate(pat, raw=True):
1256 if typ != b'string':
1256 if typ != b'string':
1257 newname.append(pat[start:end])
1257 newname.append(pat[start:end])
1258 continue
1258 continue
1259 i = start
1259 i = start
1260 while i < end:
1260 while i < end:
1261 n = pat.find(b'%', i, end)
1261 n = pat.find(b'%', i, end)
1262 if n < 0:
1262 if n < 0:
1263 newname.append(stringutil.escapestr(pat[i:end]))
1263 newname.append(stringutil.escapestr(pat[i:end]))
1264 break
1264 break
1265 newname.append(stringutil.escapestr(pat[i:n]))
1265 newname.append(stringutil.escapestr(pat[i:n]))
1266 if n + 2 > end:
1266 if n + 2 > end:
1267 raise error.Abort(
1267 raise error.Abort(
1268 _(b"incomplete format spec in output filename")
1268 _(b"incomplete format spec in output filename")
1269 )
1269 )
1270 c = pat[n + 1 : n + 2]
1270 c = pat[n + 1 : n + 2]
1271 i = n + 2
1271 i = n + 2
1272 try:
1272 try:
1273 newname.append(expander[c])
1273 newname.append(expander[c])
1274 except KeyError:
1274 except KeyError:
1275 raise error.Abort(
1275 raise error.Abort(
1276 _(b"invalid format spec '%%%s' in output filename") % c
1276 _(b"invalid format spec '%%%s' in output filename") % c
1277 )
1277 )
1278 return b''.join(newname)
1278 return b''.join(newname)
1279
1279
1280
1280
1281 def makefilename(ctx, pat, **props):
1281 def makefilename(ctx, pat, **props):
1282 if not pat:
1282 if not pat:
1283 return pat
1283 return pat
1284 tmpl = _buildfntemplate(pat, **props)
1284 tmpl = _buildfntemplate(pat, **props)
1285 # BUG: alias expansion shouldn't be made against template fragments
1285 # BUG: alias expansion shouldn't be made against template fragments
1286 # rewritten from %-format strings, but we have no easy way to partially
1286 # rewritten from %-format strings, but we have no easy way to partially
1287 # disable the expansion.
1287 # disable the expansion.
1288 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1288 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
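# Illustrative examples (not part of the original file), based on the expander
# table in _buildfntemplate above:
#
#   makefilename(ctx, b'hg-%h.patch')                        # short node id
#   makefilename(ctx, b'%n-of-%N.patch', seqno=2, total=7)   # -> b'2-of-7.patch'
#
# Callers such as makefileobj() check isstdiofilename(pat) first so that '-'
# and an empty pattern keep meaning stdin/stdout.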
1289
1289
1290
1290
1291 def isstdiofilename(pat):
1291 def isstdiofilename(pat):
1292 """True if the given pat looks like a filename denoting stdin/stdout"""
1292 """True if the given pat looks like a filename denoting stdin/stdout"""
1293 return not pat or pat == b'-'
1293 return not pat or pat == b'-'
1294
1294
1295
1295
1296 class _unclosablefile(object):
1296 class _unclosablefile(object):
1297 def __init__(self, fp):
1297 def __init__(self, fp):
1298 self._fp = fp
1298 self._fp = fp
1299
1299
1300 def close(self):
1300 def close(self):
1301 pass
1301 pass
1302
1302
1303 def __iter__(self):
1303 def __iter__(self):
1304 return iter(self._fp)
1304 return iter(self._fp)
1305
1305
1306 def __getattr__(self, attr):
1306 def __getattr__(self, attr):
1307 return getattr(self._fp, attr)
1307 return getattr(self._fp, attr)
1308
1308
1309 def __enter__(self):
1309 def __enter__(self):
1310 return self
1310 return self
1311
1311
1312 def __exit__(self, exc_type, exc_value, exc_tb):
1312 def __exit__(self, exc_type, exc_value, exc_tb):
1313 pass
1313 pass
1314
1314
1315
1315
1316 def makefileobj(ctx, pat, mode=b'wb', **props):
1316 def makefileobj(ctx, pat, mode=b'wb', **props):
1317 writable = mode not in (b'r', b'rb')
1317 writable = mode not in (b'r', b'rb')
1318
1318
1319 if isstdiofilename(pat):
1319 if isstdiofilename(pat):
1320 repo = ctx.repo()
1320 repo = ctx.repo()
1321 if writable:
1321 if writable:
1322 fp = repo.ui.fout
1322 fp = repo.ui.fout
1323 else:
1323 else:
1324 fp = repo.ui.fin
1324 fp = repo.ui.fin
1325 return _unclosablefile(fp)
1325 return _unclosablefile(fp)
1326 fn = makefilename(ctx, pat, **props)
1326 fn = makefilename(ctx, pat, **props)
1327 return open(fn, mode)
1327 return open(fn, mode)
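# Illustrative sketch (not part of the original file):
#
#   with makefileobj(ctx, b'-') as fp:        # wraps ui.fout; close() is a no-op
#       fp.write(b'...')
#   fp = makefileobj(ctx, b'hg-%h.patch')     # regular file opened with mode 'wb'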
1328
1328
1329
1329
1330 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1330 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1331 """opens the changelog, manifest, a filelog or a given revlog"""
1331 """opens the changelog, manifest, a filelog or a given revlog"""
1332 cl = opts[b'changelog']
1332 cl = opts[b'changelog']
1333 mf = opts[b'manifest']
1333 mf = opts[b'manifest']
1334 dir = opts[b'dir']
1334 dir = opts[b'dir']
1335 msg = None
1335 msg = None
1336 if cl and mf:
1336 if cl and mf:
1337 msg = _(b'cannot specify --changelog and --manifest at the same time')
1337 msg = _(b'cannot specify --changelog and --manifest at the same time')
1338 elif cl and dir:
1338 elif cl and dir:
1339 msg = _(b'cannot specify --changelog and --dir at the same time')
1339 msg = _(b'cannot specify --changelog and --dir at the same time')
1340 elif cl or mf or dir:
1340 elif cl or mf or dir:
1341 if file_:
1341 if file_:
1342 msg = _(b'cannot specify filename with --changelog or --manifest')
1342 msg = _(b'cannot specify filename with --changelog or --manifest')
1343 elif not repo:
1343 elif not repo:
1344 msg = _(
1344 msg = _(
1345 b'cannot specify --changelog or --manifest or --dir '
1345 b'cannot specify --changelog or --manifest or --dir '
1346 b'without a repository'
1346 b'without a repository'
1347 )
1347 )
1348 if msg:
1348 if msg:
1349 raise error.Abort(msg)
1349 raise error.Abort(msg)
1350
1350
1351 r = None
1351 r = None
1352 if repo:
1352 if repo:
1353 if cl:
1353 if cl:
1354 r = repo.unfiltered().changelog
1354 r = repo.unfiltered().changelog
1355 elif dir:
1355 elif dir:
1356 if b'treemanifest' not in repo.requirements:
1356 if b'treemanifest' not in repo.requirements:
1357 raise error.Abort(
1357 raise error.Abort(
1358 _(
1358 _(
1359 b"--dir can only be used on repos with "
1359 b"--dir can only be used on repos with "
1360 b"treemanifest enabled"
1360 b"treemanifest enabled"
1361 )
1361 )
1362 )
1362 )
1363 if not dir.endswith(b'/'):
1363 if not dir.endswith(b'/'):
1364 dir = dir + b'/'
1364 dir = dir + b'/'
1365 dirlog = repo.manifestlog.getstorage(dir)
1365 dirlog = repo.manifestlog.getstorage(dir)
1366 if len(dirlog):
1366 if len(dirlog):
1367 r = dirlog
1367 r = dirlog
1368 elif mf:
1368 elif mf:
1369 r = repo.manifestlog.getstorage(b'')
1369 r = repo.manifestlog.getstorage(b'')
1370 elif file_:
1370 elif file_:
1371 filelog = repo.file(file_)
1371 filelog = repo.file(file_)
1372 if len(filelog):
1372 if len(filelog):
1373 r = filelog
1373 r = filelog
1374
1374
1375 # Not all storage may be revlogs. If requested, try to return an actual
1375 # Not all storage may be revlogs. If requested, try to return an actual
1376 # revlog instance.
1376 # revlog instance.
1377 if returnrevlog:
1377 if returnrevlog:
1378 if isinstance(r, revlog.revlog):
1378 if isinstance(r, revlog.revlog):
1379 pass
1379 pass
1380 elif util.safehasattr(r, b'_revlog'):
1380 elif util.safehasattr(r, b'_revlog'):
1381 r = r._revlog # pytype: disable=attribute-error
1381 r = r._revlog # pytype: disable=attribute-error
1382 elif r is not None:
1382 elif r is not None:
1383 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1383 raise error.Abort(_(b'%r does not appear to be a revlog') % r)
1384
1384
1385 if not r:
1385 if not r:
1386 if not returnrevlog:
1386 if not returnrevlog:
1387 raise error.Abort(_(b'cannot give path to non-revlog'))
1387 raise error.Abort(_(b'cannot give path to non-revlog'))
1388
1388
1389 if not file_:
1389 if not file_:
1390 raise error.CommandError(cmd, _(b'invalid arguments'))
1390 raise error.CommandError(cmd, _(b'invalid arguments'))
1391 if not os.path.isfile(file_):
1391 if not os.path.isfile(file_):
1392 raise error.Abort(_(b"revlog '%s' not found") % file_)
1392 raise error.Abort(_(b"revlog '%s' not found") % file_)
1393 r = revlog.revlog(
1393 r = revlog.revlog(
1394 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1394 vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
1395 )
1395 )
1396 return r
1396 return r
1397
1397
1398
1398
1399 def openrevlog(repo, cmd, file_, opts):
1399 def openrevlog(repo, cmd, file_, opts):
1400 """Obtain a revlog backing storage of an item.
1400 """Obtain a revlog backing storage of an item.
1401
1401
1402 This is similar to ``openstorage()`` except it always returns a revlog.
1402 This is similar to ``openstorage()`` except it always returns a revlog.
1403
1403
1404 In most cases, a caller cares about the main storage object - not the
1404 In most cases, a caller cares about the main storage object - not the
1405 revlog backing it. Therefore, this function should only be used by code
1405 revlog backing it. Therefore, this function should only be used by code
1406 that needs to examine low-level revlog implementation details, e.g. debug
1406 that needs to examine low-level revlog implementation details, e.g. debug
1407 commands.
1407 commands.
1408 """
1408 """
1409 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1409 return openstorage(repo, cmd, file_, opts, returnrevlog=True)
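# Illustrative sketch (not part of the original file): debug commands pass
# their option dict straight through, so a minimal call looks something like
#
#   opts = {b'changelog': False, b'manifest': False, b'dir': b''}
#   rl = openrevlog(repo, b'debugindex', b'some/file.txt', opts)
#
# which resolves the file's filelog and returns the revlog backing it; the
# path here is hypothetical.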
1410
1410
1411
1411
1412 def copy(ui, repo, pats, opts, rename=False):
1412 def copy(ui, repo, pats, opts, rename=False):
1413 # called with the repo lock held
1413 # called with the repo lock held
1414 #
1414 #
1415 # hgsep => pathname that uses "/" to separate directories
1415 # hgsep => pathname that uses "/" to separate directories
1416 # ossep => pathname that uses os.sep to separate directories
1416 # ossep => pathname that uses os.sep to separate directories
1417 cwd = repo.getcwd()
1417 cwd = repo.getcwd()
1418 targets = {}
1418 targets = {}
1419 after = opts.get(b"after")
1419 after = opts.get(b"after")
1420 dryrun = opts.get(b"dry_run")
1420 dryrun = opts.get(b"dry_run")
1421 wctx = repo[None]
1421 wctx = repo[None]
1422
1422
1423 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1423 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1424
1424
1425 def walkpat(pat):
1425 def walkpat(pat):
1426 srcs = []
1426 srcs = []
1427 if after:
1427 if after:
1428 badstates = b'?'
1428 badstates = b'?'
1429 else:
1429 else:
1430 badstates = b'?r'
1430 badstates = b'?r'
1431 m = scmutil.match(wctx, [pat], opts, globbed=True)
1431 m = scmutil.match(wctx, [pat], opts, globbed=True)
1432 for abs in wctx.walk(m):
1432 for abs in wctx.walk(m):
1433 state = repo.dirstate[abs]
1433 state = repo.dirstate[abs]
1434 rel = uipathfn(abs)
1434 rel = uipathfn(abs)
1435 exact = m.exact(abs)
1435 exact = m.exact(abs)
1436 if state in badstates:
1436 if state in badstates:
1437 if exact and state == b'?':
1437 if exact and state == b'?':
1438 ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
1438 ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
1439 if exact and state == b'r':
1439 if exact and state == b'r':
1440 ui.warn(
1440 ui.warn(
1441 _(
1441 _(
1442 b'%s: not copying - file has been marked for'
1442 b'%s: not copying - file has been marked for'
1443 b' remove\n'
1443 b' remove\n'
1444 )
1444 )
1445 % rel
1445 % rel
1446 )
1446 )
1447 continue
1447 continue
1448 # abs: hgsep
1448 # abs: hgsep
1449 # rel: ossep
1449 # rel: ossep
1450 srcs.append((abs, rel, exact))
1450 srcs.append((abs, rel, exact))
1451 return srcs
1451 return srcs
1452
1452
1453 # abssrc: hgsep
1453 # abssrc: hgsep
1454 # relsrc: ossep
1454 # relsrc: ossep
1455 # otarget: ossep
1455 # otarget: ossep
1456 def copyfile(abssrc, relsrc, otarget, exact):
1456 def copyfile(abssrc, relsrc, otarget, exact):
1457 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1457 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1458 if b'/' in abstarget:
1458 if b'/' in abstarget:
1459 # We cannot normalize abstarget itself, this would prevent
1459 # We cannot normalize abstarget itself, this would prevent
1460 # case-only renames, like a => A.
1460 # case-only renames, like a => A.
1461 abspath, absname = abstarget.rsplit(b'/', 1)
1461 abspath, absname = abstarget.rsplit(b'/', 1)
1462 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1462 abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
1463 reltarget = repo.pathto(abstarget, cwd)
1463 reltarget = repo.pathto(abstarget, cwd)
1464 target = repo.wjoin(abstarget)
1464 target = repo.wjoin(abstarget)
1465 src = repo.wjoin(abssrc)
1465 src = repo.wjoin(abssrc)
1466 state = repo.dirstate[abstarget]
1466 state = repo.dirstate[abstarget]
1467
1467
1468 scmutil.checkportable(ui, abstarget)
1468 scmutil.checkportable(ui, abstarget)
1469
1469
1470 # check for collisions
1470 # check for collisions
1471 prevsrc = targets.get(abstarget)
1471 prevsrc = targets.get(abstarget)
1472 if prevsrc is not None:
1472 if prevsrc is not None:
1473 ui.warn(
1473 ui.warn(
1474 _(b'%s: not overwriting - %s collides with %s\n')
1474 _(b'%s: not overwriting - %s collides with %s\n')
1475 % (
1475 % (
1476 reltarget,
1476 reltarget,
1477 repo.pathto(abssrc, cwd),
1477 repo.pathto(abssrc, cwd),
1478 repo.pathto(prevsrc, cwd),
1478 repo.pathto(prevsrc, cwd),
1479 )
1479 )
1480 )
1480 )
1481 return True # report a failure
1481 return True # report a failure
1482
1482
1483 # check for overwrites
1483 # check for overwrites
1484 exists = os.path.lexists(target)
1484 exists = os.path.lexists(target)
1485 samefile = False
1485 samefile = False
1486 if exists and abssrc != abstarget:
1486 if exists and abssrc != abstarget:
1487 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1487 if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
1488 abstarget
1488 abstarget
1489 ):
1489 ):
1490 if not rename:
1490 if not rename:
1491 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1491 ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
1492 return True # report a failure
1492 return True # report a failure
1493 exists = False
1493 exists = False
1494 samefile = True
1494 samefile = True
1495
1495
1496 if not after and exists or after and state in b'mn':
1496 if not after and exists or after and state in b'mn':
1497 if not opts[b'force']:
1497 if not opts[b'force']:
1498 if state in b'mn':
1498 if state in b'mn':
1499 msg = _(b'%s: not overwriting - file already committed\n')
1499 msg = _(b'%s: not overwriting - file already committed\n')
1500 if after:
1500 if after:
1501 flags = b'--after --force'
1501 flags = b'--after --force'
1502 else:
1502 else:
1503 flags = b'--force'
1503 flags = b'--force'
1504 if rename:
1504 if rename:
1505 hint = (
1505 hint = (
1506 _(
1506 _(
1507 b"('hg rename %s' to replace the file by "
1507 b"('hg rename %s' to replace the file by "
1508 b'recording a rename)\n'
1508 b'recording a rename)\n'
1509 )
1509 )
1510 % flags
1510 % flags
1511 )
1511 )
1512 else:
1512 else:
1513 hint = (
1513 hint = (
1514 _(
1514 _(
1515 b"('hg copy %s' to replace the file by "
1515 b"('hg copy %s' to replace the file by "
1516 b'recording a copy)\n'
1516 b'recording a copy)\n'
1517 )
1517 )
1518 % flags
1518 % flags
1519 )
1519 )
1520 else:
1520 else:
1521 msg = _(b'%s: not overwriting - file exists\n')
1521 msg = _(b'%s: not overwriting - file exists\n')
1522 if rename:
1522 if rename:
1523 hint = _(
1523 hint = _(
1524 b"('hg rename --after' to record the rename)\n"
1524 b"('hg rename --after' to record the rename)\n"
1525 )
1525 )
1526 else:
1526 else:
1527 hint = _(b"('hg copy --after' to record the copy)\n")
1527 hint = _(b"('hg copy --after' to record the copy)\n")
1528 ui.warn(msg % reltarget)
1528 ui.warn(msg % reltarget)
1529 ui.warn(hint)
1529 ui.warn(hint)
1530 return True # report a failure
1530 return True # report a failure
1531
1531
1532 if after:
1532 if after:
1533 if not exists:
1533 if not exists:
1534 if rename:
1534 if rename:
1535 ui.warn(
1535 ui.warn(
1536 _(b'%s: not recording move - %s does not exist\n')
1536 _(b'%s: not recording move - %s does not exist\n')
1537 % (relsrc, reltarget)
1537 % (relsrc, reltarget)
1538 )
1538 )
1539 else:
1539 else:
1540 ui.warn(
1540 ui.warn(
1541 _(b'%s: not recording copy - %s does not exist\n')
1541 _(b'%s: not recording copy - %s does not exist\n')
1542 % (relsrc, reltarget)
1542 % (relsrc, reltarget)
1543 )
1543 )
1544 return True # report a failure
1544 return True # report a failure
1545 elif not dryrun:
1545 elif not dryrun:
1546 try:
1546 try:
1547 if exists:
1547 if exists:
1548 os.unlink(target)
1548 os.unlink(target)
1549 targetdir = os.path.dirname(target) or b'.'
1549 targetdir = os.path.dirname(target) or b'.'
1550 if not os.path.isdir(targetdir):
1550 if not os.path.isdir(targetdir):
1551 os.makedirs(targetdir)
1551 os.makedirs(targetdir)
1552 if samefile:
1552 if samefile:
1553 tmp = target + b"~hgrename"
1553 tmp = target + b"~hgrename"
1554 os.rename(src, tmp)
1554 os.rename(src, tmp)
1555 os.rename(tmp, target)
1555 os.rename(tmp, target)
1556 else:
1556 else:
1557 # Preserve stat info on renames, not on copies; this matches
1557 # Preserve stat info on renames, not on copies; this matches
1558 # Linux CLI behavior.
1558 # Linux CLI behavior.
1559 util.copyfile(src, target, copystat=rename)
1559 util.copyfile(src, target, copystat=rename)
1560 srcexists = True
1560 srcexists = True
1561 except IOError as inst:
1561 except IOError as inst:
1562 if inst.errno == errno.ENOENT:
1562 if inst.errno == errno.ENOENT:
1563 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1563 ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
1564 srcexists = False
1564 srcexists = False
1565 else:
1565 else:
1566 ui.warn(
1566 ui.warn(
1567 _(b'%s: cannot copy - %s\n')
1567 _(b'%s: cannot copy - %s\n')
1568 % (relsrc, encoding.strtolocal(inst.strerror))
1568 % (relsrc, encoding.strtolocal(inst.strerror))
1569 )
1569 )
1570 return True # report a failure
1570 return True # report a failure
1571
1571
1572 if ui.verbose or not exact:
1572 if ui.verbose or not exact:
1573 if rename:
1573 if rename:
1574 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1574 ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
1575 else:
1575 else:
1576 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1576 ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
1577
1577
1578 targets[abstarget] = abssrc
1578 targets[abstarget] = abssrc
1579
1579
1580 # fix up dirstate
1580 # fix up dirstate
1581 scmutil.dirstatecopy(
1581 scmutil.dirstatecopy(
1582 ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1582 ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
1583 )
1583 )
1584 if rename and not dryrun:
1584 if rename and not dryrun:
1585 if not after and srcexists and not samefile:
1585 if not after and srcexists and not samefile:
1586 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1586 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
1587 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1587 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1588 wctx.forget([abssrc])
1588 wctx.forget([abssrc])
1589
1589
1590 # pat: ossep
1590 # pat: ossep
1591 # dest: ossep
1591 # dest: ossep
1592 # srcs: list of (hgsep, hgsep, ossep, bool)
1592 # srcs: list of (hgsep, hgsep, ossep, bool)
1593 # return: function that takes hgsep and returns ossep
1593 # return: function that takes hgsep and returns ossep
1594 def targetpathfn(pat, dest, srcs):
1594 def targetpathfn(pat, dest, srcs):
1595 if os.path.isdir(pat):
1595 if os.path.isdir(pat):
1596 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1596 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1597 abspfx = util.localpath(abspfx)
1597 abspfx = util.localpath(abspfx)
1598 if destdirexists:
1598 if destdirexists:
1599 striplen = len(os.path.split(abspfx)[0])
1599 striplen = len(os.path.split(abspfx)[0])
1600 else:
1600 else:
1601 striplen = len(abspfx)
1601 striplen = len(abspfx)
1602 if striplen:
1602 if striplen:
1603 striplen += len(pycompat.ossep)
1603 striplen += len(pycompat.ossep)
1604 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1604 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1605 elif destdirexists:
1605 elif destdirexists:
1606 res = lambda p: os.path.join(
1606 res = lambda p: os.path.join(
1607 dest, os.path.basename(util.localpath(p))
1607 dest, os.path.basename(util.localpath(p))
1608 )
1608 )
1609 else:
1609 else:
1610 res = lambda p: dest
1610 res = lambda p: dest
1611 return res
1611 return res
1612
1612
1613 # pat: ossep
1613 # pat: ossep
1614 # dest: ossep
1614 # dest: ossep
1615 # srcs: list of (hgsep, hgsep, ossep, bool)
1615 # srcs: list of (hgsep, hgsep, ossep, bool)
1616 # return: function that takes hgsep and returns ossep
1616 # return: function that takes hgsep and returns ossep
1617 def targetpathafterfn(pat, dest, srcs):
1617 def targetpathafterfn(pat, dest, srcs):
1618 if matchmod.patkind(pat):
1618 if matchmod.patkind(pat):
1619 # a mercurial pattern
1619 # a mercurial pattern
1620 res = lambda p: os.path.join(
1620 res = lambda p: os.path.join(
1621 dest, os.path.basename(util.localpath(p))
1621 dest, os.path.basename(util.localpath(p))
1622 )
1622 )
1623 else:
1623 else:
1624 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1624 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1625 if len(abspfx) < len(srcs[0][0]):
1625 if len(abspfx) < len(srcs[0][0]):
1626 # A directory. Either the target path contains the last
1626 # A directory. Either the target path contains the last
1627 # component of the source path or it does not.
1627 # component of the source path or it does not.
1628 def evalpath(striplen):
1628 def evalpath(striplen):
1629 score = 0
1629 score = 0
1630 for s in srcs:
1630 for s in srcs:
1631 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1631 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1632 if os.path.lexists(t):
1632 if os.path.lexists(t):
1633 score += 1
1633 score += 1
1634 return score
1634 return score
1635
1635
1636 abspfx = util.localpath(abspfx)
1636 abspfx = util.localpath(abspfx)
1637 striplen = len(abspfx)
1637 striplen = len(abspfx)
1638 if striplen:
1638 if striplen:
1639 striplen += len(pycompat.ossep)
1639 striplen += len(pycompat.ossep)
1640 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1640 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1641 score = evalpath(striplen)
1641 score = evalpath(striplen)
1642 striplen1 = len(os.path.split(abspfx)[0])
1642 striplen1 = len(os.path.split(abspfx)[0])
1643 if striplen1:
1643 if striplen1:
1644 striplen1 += len(pycompat.ossep)
1644 striplen1 += len(pycompat.ossep)
1645 if evalpath(striplen1) > score:
1645 if evalpath(striplen1) > score:
1646 striplen = striplen1
1646 striplen = striplen1
1647 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1647 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1648 else:
1648 else:
1649 # a file
1649 # a file
1650 if destdirexists:
1650 if destdirexists:
1651 res = lambda p: os.path.join(
1651 res = lambda p: os.path.join(
1652 dest, os.path.basename(util.localpath(p))
1652 dest, os.path.basename(util.localpath(p))
1653 )
1653 )
1654 else:
1654 else:
1655 res = lambda p: dest
1655 res = lambda p: dest
1656 return res
1656 return res
1657
1657
1658 pats = scmutil.expandpats(pats)
1658 pats = scmutil.expandpats(pats)
1659 if not pats:
1659 if not pats:
1660 raise error.Abort(_(b'no source or destination specified'))
1660 raise error.Abort(_(b'no source or destination specified'))
1661 if len(pats) == 1:
1661 if len(pats) == 1:
1662 raise error.Abort(_(b'no destination specified'))
1662 raise error.Abort(_(b'no destination specified'))
1663 dest = pats.pop()
1663 dest = pats.pop()
1664 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1664 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1665 if not destdirexists:
1665 if not destdirexists:
1666 if len(pats) > 1 or matchmod.patkind(pats[0]):
1666 if len(pats) > 1 or matchmod.patkind(pats[0]):
1667 raise error.Abort(
1667 raise error.Abort(
1668 _(
1668 _(
1669 b'with multiple sources, destination must be an '
1669 b'with multiple sources, destination must be an '
1670 b'existing directory'
1670 b'existing directory'
1671 )
1671 )
1672 )
1672 )
1673 if util.endswithsep(dest):
1673 if util.endswithsep(dest):
1674 raise error.Abort(_(b'destination %s is not a directory') % dest)
1674 raise error.Abort(_(b'destination %s is not a directory') % dest)
1675
1675
1676 tfn = targetpathfn
1676 tfn = targetpathfn
1677 if after:
1677 if after:
1678 tfn = targetpathafterfn
1678 tfn = targetpathafterfn
1679 copylist = []
1679 copylist = []
1680 for pat in pats:
1680 for pat in pats:
1681 srcs = walkpat(pat)
1681 srcs = walkpat(pat)
1682 if not srcs:
1682 if not srcs:
1683 continue
1683 continue
1684 copylist.append((tfn(pat, dest, srcs), srcs))
1684 copylist.append((tfn(pat, dest, srcs), srcs))
1685 if not copylist:
1685 if not copylist:
1686 raise error.Abort(_(b'no files to copy'))
1686 raise error.Abort(_(b'no files to copy'))
1687
1687
1688 errors = 0
1688 errors = 0
1689 for targetpath, srcs in copylist:
1689 for targetpath, srcs in copylist:
1690 for abssrc, relsrc, exact in srcs:
1690 for abssrc, relsrc, exact in srcs:
1691 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1691 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1692 errors += 1
1692 errors += 1
1693
1693
1694 return errors != 0
1694 return errors != 0
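# Illustrative note (not part of the original file): both 'hg copy' and
# 'hg rename' funnel into this helper; a caller holding the appropriate lock
# would invoke it roughly as
#
#   with repo.wlock():
#       failed = copy(ui, repo, pats, opts, rename=True)  # rename=False for copy
#
# and the boolean result becomes the command's exit status.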
1695
1695
1696
1696
1697 ## facility to let extensions process additional data into an import patch
1697 ## facility to let extensions process additional data into an import patch
1698 # list of identifiers to be executed in order
1698 # list of identifiers to be executed in order
1699 extrapreimport = [] # run before commit
1699 extrapreimport = [] # run before commit
1700 extrapostimport = [] # run after commit
1700 extrapostimport = [] # run after commit
1701 # mapping from identifier to actual import function
1701 # mapping from identifier to actual import function
1702 #
1702 #
1703 # 'preimport' are run before the commit is made and are provided the following
1703 # 'preimport' are run before the commit is made and are provided the following
1704 # arguments:
1704 # arguments:
1705 # - repo: the localrepository instance,
1705 # - repo: the localrepository instance,
1706 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1706 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1707 # - extra: the future extra dictionary of the changeset, please mutate it,
1707 # - extra: the future extra dictionary of the changeset, please mutate it,
1708 # - opts: the import options.
1708 # - opts: the import options.
1709 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1709 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1710 # mutation of in memory commit and more. Feel free to rework the code to get
1710 # mutation of in memory commit and more. Feel free to rework the code to get
1711 # there.
1711 # there.
1712 extrapreimportmap = {}
1712 extrapreimportmap = {}
1713 # 'postimport' are run after the commit is made and are provided the following
1713 # 'postimport' are run after the commit is made and are provided the following
1714 # argument:
1714 # argument:
1715 # - ctx: the changectx created by import.
1715 # - ctx: the changectx created by import.
1716 extrapostimportmap = {}
1716 extrapostimportmap = {}
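# Illustrative sketch (not part of the original file): a hypothetical extension
# 'myext' could stash the imported patch's file name into the changeset extra
# roughly like this:
#
#   from mercurial import cmdutil
#
#   def _recordpatchname(repo, patchdata, extra, opts):
#       # 'filename' is filled in by patch.extract()
#       name = patchdata.get(b'filename')
#       if name:
#           extra[b'myext_patchname'] = name
#
#   cmdutil.extrapreimport.append(b'myext')
#   cmdutil.extrapreimportmap[b'myext'] = _recordpatchname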
1717
1717
1718
1718
1719 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1719 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1720 """Utility function used by commands.import to import a single patch
1720 """Utility function used by commands.import to import a single patch
1721
1721
1722 This function is explicitly defined here to help the evolve extension to
1722 This function is explicitly defined here to help the evolve extension to
1723 wrap this part of the import logic.
1723 wrap this part of the import logic.
1724
1724
1725 The API is currently a bit ugly because it is a simple code translation from
1725 The API is currently a bit ugly because it is a simple code translation from
1726 the import command. Feel free to make it better.
1726 the import command. Feel free to make it better.
1727
1727
1728 :patchdata: a dictionary containing parsed patch data (such as from
1728 :patchdata: a dictionary containing parsed patch data (such as from
1729 ``patch.extract()``)
1729 ``patch.extract()``)
1730 :parents: nodes that will be parents of the created commit
1730 :parents: nodes that will be parents of the created commit
1731 :opts: the full dict of options passed to the import command
1731 :opts: the full dict of options passed to the import command
1732 :msgs: list to save the commit message to.
1732 :msgs: list to save the commit message to.
1733 (used in case we need to save it when failing)
1733 (used in case we need to save it when failing)
1734 :updatefunc: a function that updates a repo to a given node
1734 :updatefunc: a function that updates a repo to a given node
1735 updatefunc(<repo>, <node>)
1735 updatefunc(<repo>, <node>)
1736 """
1736 """
1737 # avoid cycle context -> subrepo -> cmdutil
1737 # avoid cycle context -> subrepo -> cmdutil
1738 from . import context
1738 from . import context
1739
1739
1740 tmpname = patchdata.get(b'filename')
1740 tmpname = patchdata.get(b'filename')
1741 message = patchdata.get(b'message')
1741 message = patchdata.get(b'message')
1742 user = opts.get(b'user') or patchdata.get(b'user')
1742 user = opts.get(b'user') or patchdata.get(b'user')
1743 date = opts.get(b'date') or patchdata.get(b'date')
1743 date = opts.get(b'date') or patchdata.get(b'date')
1744 branch = patchdata.get(b'branch')
1744 branch = patchdata.get(b'branch')
1745 nodeid = patchdata.get(b'nodeid')
1745 nodeid = patchdata.get(b'nodeid')
1746 p1 = patchdata.get(b'p1')
1746 p1 = patchdata.get(b'p1')
1747 p2 = patchdata.get(b'p2')
1747 p2 = patchdata.get(b'p2')
1748
1748
1749 nocommit = opts.get(b'no_commit')
1749 nocommit = opts.get(b'no_commit')
1750 importbranch = opts.get(b'import_branch')
1750 importbranch = opts.get(b'import_branch')
1751 update = not opts.get(b'bypass')
1751 update = not opts.get(b'bypass')
1752 strip = opts[b"strip"]
1752 strip = opts[b"strip"]
1753 prefix = opts[b"prefix"]
1753 prefix = opts[b"prefix"]
1754 sim = float(opts.get(b'similarity') or 0)
1754 sim = float(opts.get(b'similarity') or 0)
1755
1755
1756 if not tmpname:
1756 if not tmpname:
1757 return None, None, False
1757 return None, None, False
1758
1758
1759 rejects = False
1759 rejects = False
1760
1760
1761 cmdline_message = logmessage(ui, opts)
1761 cmdline_message = logmessage(ui, opts)
1762 if cmdline_message:
1762 if cmdline_message:
1763 # pickup the cmdline msg
1763 # pickup the cmdline msg
1764 message = cmdline_message
1764 message = cmdline_message
1765 elif message:
1765 elif message:
1766 # pickup the patch msg
1766 # pickup the patch msg
1767 message = message.strip()
1767 message = message.strip()
1768 else:
1768 else:
1769 # launch the editor
1769 # launch the editor
1770 message = None
1770 message = None
1771 ui.debug(b'message:\n%s\n' % (message or b''))
1771 ui.debug(b'message:\n%s\n' % (message or b''))
1772
1772
1773 if len(parents) == 1:
1773 if len(parents) == 1:
1774 parents.append(repo[nullid])
1774 parents.append(repo[nullid])
1775 if opts.get(b'exact'):
1775 if opts.get(b'exact'):
1776 if not nodeid or not p1:
1776 if not nodeid or not p1:
1777 raise error.Abort(_(b'not a Mercurial patch'))
1777 raise error.Abort(_(b'not a Mercurial patch'))
1778 p1 = repo[p1]
1778 p1 = repo[p1]
1779 p2 = repo[p2 or nullid]
1779 p2 = repo[p2 or nullid]
1780 elif p2:
1780 elif p2:
1781 try:
1781 try:
1782 p1 = repo[p1]
1782 p1 = repo[p1]
1783 p2 = repo[p2]
1783 p2 = repo[p2]
1784 # Without any options, consider p2 only if the
1784 # Without any options, consider p2 only if the
1785 # patch is being applied on top of the recorded
1785 # patch is being applied on top of the recorded
1786 # first parent.
1786 # first parent.
1787 if p1 != parents[0]:
1787 if p1 != parents[0]:
1788 p1 = parents[0]
1788 p1 = parents[0]
1789 p2 = repo[nullid]
1789 p2 = repo[nullid]
1790 except error.RepoError:
1790 except error.RepoError:
1791 p1, p2 = parents
1791 p1, p2 = parents
1792 if p2.node() == nullid:
1792 if p2.node() == nullid:
1793 ui.warn(
1793 ui.warn(
1794 _(
1794 _(
1795 b"warning: import the patch as a normal revision\n"
1795 b"warning: import the patch as a normal revision\n"
1796 b"(use --exact to import the patch as a merge)\n"
1796 b"(use --exact to import the patch as a merge)\n"
1797 )
1797 )
1798 )
1798 )
1799 else:
1799 else:
1800 p1, p2 = parents
1800 p1, p2 = parents
1801
1801
1802 n = None
1802 n = None
1803 if update:
1803 if update:
1804 if p1 != parents[0]:
1804 if p1 != parents[0]:
1805 updatefunc(repo, p1.node())
1805 updatefunc(repo, p1.node())
1806 if p2 != parents[1]:
1806 if p2 != parents[1]:
1807 repo.setparents(p1.node(), p2.node())
1807 repo.setparents(p1.node(), p2.node())
1808
1808
1809 if opts.get(b'exact') or importbranch:
1809 if opts.get(b'exact') or importbranch:
1810 repo.dirstate.setbranch(branch or b'default')
1810 repo.dirstate.setbranch(branch or b'default')
1811
1811
1812 partial = opts.get(b'partial', False)
1812 partial = opts.get(b'partial', False)
1813 files = set()
1813 files = set()
1814 try:
1814 try:
1815 patch.patch(
1815 patch.patch(
1816 ui,
1816 ui,
1817 repo,
1817 repo,
1818 tmpname,
1818 tmpname,
1819 strip=strip,
1819 strip=strip,
1820 prefix=prefix,
1820 prefix=prefix,
1821 files=files,
1821 files=files,
1822 eolmode=None,
1822 eolmode=None,
1823 similarity=sim / 100.0,
1823 similarity=sim / 100.0,
1824 )
1824 )
1825 except error.PatchError as e:
1825 except error.PatchError as e:
1826 if not partial:
1826 if not partial:
1827 raise error.Abort(pycompat.bytestr(e))
1827 raise error.Abort(pycompat.bytestr(e))
1828 if partial:
1828 if partial:
1829 rejects = True
1829 rejects = True
1830
1830
1831 files = list(files)
1831 files = list(files)
1832 if nocommit:
1832 if nocommit:
1833 if message:
1833 if message:
1834 msgs.append(message)
1834 msgs.append(message)
1835 else:
1835 else:
1836 if opts.get(b'exact') or p2:
1836 if opts.get(b'exact') or p2:
1837 # If you got here, you either use --force and know what
1837 # If you got here, you either use --force and know what
1838 # you are doing or used --exact or a merge patch while
1838 # you are doing or used --exact or a merge patch while
1839 # being updated to its first parent.
1839 # being updated to its first parent.
1840 m = None
1840 m = None
1841 else:
1841 else:
1842 m = scmutil.matchfiles(repo, files or [])
1842 m = scmutil.matchfiles(repo, files or [])
1843 editform = mergeeditform(repo[None], b'import.normal')
1843 editform = mergeeditform(repo[None], b'import.normal')
1844 if opts.get(b'exact'):
1844 if opts.get(b'exact'):
1845 editor = None
1845 editor = None
1846 else:
1846 else:
1847 editor = getcommiteditor(
1847 editor = getcommiteditor(
1848 editform=editform, **pycompat.strkwargs(opts)
1848 editform=editform, **pycompat.strkwargs(opts)
1849 )
1849 )
1850 extra = {}
1850 extra = {}
1851 for idfunc in extrapreimport:
1851 for idfunc in extrapreimport:
1852 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1852 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1853 overrides = {}
1853 overrides = {}
1854 if partial:
1854 if partial:
1855 overrides[(b'ui', b'allowemptycommit')] = True
1855 overrides[(b'ui', b'allowemptycommit')] = True
1856 if opts.get(b'secret'):
1856 if opts.get(b'secret'):
1857 overrides[(b'phases', b'new-commit')] = b'secret'
1857 overrides[(b'phases', b'new-commit')] = b'secret'
1858 with repo.ui.configoverride(overrides, b'import'):
1858 with repo.ui.configoverride(overrides, b'import'):
1859 n = repo.commit(
1859 n = repo.commit(
1860 message, user, date, match=m, editor=editor, extra=extra
1860 message, user, date, match=m, editor=editor, extra=extra
1861 )
1861 )
1862 for idfunc in extrapostimport:
1862 for idfunc in extrapostimport:
1863 extrapostimportmap[idfunc](repo[n])
1863 extrapostimportmap[idfunc](repo[n])
1864 else:
1864 else:
1865 if opts.get(b'exact') or importbranch:
1865 if opts.get(b'exact') or importbranch:
1866 branch = branch or b'default'
1866 branch = branch or b'default'
1867 else:
1867 else:
1868 branch = p1.branch()
1868 branch = p1.branch()
1869 store = patch.filestore()
1869 store = patch.filestore()
1870 try:
1870 try:
1871 files = set()
1871 files = set()
1872 try:
1872 try:
1873 patch.patchrepo(
1873 patch.patchrepo(
1874 ui,
1874 ui,
1875 repo,
1875 repo,
1876 p1,
1876 p1,
1877 store,
1877 store,
1878 tmpname,
1878 tmpname,
1879 strip,
1879 strip,
1880 prefix,
1880 prefix,
1881 files,
1881 files,
1882 eolmode=None,
1882 eolmode=None,
1883 )
1883 )
1884 except error.PatchError as e:
1884 except error.PatchError as e:
1885 raise error.Abort(stringutil.forcebytestr(e))
1885 raise error.Abort(stringutil.forcebytestr(e))
1886 if opts.get(b'exact'):
1886 if opts.get(b'exact'):
1887 editor = None
1887 editor = None
1888 else:
1888 else:
1889 editor = getcommiteditor(editform=b'import.bypass')
1889 editor = getcommiteditor(editform=b'import.bypass')
1890 memctx = context.memctx(
1890 memctx = context.memctx(
1891 repo,
1891 repo,
1892 (p1.node(), p2.node()),
1892 (p1.node(), p2.node()),
1893 message,
1893 message,
1894 files=files,
1894 files=files,
1895 filectxfn=store,
1895 filectxfn=store,
1896 user=user,
1896 user=user,
1897 date=date,
1897 date=date,
1898 branch=branch,
1898 branch=branch,
1899 editor=editor,
1899 editor=editor,
1900 )
1900 )
1901 n = memctx.commit()
1901 n = memctx.commit()
1902 finally:
1902 finally:
1903 store.close()
1903 store.close()
1904 if opts.get(b'exact') and nocommit:
1904 if opts.get(b'exact') and nocommit:
1905 # --exact with --no-commit is still useful in that it does merge
1905 # --exact with --no-commit is still useful in that it does merge
1906 # and branch bits
1906 # and branch bits
1907 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
1907 ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
1908 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
1908 elif opts.get(b'exact') and (not n or hex(n) != nodeid):
1909 raise error.Abort(_(b'patch is damaged or loses information'))
1909 raise error.Abort(_(b'patch is damaged or loses information'))
1910 msg = _(b'applied to working directory')
1910 msg = _(b'applied to working directory')
1911 if n:
1911 if n:
1912 # i18n: refers to a short changeset id
1912 # i18n: refers to a short changeset id
1913 msg = _(b'created %s') % short(n)
1913 msg = _(b'created %s') % short(n)
1914 return msg, n, rejects
1914 return msg, n, rejects
1915
1915
1916
1916
1917 # facility to let extensions include additional data in an exported patch
1917 # facility to let extensions include additional data in an exported patch
1918 # list of identifiers to be executed in order
1918 # list of identifiers to be executed in order
1919 extraexport = []
1919 extraexport = []
1920 # mapping from identifier to actual export function
1920 # mapping from identifier to actual export function
1921 # the function has to return a string to be added to the header, or None
1921 # the function has to return a string to be added to the header, or None
1922 # it is given two arguments (sequencenumber, changectx)
1922 # it is given two arguments (sequencenumber, changectx)
1923 extraexportmap = {}
1923 extraexportmap = {}
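# Illustrative sketch (not part of the original file): a hypothetical extension
# 'myext' could add one extra header line to every exported patch:
#
#   from mercurial import cmdutil
#
#   def _exportheader(seqno, ctx):
#       # returning None would skip the header for this changeset
#       return b'MyExt-Phase: %s' % ctx.phasestr()
#
#   cmdutil.extraexport.append(b'myext')
#   cmdutil.extraexportmap[b'myext'] = _exportheader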
1924
1924
1925
1925
1926 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1926 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1927 node = scmutil.binnode(ctx)
1927 node = scmutil.binnode(ctx)
1928 parents = [p.node() for p in ctx.parents() if p]
1928 parents = [p.node() for p in ctx.parents() if p]
1929 branch = ctx.branch()
1929 branch = ctx.branch()
1930 if switch_parent:
1930 if switch_parent:
1931 parents.reverse()
1931 parents.reverse()
1932
1932
1933 if parents:
1933 if parents:
1934 prev = parents[0]
1934 prev = parents[0]
1935 else:
1935 else:
1936 prev = nullid
1936 prev = nullid
1937
1937
1938 fm.context(ctx=ctx)
1938 fm.context(ctx=ctx)
1939 fm.plain(b'# HG changeset patch\n')
1939 fm.plain(b'# HG changeset patch\n')
1940 fm.write(b'user', b'# User %s\n', ctx.user())
1940 fm.write(b'user', b'# User %s\n', ctx.user())
1941 fm.plain(b'# Date %d %d\n' % ctx.date())
1941 fm.plain(b'# Date %d %d\n' % ctx.date())
1942 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
1942 fm.write(b'date', b'# %s\n', fm.formatdate(ctx.date()))
1943 fm.condwrite(
1943 fm.condwrite(
1944 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
1944 branch and branch != b'default', b'branch', b'# Branch %s\n', branch
1945 )
1945 )
1946 fm.write(b'node', b'# Node ID %s\n', hex(node))
1946 fm.write(b'node', b'# Node ID %s\n', hex(node))
1947 fm.plain(b'# Parent %s\n' % hex(prev))
1947 fm.plain(b'# Parent %s\n' % hex(prev))
1948 if len(parents) > 1:
1948 if len(parents) > 1:
1949 fm.plain(b'# Parent %s\n' % hex(parents[1]))
1949 fm.plain(b'# Parent %s\n' % hex(parents[1]))
1950 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
1950 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
1951
1951
1952 # TODO: redesign extraexportmap function to support formatter
1952 # TODO: redesign extraexportmap function to support formatter
1953 for headerid in extraexport:
1953 for headerid in extraexport:
1954 header = extraexportmap[headerid](seqno, ctx)
1954 header = extraexportmap[headerid](seqno, ctx)
1955 if header is not None:
1955 if header is not None:
1956 fm.plain(b'# %s\n' % header)
1956 fm.plain(b'# %s\n' % header)
1957
1957
1958 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
1958 fm.write(b'desc', b'%s\n', ctx.description().rstrip())
1959 fm.plain(b'\n')
1959 fm.plain(b'\n')
1960
1960
1961 if fm.isplain():
1961 if fm.isplain():
1962 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1962 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1963 for chunk, label in chunkiter:
1963 for chunk, label in chunkiter:
1964 fm.plain(chunk, label=label)
1964 fm.plain(chunk, label=label)
1965 else:
1965 else:
1966 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1966 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1967 # TODO: make it structured?
1967 # TODO: make it structured?
1968 fm.data(diff=b''.join(chunkiter))
1968 fm.data(diff=b''.join(chunkiter))
1969
1969
1970
1970
1971 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1971 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1972 """Export changesets to stdout or a single file"""
1972 """Export changesets to stdout or a single file"""
1973 for seqno, rev in enumerate(revs, 1):
1973 for seqno, rev in enumerate(revs, 1):
1974 ctx = repo[rev]
1974 ctx = repo[rev]
1975 if not dest.startswith(b'<'):
1975 if not dest.startswith(b'<'):
1976 repo.ui.note(b"%s\n" % dest)
1976 repo.ui.note(b"%s\n" % dest)
1977 fm.startitem()
1977 fm.startitem()
1978 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1978 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1979
1979
1980
1980
1981 def _exportfntemplate(
1981 def _exportfntemplate(
1982 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
1982 repo, revs, basefm, fntemplate, switch_parent, diffopts, match
1983 ):
1983 ):
1984 """Export changesets to possibly multiple files"""
1984 """Export changesets to possibly multiple files"""
1985 total = len(revs)
1985 total = len(revs)
1986 revwidth = max(len(str(rev)) for rev in revs)
1986 revwidth = max(len(str(rev)) for rev in revs)
1987 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1987 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1988
1988
1989 for seqno, rev in enumerate(revs, 1):
1989 for seqno, rev in enumerate(revs, 1):
1990 ctx = repo[rev]
1990 ctx = repo[rev]
1991 dest = makefilename(
1991 dest = makefilename(
1992 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
1992 ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
1993 )
1993 )
1994 filemap.setdefault(dest, []).append((seqno, rev))
1994 filemap.setdefault(dest, []).append((seqno, rev))
1995
1995
1996 for dest in filemap:
1996 for dest in filemap:
1997 with formatter.maybereopen(basefm, dest) as fm:
1997 with formatter.maybereopen(basefm, dest) as fm:
1998 repo.ui.note(b"%s\n" % dest)
1998 repo.ui.note(b"%s\n" % dest)
1999 for seqno, rev in filemap[dest]:
1999 for seqno, rev in filemap[dest]:
2000 fm.startitem()
2000 fm.startitem()
2001 ctx = repo[rev]
2001 ctx = repo[rev]
2002 _exportsingle(
2002 _exportsingle(
2003 repo, ctx, fm, match, switch_parent, seqno, diffopts
2003 repo, ctx, fm, match, switch_parent, seqno, diffopts
2004 )
2004 )
2005
2005
2006
2006
2007 def _prefetchchangedfiles(repo, revs, match):
2007 def _prefetchchangedfiles(repo, revs, match):
2008 allfiles = set()
2008 allfiles = set()
2009 for rev in revs:
2009 for rev in revs:
2010 for file in repo[rev].files():
2010 for file in repo[rev].files():
2011 if not match or match(file):
2011 if not match or match(file):
2012 allfiles.add(file)
2012 allfiles.add(file)
2013 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
2013 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
2014
2014
2015
2015
2016 def export(
2016 def export(
2017 repo,
2017 repo,
2018 revs,
2018 revs,
2019 basefm,
2019 basefm,
2020 fntemplate=b'hg-%h.patch',
2020 fntemplate=b'hg-%h.patch',
2021 switch_parent=False,
2021 switch_parent=False,
2022 opts=None,
2022 opts=None,
2023 match=None,
2023 match=None,
2024 ):
2024 ):
2025 '''export changesets as hg patches
2025 '''export changesets as hg patches
2026
2026
2027 Args:
2027 Args:
2028 repo: The repository from which we're exporting revisions.
2028 repo: The repository from which we're exporting revisions.
2029 revs: A list of revisions to export as revision numbers.
2029 revs: A list of revisions to export as revision numbers.
2030 basefm: A formatter to which patches should be written.
2030 basefm: A formatter to which patches should be written.
2031 fntemplate: An optional string to use for generating patch file names.
2031 fntemplate: An optional string to use for generating patch file names.
2032 switch_parent: If True, show diffs against second parent when not nullid.
2032 switch_parent: If True, show diffs against second parent when not nullid.
2033 Default is false, which always shows diff against p1.
2033 Default is false, which always shows diff against p1.
2034 opts: diff options to use for generating the patch.
2034 opts: diff options to use for generating the patch.
2035 match: If specified, only export changes to files matching this matcher.
2035 match: If specified, only export changes to files matching this matcher.
2036
2036
2037 Returns:
2037 Returns:
2038 Nothing.
2038 Nothing.
2039
2039
2040 Side Effect:
2040 Side Effect:
2041 "HG Changeset Patch" data is emitted to one of the following
2041 "HG Changeset Patch" data is emitted to one of the following
2042 destinations:
2042 destinations:
2043 fntemplate specified: Each rev is written to a unique file named using
2043 fntemplate specified: Each rev is written to a unique file named using
2044 the given template.
2044 the given template.
2045 Otherwise: All revs will be written to basefm.
2045 Otherwise: All revs will be written to basefm.
2046 '''
2046 '''
2047 _prefetchchangedfiles(repo, revs, match)
2047 _prefetchchangedfiles(repo, revs, match)
2048
2048
2049 if not fntemplate:
2049 if not fntemplate:
2050 _exportfile(
2050 _exportfile(
2051 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2051 repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
2052 )
2052 )
2053 else:
2053 else:
2054 _exportfntemplate(
2054 _exportfntemplate(
2055 repo, revs, basefm, fntemplate, switch_parent, opts, match
2055 repo, revs, basefm, fntemplate, switch_parent, opts, match
2056 )
2056 )
2057
2057
2058
2058
2059 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2059 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
2060 """Export changesets to the given file stream"""
2060 """Export changesets to the given file stream"""
2061 _prefetchchangedfiles(repo, revs, match)
2061 _prefetchchangedfiles(repo, revs, match)
2062
2062
2063 dest = getattr(fp, 'name', b'<unnamed>')
2063 dest = getattr(fp, 'name', b'<unnamed>')
2064 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2064 with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
2065 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2065 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
2066
2066
2067
2067
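
A rough, hypothetical driver for the export helpers above; the repository path, revision choice and output file are invented for illustration, and only the exportfile() signature is taken from the code:

# Sketch (not part of cmdutil.py): write the tip revision of a local
# repository to a single patch file through cmdutil.exportfile().
from mercurial import hg, ui as uimod, cmdutil

ui = uimod.ui.load()
repo = hg.repository(ui, b'/tmp/demo-repo')       # assumed to exist
revs = [repo[b'tip'].rev()]
with open('/tmp/demo.patch', 'wb') as fp:
    # exportfile() wraps fp in a plain formatter and emits one
    # "# HG changeset patch" block per revision via _exportsingle().
    cmdutil.exportfile(repo, revs, fp)

Judging from the two signatures above, exportfile() is the convenience entry point when the caller already holds an open binary stream, while export() is used when a formatter or a filename template is involved.
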
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write(b'index', b'%i ', index)
    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    fm.condwrite(
        succs,
        b'succnodes',
        b'%s ',
        fm.formatlist(map(hex, succs), name=b'node'),
    )
    fm.write(b'flag', b'%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write(
            b'parentnodes',
            b'{%s} ',
            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
        )
    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
    meta = marker.metadata().copy()
    meta.pop(b'date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write(
        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
    )
    fm.plain(b'\n')


def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = dateutil.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(
                _(b"found revision %d from %s\n")
                % (rev, dateutil.datestr(results[rev]))
            )
            return b'%d' % rev

    raise error.Abort(_(b"revision matching date not found"))
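
finddate() walks history with walkchangerevs() and a prep callback, returning the matching revision number as a byte string or raising error.Abort when nothing matches. A minimal, hypothetical wrapper (the date spec is only an example):

# Sketch only: resolve the newest revision committed on a given day.
from mercurial import error

def newest_rev_on(ui, repo, dayspec=b'2020-03-10'):
    try:
        revstr = finddate(ui, repo, dayspec)
    except error.Abort:
        return None
    ui.write(b'resolved to revision %s\n' % revstr)
    return int(revstr)
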
def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2
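
The generator above doubles the window on each step until it reaches sizelimit, then keeps yielding the limit forever; a quick way to see the progression (illustration only):

# First nine window sizes produced with the defaults (8, 512):
import itertools

sizes = list(itertools.islice(increasingwindows(), 9))
assert sizes == [8, 16, 32, 64, 128, 256, 512, 512, 512]
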
def _walkrevs(repo, opts):
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    if opts.get(b'rev'):
        revs = scmutil.revrange(repo, opts[b'rev'])
    elif follow and repo.dirstate.p1() == nullid:
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs(b'reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs


class FileWalkError(Exception):
    pass


def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)

    def filerevs(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append(
                (linkrev, parentlinkrevs, follow and filelog.renamed(n))
            )

        return reversed(revs)

    def iterfiles():
        pctx = repo[b'.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % filename
                    )
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % file_
                    )
                raise FileWalkError(b"Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevs(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted


class _followfilter(object):
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(
                    lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
                )

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
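
Fed revisions in descending order starting from its first argument, _followfilter.match() answers whether each revision is an ancestor of that starting point; the --prune handling in walkchangerevs() below relies on exactly this. A small illustration in terms of the names defined in this module (the revision number is made up, and `repo` is assumed to exist):

# Collect the ancestors of revision 42 that lie in the walked range,
# the same way the --prune loop below does.
ff = _followfilter(repo)
ancestors_of_42 = [r for r in pycompat.xrange(42, -1, -1) if ff.match(r)]
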
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    allfiles = opts.get(b'all_files')
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    fncache = {}
    change = repo.__getitem__

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always() or allfiles:
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == b'.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(
                _(b'can only follow copies/renames for explicit filenames')
            )

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    if allfiles:
                        matches = list(ctx.manifest().walk(match))
                    else:
                        matches = [f for f in ctx.files() if match(f)]
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get(b'prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in pycompat.xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))

            def want(rev):
                return ff.match(rev) and rev in wanted

        else:

            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:

                    def fns_generator():
                        if allfiles:

                            def bad(f, msg):
                                pass

                            for f in ctx.matches(matchmod.badmatch(match, bad)):
                                yield f
                        else:
                            for f in ctx.files():
                                if match(f):
                                    yield f

                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
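
walkchangerevs() leaves the per-revision work to its prepare callback and only afterwards yields the contexts of each window. A hedged sketch of a consumer, with an invented pattern and option dict, assuming `repo` is an existing localrepository object:

# Sketch of a walkchangerevs() caller: record the file names handed to
# prep() for each revision, then consume the yielded contexts.
from mercurial import scmutil

def files_touched_per_rev(repo, pattern=b'glob:some/path/**'):
    match = scmutil.match(repo[None], [pattern])
    hits = {}  # rev -> sorted list of matching file names

    def prep(ctx, fns):
        # fns is either the cached fncache entry or fns_generator()
        hits[ctx.rev()] = sorted(fns)

    opts = {b'rev': None, b'all_files': True}
    for ctx in walkchangerevs(repo, match, opts, prep):
        yield ctx.rev(), hits.get(ctx.rev(), [])
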
def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    match = repo.narrowmatch(match, includeexact=True)
    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(
        dirstate.walk(
            badmatch,
            subrepos=sorted(wctx.substate),
            unknown=True,
            ignored=False,
            full=False,
        )
    ):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(
                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                )

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
            if opts.get('subrepos'):
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
                )
            else:
                bad.extend(
                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
                )
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad


def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs(b'filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)


def forget(
    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
):
    if dryrun and interactive:
        raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        try:
            subbad, subforgot = sub.forget(
                submatch,
                subprefix,
                subuipathfn,
                dryrun=dryrun,
                interactive=interactive,
            )
            bad.extend([subpath + b'/' + f for f in subbad])
            forgot.extend([subpath + b'/' + f for f in subforgot])
        except error.LookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(
                            _(
                                b'not removing %s: '
                                b'file is already untracked\n'
                            )
                            % uipathfn(f)
                        )
                    bad.append(f)

    if interactive:
        responses = _(
            b'[Ynsa?]'
            b'$$ &Yes, forget this file'
            b'$$ &No, skip this file'
            b'$$ &Skip remaining files'
            b'$$ Include &all remaining files'
            b'$$ &? (display help)'
        )
        for filename in forget[:]:
            r = ui.promptchoice(
                _(b'forget %s %s') % (uipathfn(filename), responses)
            )
            if r == 4:  # ?
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(
                        _(b'forget %s %s') % (uipathfn(filename), responses)
                    )
            if r == 0:  # yes
                continue
            elif r == 1:  # no
                forget.remove(filename)
            elif r == 2:  # Skip
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3:  # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot


def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
    ret = 1

    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
    for f in ctx.matches(m):
        fm.startitem()
        fm.context(ctx=ctx)
        if needsfctx:
            fc = ctx[f]
            fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
        fm.data(path=f)
        fm.plain(fmt % uipathfn(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if (
                    sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
                    == 0
                ):
                    ret = 0
            except error.LookupError:
                ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )

    return ret


def remove(
    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
):
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s.modified, s.added, s.deleted, s.clean

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(
        _(b'searching'), total=len(subs), unit=_(b'subrepos')
    )
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(
                    submatch,
                    subprefix,
                    subuipathfn,
                    after,
                    force,
                    subrepos,
                    dryrun,
                    warnings,
                ):
                    ret = 1
            except error.LookupError:
                warnings.append(
                    _(b"skipping missing subrepository: %s\n")
                    % uipathfn(subpath)
                )
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = pathutil.dirs(deleted)
    files = m.files()
    progress = ui.makeprogress(
        _(b'deleting'), total=len(files), unit=_(b'files')
    )
    for f in files:

        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + b'/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(
                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                )
            else:
                warnings.append(
                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                )
        # missing files will generate a warning elsewhere
        ret = 1
    progress.complete()

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=len(remaining), unit=_(b'files')
        )
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in files):
                warnings.append(
                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                )
                ret = 1
        progress.complete()
    else:
        list = deleted + clean
        progress = ui.makeprogress(
            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
        )
        for f in modified:
            progress.increment()
            warnings.append(
                _(
                    b'not removing %s: file is modified (use -f'
                    b' to force removal)\n'
                )
                % uipathfn(f)
            )
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(
                _(
                    b"not removing %s: file has been marked for add"
                    b" (use 'hg forget' to undo add)\n"
                )
                % uipathfn(f)
            )
            ret = 1
        progress.complete()

    list = sorted(list)
    progress = ui.makeprogress(
        _(b'deleting'), total=len(list), unit=_(b'files')
    )
    for f in list:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(
                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
            )
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in list:
                    if f in added:
                        continue  # we never unlink added files on remove
                    rmdir = repo.ui.configbool(
                        b'experimental', b'removeemptydirs'
                    )
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret


def _catfmtneedsdata(fm):
    return not fm.datahint() or b'data' in fm.datahint()


def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""

    # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
    # wasn't requested.
    data = b''
    if _catfmtneedsdata(fm):
        data = ctx[path].data()
        if decode:
            data = ctx.repo().wwritedata(path, data)
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write(b'data', b'%s', data)
    fm.data(path=path)
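
The docstring above explicitly allows extensions to wrap this hook. A toy, hypothetical extension doing so could look like this; the 'filesize' keyword is invented, and only extensions.wrapfunction() and the hook's own signature are taken as given:

# demo_catsize.py -- sketch of an extension adding a 'filesize' keyword
# to 'hg cat' template output.  Enable it under [extensions] to try it.
from mercurial import cmdutil, extensions

def _updatecatformatter_size(orig, fm, ctx, matcher, path, decode):
    # The wrapped hook must be called first, as its docstring requires.
    orig(fm, ctx, matcher, path, decode)
    fm.data(filesize=ctx[path].size())

def uisetup(ui):
    extensions.wrapfunction(
        cmdutil, '_updatecatformatter', _updatecatformatter_size
    )
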
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(
                ctx, fntemplate, pathname=os.path.join(prefix, path)
            )
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                if _catfmtneedsdata(basefm):
                    scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
                write(file)
                return 0
        except KeyError:
            pass

    if _catfmtneedsdata(basefm):
        scmutil.prefetchfiles(repo, [ctx.rev()], matcher)

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, subpath)
            if not sub.cat(
                submatch,
                basefm,
                fntemplate,
                subprefix,
                **pycompat.strkwargs(opts)
            ):
                err = 0
        except error.RepoLookupError:
            ui.status(
                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
            )

    return err


def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    dsguard = None
    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get(b'addremove'):
        dsguard = dirstateguard.dirstateguard(repo, b'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard:
            relative = scmutil.anypats(pats, opts)
            uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
            if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
                raise error.Abort(
                    _(b"failed to mark all new/missing files as added/removed")
                )

        return commitfunc(ui, repo, message, matcher, opts)
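
commit() defers the actual commit to whatever commitfunc it is handed, after preparing the message, the matcher and the optional addremove pass. A minimal sketch of a compatible commitfunc, assuming the usual repo.commit() entry point (names and option dict are illustrative):

# Sketch: a commitfunc usable with cmdutil.commit(); it receives the
# parsed message and matcher and performs the actual commit.
def simplecommitfunc(ui, repo, message, match, opts):
    return repo.commit(message, opts.get(b'user'), opts.get(b'date'), match)

# node = commit(ui, repo, simplecommitfunc, [], {b'message': b'example'})
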
2915 def samefile(f, ctx1, ctx2):
2919 def samefile(f, ctx1, ctx2):
2916 if f in ctx1.manifest():
2920 if f in ctx1.manifest():
2917 a = ctx1.filectx(f)
2921 a = ctx1.filectx(f)
2918 if f in ctx2.manifest():
2922 if f in ctx2.manifest():
2919 b = ctx2.filectx(f)
2923 b = ctx2.filectx(f)
2920 return not a.cmp(b) and a.flags() == b.flags()
2924 return not a.cmp(b) and a.flags() == b.flags()
2921 else:
2925 else:
2922 return False
2926 return False
2923 else:
2927 else:
2924 return f not in ctx2.manifest()
2928 return f not in ctx2.manifest()
2925
2929
2926
2930
2927 def amend(ui, repo, old, extra, pats, opts):
2931 def amend(ui, repo, old, extra, pats, opts):
2928 # avoid cycle context -> subrepo -> cmdutil
2932 # avoid cycle context -> subrepo -> cmdutil
2929 from . import context
2933 from . import context
2930
2934
2931 # amend will reuse the existing user if not specified, but the obsolete
2935 # amend will reuse the existing user if not specified, but the obsolete
2932 # marker creation requires that the current user's name is specified.
2936 # marker creation requires that the current user's name is specified.
2933 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2937 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2934 ui.username() # raise exception if username not set
2938 ui.username() # raise exception if username not set
2935
2939
2936 ui.note(_(b'amending changeset %s\n') % old)
2940 ui.note(_(b'amending changeset %s\n') % old)
2937 base = old.p1()
2941 base = old.p1()
2938
2942
2939 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2943 with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
2940 # Participating changesets:
2944 # Participating changesets:
2941 #
2945 #
2942 # wctx o - workingctx that contains changes from working copy
2946 # wctx o - workingctx that contains changes from working copy
2943 # | to go into amending commit
2947 # | to go into amending commit
2944 # |
2948 # |
2945 # old o - changeset to amend
2949 # old o - changeset to amend
2946 # |
2950 # |
2947 # base o - first parent of the changeset to amend
2951 # base o - first parent of the changeset to amend
2948 wctx = repo[None]
2952 wctx = repo[None]
2949
2953
2950 # Copy to avoid mutating input
2954 # Copy to avoid mutating input
2951 extra = extra.copy()
2955 extra = extra.copy()
2952 # Update extra dict from amended commit (e.g. to preserve graft
2956 # Update extra dict from amended commit (e.g. to preserve graft
2953 # source)
2957 # source)
2954 extra.update(old.extra())
2958 extra.update(old.extra())
2955
2959
2956 # Also update it from the wctx
2960 # Also update it from the wctx
2957 extra.update(wctx.extra())
2961 extra.update(wctx.extra())
2958
2962
2959 # date-only change should be ignored?
2963 # date-only change should be ignored?
2960 datemaydiffer = resolvecommitoptions(ui, opts)
2964 datemaydiffer = resolvecommitoptions(ui, opts)
2961
2965
2962 date = old.date()
2966 date = old.date()
2963 if opts.get(b'date'):
2967 if opts.get(b'date'):
2964 date = dateutil.parsedate(opts.get(b'date'))
2968 date = dateutil.parsedate(opts.get(b'date'))
2965 user = opts.get(b'user') or old.user()
2969 user = opts.get(b'user') or old.user()
2966
2970
2967 if len(old.parents()) > 1:
2971 if len(old.parents()) > 1:
2968 # ctx.files() isn't reliable for merges, so fall back to the
2972 # ctx.files() isn't reliable for merges, so fall back to the
2969 # slower repo.status() method
2973 # slower repo.status() method
2970 st = base.status(old)
2974 st = base.status(old)
2971 files = set(st.modified) | set(st.added) | set(st.removed)
2975 files = set(st.modified) | set(st.added) | set(st.removed)
2972 else:
2976 else:
2973 files = set(old.files())
2977 files = set(old.files())
2974
2978
2975 # add/remove the files to the working copy if the "addremove" option
2979 # add/remove the files to the working copy if the "addremove" option
2976 # was specified.
2980 # was specified.
2977 matcher = scmutil.match(wctx, pats, opts)
2981 matcher = scmutil.match(wctx, pats, opts)
2978 relative = scmutil.anypats(pats, opts)
2982 relative = scmutil.anypats(pats, opts)
2979 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2983 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2980 if opts.get(b'addremove') and scmutil.addremove(
2984 if opts.get(b'addremove') and scmutil.addremove(
2981 repo, matcher, b"", uipathfn, opts
2985 repo, matcher, b"", uipathfn, opts
2982 ):
2986 ):
2983 raise error.Abort(
2987 raise error.Abort(
2984 _(b"failed to mark all new/missing files as added/removed")
2988 _(b"failed to mark all new/missing files as added/removed")
2985 )
2989 )
2986
2990
2987 # Check subrepos. This depends on in-place wctx._status update in
2991 # Check subrepos. This depends on in-place wctx._status update in
2988 # subrepo.precommit(). To minimize the risk of this hack, we do
2992 # subrepo.precommit(). To minimize the risk of this hack, we do
2989 # nothing if .hgsub does not exist.
2993 # nothing if .hgsub does not exist.
2990 if b'.hgsub' in wctx or b'.hgsub' in old:
2994 if b'.hgsub' in wctx or b'.hgsub' in old:
2991 subs, commitsubs, newsubstate = subrepoutil.precommit(
2995 subs, commitsubs, newsubstate = subrepoutil.precommit(
2992 ui, wctx, wctx._status, matcher
2996 ui, wctx, wctx._status, matcher
2993 )
2997 )
2994 # amend should abort if commitsubrepos is enabled
2998 # amend should abort if commitsubrepos is enabled
2995 assert not commitsubs
2999 assert not commitsubs
2996 if subs:
3000 if subs:
2997 subrepoutil.writestate(repo, newsubstate)
3001 subrepoutil.writestate(repo, newsubstate)
2998
3002
2999 ms = mergemod.mergestate.read(repo)
3003 ms = mergemod.mergestate.read(repo)
3000 mergeutil.checkunresolved(ms)
3004 mergeutil.checkunresolved(ms)
3001
3005
3002 filestoamend = set(f for f in wctx.files() if matcher(f))
3006 filestoamend = set(f for f in wctx.files() if matcher(f))
3003
3007
3004 changes = len(filestoamend) > 0
3008 changes = len(filestoamend) > 0
3005 if changes:
3009 if changes:
3006 # Recompute copies (avoid recording a -> b -> a)
3010 # Recompute copies (avoid recording a -> b -> a)
3007 copied = copies.pathcopies(base, wctx, matcher)
3011 copied = copies.pathcopies(base, wctx, matcher)
3008 if old.p2().node() != nullid:
3012 if old.p2().node() != nullid:
3009 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3013 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3010
3014
3011 # Prune files which were reverted by the updates: if old
3015 # Prune files which were reverted by the updates: if old
3012 # introduced file X and the file was renamed in the working
3016 # introduced file X and the file was renamed in the working
3013 # copy, then those two files are the same and
3017 # copy, then those two files are the same and
3014 # we can discard X from our list of files. Likewise if X
3018 # we can discard X from our list of files. Likewise if X
3015 # was removed, it's no longer relevant. If X is missing (aka
3019 # was removed, it's no longer relevant. If X is missing (aka
3016 # deleted), old X must be preserved.
3020 # deleted), old X must be preserved.
3017 files.update(filestoamend)
3021 files.update(filestoamend)
3018 files = [
3022 files = [
3019 f
3023 f
3020 for f in files
3024 for f in files
3021 if (f not in filestoamend or not samefile(f, wctx, base))
3025 if (f not in filestoamend or not samefile(f, wctx, base))
3022 ]
3026 ]
3023
3027
3024 def filectxfn(repo, ctx_, path):
3028 def filectxfn(repo, ctx_, path):
3025 try:
3029 try:
3026 # If the file being considered is not amongst the files
3030 # If the file being considered is not amongst the files
3027 # to be amended, we should return the file context from the
3031 # to be amended, we should return the file context from the
3028 # old changeset. This avoids issues when only some files in
3032 # old changeset. This avoids issues when only some files in
3029 # the working copy are being amended but there are also
3033 # the working copy are being amended but there are also
3030 # changes to other files from the old changeset.
3034 # changes to other files from the old changeset.
3031 if path not in filestoamend:
3035 if path not in filestoamend:
3032 return old.filectx(path)
3036 return old.filectx(path)
3033
3037
3034 # Return None for removed files.
3038 # Return None for removed files.
3035 if path in wctx.removed():
3039 if path in wctx.removed():
3036 return None
3040 return None
3037
3041
3038 fctx = wctx[path]
3042 fctx = wctx[path]
3039 flags = fctx.flags()
3043 flags = fctx.flags()
3040 mctx = context.memfilectx(
3044 mctx = context.memfilectx(
3041 repo,
3045 repo,
3042 ctx_,
3046 ctx_,
3043 fctx.path(),
3047 fctx.path(),
3044 fctx.data(),
3048 fctx.data(),
3045 islink=b'l' in flags,
3049 islink=b'l' in flags,
3046 isexec=b'x' in flags,
3050 isexec=b'x' in flags,
3047 copysource=copied.get(path),
3051 copysource=copied.get(path),
3048 )
3052 )
3049 return mctx
3053 return mctx
3050 except KeyError:
3054 except KeyError:
3051 return None
3055 return None
3052
3056
3053 else:
3057 else:
3054 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3058 ui.note(_(b'copying changeset %s to %s\n') % (old, base))
3055
3059
3056 # Use version of files as in the old cset
3060 # Use version of files as in the old cset
3057 def filectxfn(repo, ctx_, path):
3061 def filectxfn(repo, ctx_, path):
3058 try:
3062 try:
3059 return old.filectx(path)
3063 return old.filectx(path)
3060 except KeyError:
3064 except KeyError:
3061 return None
3065 return None
3062
3066
3063 # See if we got a message from -m or -l; if not, open the editor with
3067 # See if we got a message from -m or -l; if not, open the editor with
3064 # the message of the changeset to amend.
3068 # the message of the changeset to amend.
3065 message = logmessage(ui, opts)
3069 message = logmessage(ui, opts)
3066
3070
3067 editform = mergeeditform(old, b'commit.amend')
3071 editform = mergeeditform(old, b'commit.amend')
3068
3072
3069 if not message:
3073 if not message:
3070 message = old.description()
3074 message = old.description()
3071 # The default, if no message is provided and --edit is not passed, is to
3075 # The default, if no message is provided and --edit is not passed, is to
3072 # invoke the editor, but allow --no-edit. If somehow we don't have any
3076 # invoke the editor, but allow --no-edit. If somehow we don't have any
3073 # description, let's always start the editor.
3077 # description, let's always start the editor.
3074 doedit = not message or opts.get(b'edit') in [True, None]
3078 doedit = not message or opts.get(b'edit') in [True, None]
3075 else:
3079 else:
3076 # The default, if a message is provided, is to not invoke the editor, but allow
3080 # The default, if a message is provided, is to not invoke the editor, but allow
3077 # --edit.
3081 # --edit.
3078 doedit = opts.get(b'edit') is True
3082 doedit = opts.get(b'edit') is True
3079 editor = getcommiteditor(edit=doedit, editform=editform)
3083 editor = getcommiteditor(edit=doedit, editform=editform)
3080
3084
3081 pureextra = extra.copy()
3085 pureextra = extra.copy()
3082 extra[b'amend_source'] = old.hex()
3086 extra[b'amend_source'] = old.hex()
3083
3087
3084 new = context.memctx(
3088 new = context.memctx(
3085 repo,
3089 repo,
3086 parents=[base.node(), old.p2().node()],
3090 parents=[base.node(), old.p2().node()],
3087 text=message,
3091 text=message,
3088 files=files,
3092 files=files,
3089 filectxfn=filectxfn,
3093 filectxfn=filectxfn,
3090 user=user,
3094 user=user,
3091 date=date,
3095 date=date,
3092 extra=extra,
3096 extra=extra,
3093 editor=editor,
3097 editor=editor,
3094 )
3098 )
3095
3099
3096 newdesc = changelog.stripdesc(new.description())
3100 newdesc = changelog.stripdesc(new.description())
3097 if (
3101 if (
3098 (not changes)
3102 (not changes)
3099 and newdesc == old.description()
3103 and newdesc == old.description()
3100 and user == old.user()
3104 and user == old.user()
3101 and (date == old.date() or datemaydiffer)
3105 and (date == old.date() or datemaydiffer)
3102 and pureextra == old.extra()
3106 and pureextra == old.extra()
3103 ):
3107 ):
3104 # nothing changed. continuing here would create a new node
3108 # nothing changed. continuing here would create a new node
3105 # anyway because of the amend_source noise.
3109 # anyway because of the amend_source noise.
3106 #
3110 #
3107 # This is not what we expect from amend.
3111 # This is not what we expect from amend.
3108 return old.node()
3112 return old.node()
3109
3113
3110 commitphase = None
3114 commitphase = None
3111 if opts.get(b'secret'):
3115 if opts.get(b'secret'):
3112 commitphase = phases.secret
3116 commitphase = phases.secret
3113 newid = repo.commitctx(new)
3117 newid = repo.commitctx(new)
3114
3118
3115 # Reroute the working copy parent to the new changeset
3119 # Reroute the working copy parent to the new changeset
3116 repo.setparents(newid, nullid)
3120 repo.setparents(newid, nullid)
3117 mapping = {old.node(): (newid,)}
3121 mapping = {old.node(): (newid,)}
3118 obsmetadata = None
3122 obsmetadata = None
3119 if opts.get(b'note'):
3123 if opts.get(b'note'):
3120 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3124 obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
3121 backup = ui.configbool(b'rewrite', b'backup-bundle')
3125 backup = ui.configbool(b'rewrite', b'backup-bundle')
3122 scmutil.cleanupnodes(
3126 scmutil.cleanupnodes(
3123 repo,
3127 repo,
3124 mapping,
3128 mapping,
3125 b'amend',
3129 b'amend',
3126 metadata=obsmetadata,
3130 metadata=obsmetadata,
3127 fixphase=True,
3131 fixphase=True,
3128 targetphase=commitphase,
3132 targetphase=commitphase,
3129 backup=backup,
3133 backup=backup,
3130 )
3134 )
3131
3135
3132 # Fixing the dirstate because localrepo.commitctx does not update
3136 # Fixing the dirstate because localrepo.commitctx does not update
3133 # it. This is rather convenient because we did not need to update
3137 # it. This is rather convenient because we did not need to update
3134 # the dirstate for all the files in the new commit which commitctx
3138 # the dirstate for all the files in the new commit which commitctx
3135 # could have done if it updated the dirstate. Now, we can
3139 # could have done if it updated the dirstate. Now, we can
3136 # selectively update the dirstate only for the amended files.
3140 # selectively update the dirstate only for the amended files.
3137 dirstate = repo.dirstate
3141 dirstate = repo.dirstate
3138
3142
3139 # Update the state of the files which were added and modified in the
3143 # Update the state of the files which were added and modified in the
3140 # amend to "normal" in the dirstate. We need to use "normallookup" since
3144 # amend to "normal" in the dirstate. We need to use "normallookup" since
3141 # the files may have changed since the command started; using "normal"
3145 # the files may have changed since the command started; using "normal"
3142 # would mark them as clean but with uncommitted contents.
3146 # would mark them as clean but with uncommitted contents.
3143 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3147 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3144 for f in normalfiles:
3148 for f in normalfiles:
3145 dirstate.normallookup(f)
3149 dirstate.normallookup(f)
3146
3150
3147 # Update the state of files which were removed in the amend
3151 # Update the state of files which were removed in the amend
3148 # to "removed" in the dirstate.
3152 # to "removed" in the dirstate.
3149 removedfiles = set(wctx.removed()) & filestoamend
3153 removedfiles = set(wctx.removed()) & filestoamend
3150 for f in removedfiles:
3154 for f in removedfiles:
3151 dirstate.drop(f)
3155 dirstate.drop(f)
3152
3156
3153 return newid
3157 return newid
3154
3158
3155
3159
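# A minimal illustrative sketch of how a caller (for instance the
# "hg commit --amend" path in commands.py) might drive amend().  The helper
# name and its locals are hypothetical placeholders, not part of this
# module's API.
def _amend_usage_sketch(ui, repo, pats, opts):
    old = repo[b'.']  # the changeset being rewritten (working copy parent)
    extra = {}  # extra metadata to carry into the amended commit
    # amend() returns the new node, or the old node if nothing changed.
    return amend(ui, repo, old, extra, pats, opts)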
3156 def commiteditor(repo, ctx, subs, editform=b''):
3160 def commiteditor(repo, ctx, subs, editform=b''):
3157 if ctx.description():
3161 if ctx.description():
3158 return ctx.description()
3162 return ctx.description()
3159 return commitforceeditor(
3163 return commitforceeditor(
3160 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3164 repo, ctx, subs, editform=editform, unchangedmessagedetection=True
3161 )
3165 )
3162
3166
3163
3167
3164 def commitforceeditor(
3168 def commitforceeditor(
3165 repo,
3169 repo,
3166 ctx,
3170 ctx,
3167 subs,
3171 subs,
3168 finishdesc=None,
3172 finishdesc=None,
3169 extramsg=None,
3173 extramsg=None,
3170 editform=b'',
3174 editform=b'',
3171 unchangedmessagedetection=False,
3175 unchangedmessagedetection=False,
3172 ):
3176 ):
3173 if not extramsg:
3177 if not extramsg:
3174 extramsg = _(b"Leave message empty to abort commit.")
3178 extramsg = _(b"Leave message empty to abort commit.")
3175
3179
3176 forms = [e for e in editform.split(b'.') if e]
3180 forms = [e for e in editform.split(b'.') if e]
3177 forms.insert(0, b'changeset')
3181 forms.insert(0, b'changeset')
3178 templatetext = None
3182 templatetext = None
3179 while forms:
3183 while forms:
3180 ref = b'.'.join(forms)
3184 ref = b'.'.join(forms)
3181 if repo.ui.config(b'committemplate', ref):
3185 if repo.ui.config(b'committemplate', ref):
3182 templatetext = committext = buildcommittemplate(
3186 templatetext = committext = buildcommittemplate(
3183 repo, ctx, subs, extramsg, ref
3187 repo, ctx, subs, extramsg, ref
3184 )
3188 )
3185 break
3189 break
3186 forms.pop()
3190 forms.pop()
3187 else:
3191 else:
3188 committext = buildcommittext(repo, ctx, subs, extramsg)
3192 committext = buildcommittext(repo, ctx, subs, extramsg)
3189
3193
3190 # run editor in the repository root
3194 # run editor in the repository root
3191 olddir = encoding.getcwd()
3195 olddir = encoding.getcwd()
3192 os.chdir(repo.root)
3196 os.chdir(repo.root)
3193
3197
3194 # make in-memory changes visible to external process
3198 # make in-memory changes visible to external process
3195 tr = repo.currenttransaction()
3199 tr = repo.currenttransaction()
3196 repo.dirstate.write(tr)
3200 repo.dirstate.write(tr)
3197 pending = tr and tr.writepending() and repo.root
3201 pending = tr and tr.writepending() and repo.root
3198
3202
3199 editortext = repo.ui.edit(
3203 editortext = repo.ui.edit(
3200 committext,
3204 committext,
3201 ctx.user(),
3205 ctx.user(),
3202 ctx.extra(),
3206 ctx.extra(),
3203 editform=editform,
3207 editform=editform,
3204 pending=pending,
3208 pending=pending,
3205 repopath=repo.path,
3209 repopath=repo.path,
3206 action=b'commit',
3210 action=b'commit',
3207 )
3211 )
3208 text = editortext
3212 text = editortext
3209
3213
3210 # strip away anything below this special string (used for editors that want
3214 # strip away anything below this special string (used for editors that want
3211 # to display the diff)
3215 # to display the diff)
3212 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3216 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3213 if stripbelow:
3217 if stripbelow:
3214 text = text[: stripbelow.start()]
3218 text = text[: stripbelow.start()]
3215
3219
3216 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3220 text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
3217 os.chdir(olddir)
3221 os.chdir(olddir)
3218
3222
3219 if finishdesc:
3223 if finishdesc:
3220 text = finishdesc(text)
3224 text = finishdesc(text)
3221 if not text.strip():
3225 if not text.strip():
3222 raise error.Abort(_(b"empty commit message"))
3226 raise error.Abort(_(b"empty commit message"))
3223 if unchangedmessagedetection and editortext == templatetext:
3227 if unchangedmessagedetection and editortext == templatetext:
3224 raise error.Abort(_(b"commit message unchanged"))
3228 raise error.Abort(_(b"commit message unchanged"))
3225
3229
3226 return text
3230 return text
3227
3231
3228
3232
3229 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3233 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3230 ui = repo.ui
3234 ui = repo.ui
3231 spec = formatter.templatespec(ref, None, None)
3235 spec = formatter.templatespec(ref, None, None)
3232 t = logcmdutil.changesettemplater(ui, repo, spec)
3236 t = logcmdutil.changesettemplater(ui, repo, spec)
3233 t.t.cache.update(
3237 t.t.cache.update(
3234 (k, templater.unquotestring(v))
3238 (k, templater.unquotestring(v))
3235 for k, v in repo.ui.configitems(b'committemplate')
3239 for k, v in repo.ui.configitems(b'committemplate')
3236 )
3240 )
3237
3241
3238 if not extramsg:
3242 if not extramsg:
3239 extramsg = b'' # ensure that extramsg is string
3243 extramsg = b'' # ensure that extramsg is string
3240
3244
3241 ui.pushbuffer()
3245 ui.pushbuffer()
3242 t.show(ctx, extramsg=extramsg)
3246 t.show(ctx, extramsg=extramsg)
3243 return ui.popbuffer()
3247 return ui.popbuffer()
3244
3248
3245
3249
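# Illustrative example only: buildcommittemplate() is used when the user has
# configured a commit template.  A minimal (hypothetical) hgrc snippet that
# would trigger it for the plain "changeset" editform looks roughly like:
#
#   [committemplate]
#   changeset = {desc}\n\n
#       HG: Enter commit message.  Lines beginning with 'HG:' are removed.
#       HG: {extramsg}
#       HG: user: {author}
#
# Keys can also be qualified by editform (e.g. "changeset.commit.amend"),
# matching the ref lookup loop in commitforceeditor() above.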
3246 def hgprefix(msg):
3250 def hgprefix(msg):
3247 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3251 return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
3248
3252
3249
3253
3250 def buildcommittext(repo, ctx, subs, extramsg):
3254 def buildcommittext(repo, ctx, subs, extramsg):
3251 edittext = []
3255 edittext = []
3252 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3256 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3253 if ctx.description():
3257 if ctx.description():
3254 edittext.append(ctx.description())
3258 edittext.append(ctx.description())
3255 edittext.append(b"")
3259 edittext.append(b"")
3256 edittext.append(b"") # Empty line between message and comments.
3260 edittext.append(b"") # Empty line between message and comments.
3257 edittext.append(
3261 edittext.append(
3258 hgprefix(
3262 hgprefix(
3259 _(
3263 _(
3260 b"Enter commit message."
3264 b"Enter commit message."
3261 b" Lines beginning with 'HG:' are removed."
3265 b" Lines beginning with 'HG:' are removed."
3262 )
3266 )
3263 )
3267 )
3264 )
3268 )
3265 edittext.append(hgprefix(extramsg))
3269 edittext.append(hgprefix(extramsg))
3266 edittext.append(b"HG: --")
3270 edittext.append(b"HG: --")
3267 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3271 edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
3268 if ctx.p2():
3272 if ctx.p2():
3269 edittext.append(hgprefix(_(b"branch merge")))
3273 edittext.append(hgprefix(_(b"branch merge")))
3270 if ctx.branch():
3274 if ctx.branch():
3271 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3275 edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
3272 if bookmarks.isactivewdirparent(repo):
3276 if bookmarks.isactivewdirparent(repo):
3273 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3277 edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
3274 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3278 edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
3275 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3279 edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
3276 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3280 edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
3277 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3281 edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
3278 if not added and not modified and not removed:
3282 if not added and not modified and not removed:
3279 edittext.append(hgprefix(_(b"no files changed")))
3283 edittext.append(hgprefix(_(b"no files changed")))
3280 edittext.append(b"")
3284 edittext.append(b"")
3281
3285
3282 return b"\n".join(edittext)
3286 return b"\n".join(edittext)
3283
3287
3284
3288
3285 def commitstatus(repo, node, branch, bheads=None, opts=None):
3289 def commitstatus(repo, node, branch, bheads=None, opts=None):
3286 if opts is None:
3290 if opts is None:
3287 opts = {}
3291 opts = {}
3288 ctx = repo[node]
3292 ctx = repo[node]
3289 parents = ctx.parents()
3293 parents = ctx.parents()
3290
3294
3291 if (
3295 if (
3292 not opts.get(b'amend')
3296 not opts.get(b'amend')
3293 and bheads
3297 and bheads
3294 and node not in bheads
3298 and node not in bheads
3295 and not [
3299 and not [
3296 x for x in parents if x.node() in bheads and x.branch() == branch
3300 x for x in parents if x.node() in bheads and x.branch() == branch
3297 ]
3301 ]
3298 ):
3302 ):
3299 repo.ui.status(_(b'created new head\n'))
3303 repo.ui.status(_(b'created new head\n'))
3300 # The message is not printed for initial roots. For the other
3304 # The message is not printed for initial roots. For the other
3301 # changesets, it is printed in the following situations:
3305 # changesets, it is printed in the following situations:
3302 #
3306 #
3303 # Par column: for the 2 parents with ...
3307 # Par column: for the 2 parents with ...
3304 # N: null or no parent
3308 # N: null or no parent
3305 # B: parent is on another named branch
3309 # B: parent is on another named branch
3306 # C: parent is a regular non head changeset
3310 # C: parent is a regular non head changeset
3307 # H: parent was a branch head of the current branch
3311 # H: parent was a branch head of the current branch
3308 # Msg column: whether we print "created new head" message
3312 # Msg column: whether we print "created new head" message
3309 # In the following, it is assumed that there already exist some
3313 # In the following, it is assumed that there already exist some
3310 # initial branch heads of the current branch, otherwise nothing is
3314 # initial branch heads of the current branch, otherwise nothing is
3311 # printed anyway.
3315 # printed anyway.
3312 #
3316 #
3313 # Par Msg Comment
3317 # Par Msg Comment
3314 # N N y additional topo root
3318 # N N y additional topo root
3315 #
3319 #
3316 # B N y additional branch root
3320 # B N y additional branch root
3317 # C N y additional topo head
3321 # C N y additional topo head
3318 # H N n usual case
3322 # H N n usual case
3319 #
3323 #
3320 # B B y weird additional branch root
3324 # B B y weird additional branch root
3321 # C B y branch merge
3325 # C B y branch merge
3322 # H B n merge with named branch
3326 # H B n merge with named branch
3323 #
3327 #
3324 # C C y additional head from merge
3328 # C C y additional head from merge
3325 # C H n merge with a head
3329 # C H n merge with a head
3326 #
3330 #
3327 # H H n head merge: head count decreases
3331 # H H n head merge: head count decreases
3328
3332
3329 if not opts.get(b'close_branch'):
3333 if not opts.get(b'close_branch'):
3330 for r in parents:
3334 for r in parents:
3331 if r.closesbranch() and r.branch() == branch:
3335 if r.closesbranch() and r.branch() == branch:
3332 repo.ui.status(
3336 repo.ui.status(
3333 _(b'reopening closed branch head %d\n') % r.rev()
3337 _(b'reopening closed branch head %d\n') % r.rev()
3334 )
3338 )
3335
3339
3336 if repo.ui.debugflag:
3340 if repo.ui.debugflag:
3337 repo.ui.write(
3341 repo.ui.write(
3338 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3342 _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
3339 )
3343 )
3340 elif repo.ui.verbose:
3344 elif repo.ui.verbose:
3341 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3345 repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
3342
3346
3343
3347
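# A minimal illustrative sketch of how a command that just created a commit
# could report it through commitstatus().  The helper name and its locals are
# hypothetical placeholders, not part of this module's API.
def _commitstatus_usage_sketch(repo, node, opts):
    branch = repo[node].branch()
    # branchheads() may be empty for a brand new branch; commitstatus()
    # handles that and simply skips the "created new head" message.
    bheads = repo.branchheads(branch)
    commitstatus(repo, node, branch, bheads=bheads, opts=opts)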
3344 def postcommitstatus(repo, pats, opts):
3348 def postcommitstatus(repo, pats, opts):
3345 return repo.status(match=scmutil.match(repo[None], pats, opts))
3349 return repo.status(match=scmutil.match(repo[None], pats, opts))
3346
3350
3347
3351
3348 def revert(ui, repo, ctx, parents, *pats, **opts):
3352 def revert(ui, repo, ctx, parents, *pats, **opts):
3349 opts = pycompat.byteskwargs(opts)
3353 opts = pycompat.byteskwargs(opts)
3350 parent, p2 = parents
3354 parent, p2 = parents
3351 node = ctx.node()
3355 node = ctx.node()
3352
3356
3353 mf = ctx.manifest()
3357 mf = ctx.manifest()
3354 if node == p2:
3358 if node == p2:
3355 parent = p2
3359 parent = p2
3356
3360
3357 # need all matching names in dirstate and manifest of target rev,
3361 # need all matching names in dirstate and manifest of target rev,
3358 # so have to walk both. do not print errors if files exist in one
3362 # so have to walk both. do not print errors if files exist in one
3359 # but not other. in both cases, filesets should be evaluated against
3363 # but not other. in both cases, filesets should be evaluated against
3360 # workingctx to get consistent result (issue4497). this means 'set:**'
3364 # workingctx to get consistent result (issue4497). this means 'set:**'
3361 # cannot be used to select missing files from target rev.
3365 # cannot be used to select missing files from target rev.
3362
3366
3363 # `names` is a mapping for all elements in working copy and target revision
3367 # `names` is a mapping for all elements in working copy and target revision
3364 # The mapping is in the form:
3368 # The mapping is in the form:
3365 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3369 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3366 names = {}
3370 names = {}
3367 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3371 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
3368
3372
3369 with repo.wlock():
3373 with repo.wlock():
3370 ## filling of the `names` mapping
3374 ## filling of the `names` mapping
3371 # walk dirstate to fill `names`
3375 # walk dirstate to fill `names`
3372
3376
3373 interactive = opts.get(b'interactive', False)
3377 interactive = opts.get(b'interactive', False)
3374 wctx = repo[None]
3378 wctx = repo[None]
3375 m = scmutil.match(wctx, pats, opts)
3379 m = scmutil.match(wctx, pats, opts)
3376
3380
3377 # we'll need this later
3381 # we'll need this later
3378 targetsubs = sorted(s for s in wctx.substate if m(s))
3382 targetsubs = sorted(s for s in wctx.substate if m(s))
3379
3383
3380 if not m.always():
3384 if not m.always():
3381 matcher = matchmod.badmatch(m, lambda x, y: False)
3385 matcher = matchmod.badmatch(m, lambda x, y: False)
3382 for abs in wctx.walk(matcher):
3386 for abs in wctx.walk(matcher):
3383 names[abs] = m.exact(abs)
3387 names[abs] = m.exact(abs)
3384
3388
3385 # walk target manifest to fill `names`
3389 # walk target manifest to fill `names`
3386
3390
3387 def badfn(path, msg):
3391 def badfn(path, msg):
3388 if path in names:
3392 if path in names:
3389 return
3393 return
3390 if path in ctx.substate:
3394 if path in ctx.substate:
3391 return
3395 return
3392 path_ = path + b'/'
3396 path_ = path + b'/'
3393 for f in names:
3397 for f in names:
3394 if f.startswith(path_):
3398 if f.startswith(path_):
3395 return
3399 return
3396 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3400 ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
3397
3401
3398 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3402 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3399 if abs not in names:
3403 if abs not in names:
3400 names[abs] = m.exact(abs)
3404 names[abs] = m.exact(abs)
3401
3405
3402 # Find the status of all files in `names`.
3406 # Find the status of all files in `names`.
3403 m = scmutil.matchfiles(repo, names)
3407 m = scmutil.matchfiles(repo, names)
3404
3408
3405 changes = repo.status(
3409 changes = repo.status(
3406 node1=node, match=m, unknown=True, ignored=True, clean=True
3410 node1=node, match=m, unknown=True, ignored=True, clean=True
3407 )
3411 )
3408 else:
3412 else:
3409 changes = repo.status(node1=node, match=m)
3413 changes = repo.status(node1=node, match=m)
3410 for kind in changes:
3414 for kind in changes:
3411 for abs in kind:
3415 for abs in kind:
3412 names[abs] = m.exact(abs)
3416 names[abs] = m.exact(abs)
3413
3417
3414 m = scmutil.matchfiles(repo, names)
3418 m = scmutil.matchfiles(repo, names)
3415
3419
3416 modified = set(changes.modified)
3420 modified = set(changes.modified)
3417 added = set(changes.added)
3421 added = set(changes.added)
3418 removed = set(changes.removed)
3422 removed = set(changes.removed)
3419 _deleted = set(changes.deleted)
3423 _deleted = set(changes.deleted)
3420 unknown = set(changes.unknown)
3424 unknown = set(changes.unknown)
3421 unknown.update(changes.ignored)
3425 unknown.update(changes.ignored)
3422 clean = set(changes.clean)
3426 clean = set(changes.clean)
3423 modadded = set()
3427 modadded = set()
3424
3428
3425 # We need to account for the state of the file in the dirstate,
3429 # We need to account for the state of the file in the dirstate,
3426 # even when we revert against something other than the parent. This will
3430 # even when we revert against something other than the parent. This will
3427 # slightly alter the behavior of revert (making a backup or not, deleting
3431 # slightly alter the behavior of revert (making a backup or not, deleting
3428 # or just forgetting, etc.).
3432 # or just forgetting, etc.).
3429 if parent == node:
3433 if parent == node:
3430 dsmodified = modified
3434 dsmodified = modified
3431 dsadded = added
3435 dsadded = added
3432 dsremoved = removed
3436 dsremoved = removed
3433 # store all local modifications, useful later for rename detection
3437 # store all local modifications, useful later for rename detection
3434 localchanges = dsmodified | dsadded
3438 localchanges = dsmodified | dsadded
3435 modified, added, removed = set(), set(), set()
3439 modified, added, removed = set(), set(), set()
3436 else:
3440 else:
3437 changes = repo.status(node1=parent, match=m)
3441 changes = repo.status(node1=parent, match=m)
3438 dsmodified = set(changes.modified)
3442 dsmodified = set(changes.modified)
3439 dsadded = set(changes.added)
3443 dsadded = set(changes.added)
3440 dsremoved = set(changes.removed)
3444 dsremoved = set(changes.removed)
3441 # store all local modifications, useful later for rename detection
3445 # store all local modifications, useful later for rename detection
3442 localchanges = dsmodified | dsadded
3446 localchanges = dsmodified | dsadded
3443
3447
3444 # only take into account removes between wc and target
3448 # only take into account removes between wc and target
3445 clean |= dsremoved - removed
3449 clean |= dsremoved - removed
3446 dsremoved &= removed
3450 dsremoved &= removed
3447 # distinguish between dirstate removes and other removes
3451 # distinguish between dirstate removes and other removes
3448 removed -= dsremoved
3452 removed -= dsremoved
3449
3453
3450 modadded = added & dsmodified
3454 modadded = added & dsmodified
3451 added -= modadded
3455 added -= modadded
3452
3456
3453 # tell newly modified files apart.
3457 # tell newly modified files apart.
3454 dsmodified &= modified
3458 dsmodified &= modified
3455 dsmodified |= modified & dsadded # dirstate added may need backup
3459 dsmodified |= modified & dsadded # dirstate added may need backup
3456 modified -= dsmodified
3460 modified -= dsmodified
3457
3461
3458 # We need to wait for some post-processing to update this set
3462 # We need to wait for some post-processing to update this set
3459 # before making the distinction. The dirstate will be used for
3463 # before making the distinction. The dirstate will be used for
3460 # that purpose.
3464 # that purpose.
3461 dsadded = added
3465 dsadded = added
3462
3466
3463 # in case of merge, files that are actually added can be reported as
3467 # in case of merge, files that are actually added can be reported as
3464 # modified; we need to post-process the result
3468 # modified; we need to post-process the result
3465 if p2 != nullid:
3469 if p2 != nullid:
3466 mergeadd = set(dsmodified)
3470 mergeadd = set(dsmodified)
3467 for path in dsmodified:
3471 for path in dsmodified:
3468 if path in mf:
3472 if path in mf:
3469 mergeadd.remove(path)
3473 mergeadd.remove(path)
3470 dsadded |= mergeadd
3474 dsadded |= mergeadd
3471 dsmodified -= mergeadd
3475 dsmodified -= mergeadd
3472
3476
3473 # if f is a rename, update `names` to also revert the source
3477 # if f is a rename, update `names` to also revert the source
3474 for f in localchanges:
3478 for f in localchanges:
3475 src = repo.dirstate.copied(f)
3479 src = repo.dirstate.copied(f)
3476 # XXX should we check for rename down to target node?
3480 # XXX should we check for rename down to target node?
3477 if src and src not in names and repo.dirstate[src] == b'r':
3481 if src and src not in names and repo.dirstate[src] == b'r':
3478 dsremoved.add(src)
3482 dsremoved.add(src)
3479 names[src] = True
3483 names[src] = True
3480
3484
3481 # determine the exact nature of the deleted changesets
3485 # determine the exact nature of the deleted changesets
3482 deladded = set(_deleted)
3486 deladded = set(_deleted)
3483 for path in _deleted:
3487 for path in _deleted:
3484 if path in mf:
3488 if path in mf:
3485 deladded.remove(path)
3489 deladded.remove(path)
3486 deleted = _deleted - deladded
3490 deleted = _deleted - deladded
3487
3491
3488 # distinguish between file to forget and the other
3492 # distinguish between file to forget and the other
3489 added = set()
3493 added = set()
3490 for abs in dsadded:
3494 for abs in dsadded:
3491 if repo.dirstate[abs] != b'a':
3495 if repo.dirstate[abs] != b'a':
3492 added.add(abs)
3496 added.add(abs)
3493 dsadded -= added
3497 dsadded -= added
3494
3498
3495 for abs in deladded:
3499 for abs in deladded:
3496 if repo.dirstate[abs] == b'a':
3500 if repo.dirstate[abs] == b'a':
3497 dsadded.add(abs)
3501 dsadded.add(abs)
3498 deladded -= dsadded
3502 deladded -= dsadded
3499
3503
3500 # For files marked as removed, we check if an unknown file is present at
3504 # For files marked as removed, we check if an unknown file is present at
3501 # the same path. If such a file exists it may need to be backed up.
3505 # the same path. If such a file exists it may need to be backed up.
3502 # Making the distinction at this stage helps have simpler backup
3506 # Making the distinction at this stage helps have simpler backup
3503 # logic.
3507 # logic.
3504 removunk = set()
3508 removunk = set()
3505 for abs in removed:
3509 for abs in removed:
3506 target = repo.wjoin(abs)
3510 target = repo.wjoin(abs)
3507 if os.path.lexists(target):
3511 if os.path.lexists(target):
3508 removunk.add(abs)
3512 removunk.add(abs)
3509 removed -= removunk
3513 removed -= removunk
3510
3514
3511 dsremovunk = set()
3515 dsremovunk = set()
3512 for abs in dsremoved:
3516 for abs in dsremoved:
3513 target = repo.wjoin(abs)
3517 target = repo.wjoin(abs)
3514 if os.path.lexists(target):
3518 if os.path.lexists(target):
3515 dsremovunk.add(abs)
3519 dsremovunk.add(abs)
3516 dsremoved -= dsremovunk
3520 dsremoved -= dsremovunk
3517
3521
3518 # actions to be actually performed by revert
3522 # actions to be actually performed by revert
3519 # (<list of files>, <message>) tuple
3523 # (<list of files>, <message>) tuple
3520 actions = {
3524 actions = {
3521 b'revert': ([], _(b'reverting %s\n')),
3525 b'revert': ([], _(b'reverting %s\n')),
3522 b'add': ([], _(b'adding %s\n')),
3526 b'add': ([], _(b'adding %s\n')),
3523 b'remove': ([], _(b'removing %s\n')),
3527 b'remove': ([], _(b'removing %s\n')),
3524 b'drop': ([], _(b'removing %s\n')),
3528 b'drop': ([], _(b'removing %s\n')),
3525 b'forget': ([], _(b'forgetting %s\n')),
3529 b'forget': ([], _(b'forgetting %s\n')),
3526 b'undelete': ([], _(b'undeleting %s\n')),
3530 b'undelete': ([], _(b'undeleting %s\n')),
3527 b'noop': (None, _(b'no changes needed to %s\n')),
3531 b'noop': (None, _(b'no changes needed to %s\n')),
3528 b'unknown': (None, _(b'file not managed: %s\n')),
3532 b'unknown': (None, _(b'file not managed: %s\n')),
3529 }
3533 }
3530
3534
3531 # "constants" that convey the backup strategy.
3535 # "constants" that convey the backup strategy.
3532 # All are set to `discard` if `no-backup` is set, to avoid checking
3536 # All are set to `discard` if `no-backup` is set, to avoid checking
3533 # no_backup lower in the code.
3537 # no_backup lower in the code.
3534 # These values are ordered for comparison purposes
3538 # These values are ordered for comparison purposes
3535 backupinteractive = 3 # do backup if interactively modified
3539 backupinteractive = 3 # do backup if interactively modified
3536 backup = 2 # unconditionally do backup
3540 backup = 2 # unconditionally do backup
3537 check = 1 # check if the existing file differs from target
3541 check = 1 # check if the existing file differs from target
3538 discard = 0 # never do backup
3542 discard = 0 # never do backup
3539 if opts.get(b'no_backup'):
3543 if opts.get(b'no_backup'):
3540 backupinteractive = backup = check = discard
3544 backupinteractive = backup = check = discard
3541 if interactive:
3545 if interactive:
3542 dsmodifiedbackup = backupinteractive
3546 dsmodifiedbackup = backupinteractive
3543 else:
3547 else:
3544 dsmodifiedbackup = backup
3548 dsmodifiedbackup = backup
3545 tobackup = set()
3549 tobackup = set()
3546
3550
3547 backupanddel = actions[b'remove']
3551 backupanddel = actions[b'remove']
3548 if not opts.get(b'no_backup'):
3552 if not opts.get(b'no_backup'):
3549 backupanddel = actions[b'drop']
3553 backupanddel = actions[b'drop']
3550
3554
3551 disptable = (
3555 disptable = (
3552 # dispatch table:
3556 # dispatch table:
3553 # file state
3557 # file state
3554 # action
3558 # action
3555 # make backup
3559 # make backup
3556 ## Sets whose results will change files on disk
3560 ## Sets whose results will change files on disk
3557 # Modified compared to target, no local change
3561 # Modified compared to target, no local change
3558 (modified, actions[b'revert'], discard),
3562 (modified, actions[b'revert'], discard),
3559 # Modified compared to target, but local file is deleted
3563 # Modified compared to target, but local file is deleted
3560 (deleted, actions[b'revert'], discard),
3564 (deleted, actions[b'revert'], discard),
3561 # Modified compared to target, local change
3565 # Modified compared to target, local change
3562 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3566 (dsmodified, actions[b'revert'], dsmodifiedbackup),
3563 # Added since target
3567 # Added since target
3564 (added, actions[b'remove'], discard),
3568 (added, actions[b'remove'], discard),
3565 # Added in working directory
3569 # Added in working directory
3566 (dsadded, actions[b'forget'], discard),
3570 (dsadded, actions[b'forget'], discard),
3567 # Added since target, have local modification
3571 # Added since target, have local modification
3568 (modadded, backupanddel, backup),
3572 (modadded, backupanddel, backup),
3569 # Added since target but file is missing in working directory
3573 # Added since target but file is missing in working directory
3570 (deladded, actions[b'drop'], discard),
3574 (deladded, actions[b'drop'], discard),
3571 # Removed since target, before working copy parent
3575 # Removed since target, before working copy parent
3572 (removed, actions[b'add'], discard),
3576 (removed, actions[b'add'], discard),
3573 # Same as `removed` but an unknown file exists at the same path
3577 # Same as `removed` but an unknown file exists at the same path
3574 (removunk, actions[b'add'], check),
3578 (removunk, actions[b'add'], check),
3575 # Removed since target, marked as such in working copy parent
3579 # Removed since target, marked as such in working copy parent
3576 (dsremoved, actions[b'undelete'], discard),
3580 (dsremoved, actions[b'undelete'], discard),
3577 # Same as `dsremoved` but an unknown file exists at the same path
3581 # Same as `dsremoved` but an unknown file exists at the same path
3578 (dsremovunk, actions[b'undelete'], check),
3582 (dsremovunk, actions[b'undelete'], check),
3579 ## the following sets do not result in any file changes
3583 ## the following sets do not result in any file changes
3580 # File with no modification
3584 # File with no modification
3581 (clean, actions[b'noop'], discard),
3585 (clean, actions[b'noop'], discard),
3582 # Existing file, not tracked anywhere
3586 # Existing file, not tracked anywhere
3583 (unknown, actions[b'unknown'], discard),
3587 (unknown, actions[b'unknown'], discard),
3584 )
3588 )
3585
3589
3586 for abs, exact in sorted(names.items()):
3590 for abs, exact in sorted(names.items()):
3587 # target file to be touched on disk (relative to cwd)
3591 # target file to be touched on disk (relative to cwd)
3588 target = repo.wjoin(abs)
3592 target = repo.wjoin(abs)
3589 # search the entry in the dispatch table.
3593 # search the entry in the dispatch table.
3590 # if the file is in any of these sets, it was touched in the working
3594 # if the file is in any of these sets, it was touched in the working
3591 # directory parent and we are sure it needs to be reverted.
3595 # directory parent and we are sure it needs to be reverted.
3592 for table, (xlist, msg), dobackup in disptable:
3596 for table, (xlist, msg), dobackup in disptable:
3593 if abs not in table:
3597 if abs not in table:
3594 continue
3598 continue
3595 if xlist is not None:
3599 if xlist is not None:
3596 xlist.append(abs)
3600 xlist.append(abs)
3597 if dobackup:
3601 if dobackup:
3598 # If in interactive mode, don't automatically create
3602 # If in interactive mode, don't automatically create
3599 # .orig files (issue4793)
3603 # .orig files (issue4793)
3600 if dobackup == backupinteractive:
3604 if dobackup == backupinteractive:
3601 tobackup.add(abs)
3605 tobackup.add(abs)
3602 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3606 elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
3603 absbakname = scmutil.backuppath(ui, repo, abs)
3607 absbakname = scmutil.backuppath(ui, repo, abs)
3604 bakname = os.path.relpath(
3608 bakname = os.path.relpath(
3605 absbakname, start=repo.root
3609 absbakname, start=repo.root
3606 )
3610 )
3607 ui.note(
3611 ui.note(
3608 _(b'saving current version of %s as %s\n')
3612 _(b'saving current version of %s as %s\n')
3609 % (uipathfn(abs), uipathfn(bakname))
3613 % (uipathfn(abs), uipathfn(bakname))
3610 )
3614 )
3611 if not opts.get(b'dry_run'):
3615 if not opts.get(b'dry_run'):
3612 if interactive:
3616 if interactive:
3613 util.copyfile(target, absbakname)
3617 util.copyfile(target, absbakname)
3614 else:
3618 else:
3615 util.rename(target, absbakname)
3619 util.rename(target, absbakname)
3616 if opts.get(b'dry_run'):
3620 if opts.get(b'dry_run'):
3617 if ui.verbose or not exact:
3621 if ui.verbose or not exact:
3618 ui.status(msg % uipathfn(abs))
3622 ui.status(msg % uipathfn(abs))
3619 elif exact:
3623 elif exact:
3620 ui.warn(msg % uipathfn(abs))
3624 ui.warn(msg % uipathfn(abs))
3621 break
3625 break
3622
3626
3623 if not opts.get(b'dry_run'):
3627 if not opts.get(b'dry_run'):
3624 needdata = (b'revert', b'add', b'undelete')
3628 needdata = (b'revert', b'add', b'undelete')
3625 oplist = [actions[name][0] for name in needdata]
3629 oplist = [actions[name][0] for name in needdata]
3626 prefetch = scmutil.prefetchfiles
3630 prefetch = scmutil.prefetchfiles
3627 matchfiles = scmutil.matchfiles
3631 matchfiles = scmutil.matchfiles
3628 prefetch(
3632 prefetch(
3629 repo,
3633 repo,
3630 [ctx.rev()],
3634 [ctx.rev()],
3631 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3635 matchfiles(repo, [f for sublist in oplist for f in sublist]),
3632 )
3636 )
3633 match = scmutil.match(repo[None], pats)
3637 match = scmutil.match(repo[None], pats)
3634 _performrevert(
3638 _performrevert(
3635 repo,
3639 repo,
3636 parents,
3640 parents,
3637 ctx,
3641 ctx,
3638 names,
3642 names,
3639 uipathfn,
3643 uipathfn,
3640 actions,
3644 actions,
3641 match,
3645 match,
3642 interactive,
3646 interactive,
3643 tobackup,
3647 tobackup,
3644 )
3648 )
3645
3649
3646 if targetsubs:
3650 if targetsubs:
3647 # Revert the subrepos on the revert list
3651 # Revert the subrepos on the revert list
3648 for sub in targetsubs:
3652 for sub in targetsubs:
3649 try:
3653 try:
3650 wctx.sub(sub).revert(
3654 wctx.sub(sub).revert(
3651 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3655 ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
3652 )
3656 )
3653 except KeyError:
3657 except KeyError:
3654 raise error.Abort(
3658 raise error.Abort(
3655 b"subrepository '%s' does not exist in %s!"
3659 b"subrepository '%s' does not exist in %s!"
3656 % (sub, short(ctx.node()))
3660 % (sub, short(ctx.node()))
3657 )
3661 )
3658
3662
3659
3663
3660 def _performrevert(
3664 def _performrevert(
3661 repo,
3665 repo,
3662 parents,
3666 parents,
3663 ctx,
3667 ctx,
3664 names,
3668 names,
3665 uipathfn,
3669 uipathfn,
3666 actions,
3670 actions,
3667 match,
3671 match,
3668 interactive=False,
3672 interactive=False,
3669 tobackup=None,
3673 tobackup=None,
3670 ):
3674 ):
3671 """function that actually performs all the actions computed for revert
3675 """function that actually performs all the actions computed for revert
3672
3676
3673 This is an independent function to let extensions plug in and react to
3677 This is an independent function to let extensions plug in and react to
3674 the imminent revert.
3678 the imminent revert.
3675
3679
3676 Make sure you have the working directory locked when calling this function.
3680 Make sure you have the working directory locked when calling this function.
3677 """
3681 """
3678 parent, p2 = parents
3682 parent, p2 = parents
3679 node = ctx.node()
3683 node = ctx.node()
3680 excluded_files = []
3684 excluded_files = []
3681
3685
3682 def checkout(f):
3686 def checkout(f):
3683 fc = ctx[f]
3687 fc = ctx[f]
3684 repo.wwrite(f, fc.data(), fc.flags())
3688 repo.wwrite(f, fc.data(), fc.flags())
3685
3689
3686 def doremove(f):
3690 def doremove(f):
3687 try:
3691 try:
3688 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3692 rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
3689 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3693 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3690 except OSError:
3694 except OSError:
3691 pass
3695 pass
3692 repo.dirstate.remove(f)
3696 repo.dirstate.remove(f)
3693
3697
3694 def prntstatusmsg(action, f):
3698 def prntstatusmsg(action, f):
3695 exact = names[f]
3699 exact = names[f]
3696 if repo.ui.verbose or not exact:
3700 if repo.ui.verbose or not exact:
3697 repo.ui.status(actions[action][1] % uipathfn(f))
3701 repo.ui.status(actions[action][1] % uipathfn(f))
3698
3702
3699 audit_path = pathutil.pathauditor(repo.root, cached=True)
3703 audit_path = pathutil.pathauditor(repo.root, cached=True)
3700 for f in actions[b'forget'][0]:
3704 for f in actions[b'forget'][0]:
3701 if interactive:
3705 if interactive:
3702 choice = repo.ui.promptchoice(
3706 choice = repo.ui.promptchoice(
3703 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3707 _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3704 )
3708 )
3705 if choice == 0:
3709 if choice == 0:
3706 prntstatusmsg(b'forget', f)
3710 prntstatusmsg(b'forget', f)
3707 repo.dirstate.drop(f)
3711 repo.dirstate.drop(f)
3708 else:
3712 else:
3709 excluded_files.append(f)
3713 excluded_files.append(f)
3710 else:
3714 else:
3711 prntstatusmsg(b'forget', f)
3715 prntstatusmsg(b'forget', f)
3712 repo.dirstate.drop(f)
3716 repo.dirstate.drop(f)
3713 for f in actions[b'remove'][0]:
3717 for f in actions[b'remove'][0]:
3714 audit_path(f)
3718 audit_path(f)
3715 if interactive:
3719 if interactive:
3716 choice = repo.ui.promptchoice(
3720 choice = repo.ui.promptchoice(
3717 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3721 _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3718 )
3722 )
3719 if choice == 0:
3723 if choice == 0:
3720 prntstatusmsg(b'remove', f)
3724 prntstatusmsg(b'remove', f)
3721 doremove(f)
3725 doremove(f)
3722 else:
3726 else:
3723 excluded_files.append(f)
3727 excluded_files.append(f)
3724 else:
3728 else:
3725 prntstatusmsg(b'remove', f)
3729 prntstatusmsg(b'remove', f)
3726 doremove(f)
3730 doremove(f)
3727 for f in actions[b'drop'][0]:
3731 for f in actions[b'drop'][0]:
3728 audit_path(f)
3732 audit_path(f)
3729 prntstatusmsg(b'drop', f)
3733 prntstatusmsg(b'drop', f)
3730 repo.dirstate.remove(f)
3734 repo.dirstate.remove(f)
3731
3735
3732 normal = None
3736 normal = None
3733 if node == parent:
3737 if node == parent:
3734 # We're reverting to our parent. If possible, we'd like status
3738 # We're reverting to our parent. If possible, we'd like status
3735 # to report the file as clean. We have to use normallookup for
3739 # to report the file as clean. We have to use normallookup for
3736 # merges to avoid losing information about merged/dirty files.
3740 # merges to avoid losing information about merged/dirty files.
3737 if p2 != nullid:
3741 if p2 != nullid:
3738 normal = repo.dirstate.normallookup
3742 normal = repo.dirstate.normallookup
3739 else:
3743 else:
3740 normal = repo.dirstate.normal
3744 normal = repo.dirstate.normal
3741
3745
3742 newlyaddedandmodifiedfiles = set()
3746 newlyaddedandmodifiedfiles = set()
3743 if interactive:
3747 if interactive:
3744 # Prompt the user for changes to revert
3748 # Prompt the user for changes to revert
3745 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3749 torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
3746 m = scmutil.matchfiles(repo, torevert)
3750 m = scmutil.matchfiles(repo, torevert)
3747 diffopts = patch.difffeatureopts(
3751 diffopts = patch.difffeatureopts(
3748 repo.ui,
3752 repo.ui,
3749 whitespace=True,
3753 whitespace=True,
3750 section=b'commands',
3754 section=b'commands',
3751 configprefix=b'revert.interactive.',
3755 configprefix=b'revert.interactive.',
3752 )
3756 )
3753 diffopts.nodates = True
3757 diffopts.nodates = True
3754 diffopts.git = True
3758 diffopts.git = True
3755 operation = b'apply'
3759 operation = b'apply'
3756 if node == parent:
3760 if node == parent:
3757 if repo.ui.configbool(
3761 if repo.ui.configbool(
3758 b'experimental', b'revert.interactive.select-to-keep'
3762 b'experimental', b'revert.interactive.select-to-keep'
3759 ):
3763 ):
3760 operation = b'keep'
3764 operation = b'keep'
3761 else:
3765 else:
3762 operation = b'discard'
3766 operation = b'discard'
3763
3767
3764 if operation == b'apply':
3768 if operation == b'apply':
3765 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3769 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3766 else:
3770 else:
3767 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3771 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3768 originalchunks = patch.parsepatch(diff)
3772 originalchunks = patch.parsepatch(diff)
3769
3773
3770 try:
3774 try:
3771
3775
3772 chunks, opts = recordfilter(
3776 chunks, opts = recordfilter(
3773 repo.ui, originalchunks, match, operation=operation
3777 repo.ui, originalchunks, match, operation=operation
3774 )
3778 )
3775 if operation == b'discard':
3779 if operation == b'discard':
3776 chunks = patch.reversehunks(chunks)
3780 chunks = patch.reversehunks(chunks)
3777
3781
3778 except error.PatchError as err:
3782 except error.PatchError as err:
3779 raise error.Abort(_(b'error parsing patch: %s') % err)
3783 raise error.Abort(_(b'error parsing patch: %s') % err)
3780
3784
3781 # FIXME: when doing an interactive revert of a copy, there's no way of
3785 # FIXME: when doing an interactive revert of a copy, there's no way of
3782 # performing a partial revert of the added file; the only option is
3786 # performing a partial revert of the added file; the only option is
3783 # "remove added file <name> (Yn)?", so we don't need to worry about the
3787 # "remove added file <name> (Yn)?", so we don't need to worry about the
3784 # alsorestore value. Ideally we'd be able to partially revert
3788 # alsorestore value. Ideally we'd be able to partially revert
        # copied/renamed files.
        newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
            chunks, originalchunks
        )
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
            prntstatusmsg(b'revert', f)
        files = set()
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.backuppath(repo.ui, repo, abs)
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
                if abs not in files:
                    files.add(abs)
                    if operation == b'keep':
                        checkout(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(pycompat.bytestr(err))
        del fp
    else:
        for f in actions[b'revert'][0]:
            prntstatusmsg(b'revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions[b'add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            prntstatusmsg(b'add', f)
            checkout(f)
            repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions[b'undelete'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
            )
            if choice == 0:
                prntstatusmsg(b'undelete', f)
                checkout(f)
                normal(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg(b'undelete', f)
            checkout(f)
            normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in (
        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
    ):
        if f in copied:
            repo.dirstate.copy(copied[f], f)


# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is the "missing" attribute of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()
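
# --- illustrative sketch (not part of cmdutil.py) ---------------------------
# A minimal extension registering an outgoing hook, to show how the
# (ui, repo, other, opts, missing) contract above might be consumed. The
# extension name b'myext' and the hook body are hypothetical; hook lists like
# outgoinghooks are populated with util.hooks.add(source, hook).
from mercurial import cmdutil


def _reportoutgoing(ui, repo, other, opts, missing):
    # "missing" holds the changesets that would be pushed
    ui.note(b'myext: %d outgoing changesets\n' % len(missing))


def uisetup(ui):
    cmdutil.outgoinghooks.add(b'myext', _reportoutgoing)
# --- end of sketch -----------------------------------------------------------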

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return a tuple of booleans as below if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
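
# --- illustrative sketch (not part of cmdutil.py) ---------------------------
# A hypothetical pair of summary hooks following the contracts described
# above: the remote hook answers the boolean query when 'changes' is None and
# otherwise unpacks the (source..., dest...) tuples. Names and messages are
# made up.
from mercurial import cmdutil


def _summaryhook(ui, repo):
    ui.status(b'myext: nothing special to report\n')


def _summaryremotehook(ui, repo, opts, changes):
    if changes is None:
        # (whether-incomings-are-needed, whether-outgoings-are-needed)
        return (False, True)
    sourceinfo, destinfo = changes
    desturl, destbranch, destpeer, outgoing = destinfo
    if desturl is not None:
        ui.status(b'myext: would push to %s\n' % desturl)


def uisetup(ui):
    cmdutil.summaryhooks.add(b'myext', _summaryhook)
    cmdutil.summaryremotehooks.add(b'myext', _summaryremotehook)
# --- end of sketch -----------------------------------------------------------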


def checkunfinished(repo, commit=False, skipmerge=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    # Check for non-clearable states first, so things like rebase will take
    # precedence over update.
    for state in statemod._unfinishedstates:
        if (
            state._clearable
            or (commit and state._allowcommit)
            or state._reportonly
        ):
            continue
        if state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    for s in statemod._unfinishedstates:
        if (
            not s._clearable
            or (commit and s._allowcommit)
            or (s._opname == b'merge' and skipmerge)
            or s._reportonly
        ):
            continue
        if s.isunfinished(repo):
            raise error.Abort(s.msg(), hint=s.hint())
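
# --- illustrative sketch (not part of cmdutil.py) ---------------------------
# Typical call site, per the docstring above: a command refuses to start a new
# multistep operation while another one is unfinished, then bails out if the
# working directory is dirty. The command itself is hypothetical;
# checkunfinished() and bailifchanged() are helpers from this module.
from mercurial import cmdutil


def mycommand(ui, repo, **opts):
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    # ... proceed with the operation proper
# --- end of sketch -----------------------------------------------------------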


def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    for state in statemod._unfinishedstates:
        if state._reportonly:
            continue
        if not state._clearable and state.isunfinished(repo):
            raise error.Abort(state.msg(), hint=state.hint())

    for s in statemod._unfinishedstates:
        if s._opname == b'merge' or state._reportonly:
            continue
        if s._clearable and s.isunfinished(repo):
            util.unlink(repo.vfs.join(s._fname))


def getunfinishedstate(repo):
    '''Check for unfinished operations and return the statecheck object
    for the first one found, or None.'''
    for state in statemod._unfinishedstates:
        if state.isunfinished(repo):
            return state
    return None
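
# --- illustrative sketch (not part of cmdutil.py) ---------------------------
# One way a caller could use getunfinishedstate() to report what is in
# progress. The helper name and messages are made up; msg(), hint() and the
# None return value come from the code above.
from mercurial import cmdutil


def _reportunfinished(ui, repo):
    state = cmdutil.getunfinishedstate(repo)
    if state is None:
        ui.status(b'no operation in progress\n')
        return
    ui.warn(b'%s\n' % state.msg())
    if state.hint():
        ui.warn(b'(%s)\n' % state.hint())
# --- end of sketch -----------------------------------------------------------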


def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    The statemod._unfinishedstates list is checked for an unfinished
    operation, and the corresponding 'continue' message is generated if
    the operation supports continuing.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _(b"continue: %s")
    for state in statemod._unfinishedstates:
        if not state._continueflag:
            continue
        if state.isunfinished(repo):
            return contmsg % state.continuemsg(), True
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return contmsg % _(b"hg commit"), False
    return None, None
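
# --- illustrative sketch (not part of cmdutil.py) ---------------------------
# Unpacking howtocontinue()'s (msg, warning) return value; checkafterresolved()
# below is the in-file consumer of this contract, so this only restates it in
# isolation. The reporting helper is hypothetical.
from mercurial import cmdutil


def _printcontinuehint(ui, repo):
    msg, warning = cmdutil.howtocontinue(repo)
    if msg is not None:
        write = ui.warn if warning else ui.note
        write(b'%s\n' % msg)
# --- end of sketch -----------------------------------------------------------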


def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's an unfinished operation that supports the continue flag,
    the message is reported with repo.ui.warn.

    Otherwise, it is reported with repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is not None:
        if warning:
            repo.ui.warn(b"%s\n" % msg)
        else:
            repo.ui.note(b"%s\n" % msg)


def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no active task (only the generic 'hg commit' suggestion),
    no hint is offered.
    '''
    after = howtocontinue(repo)
    hint = None
    if after[1]:
        hint = after[0]
    raise error.Abort(_(b'no %s in progress') % task, hint=hint)
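
# --- illustrative sketch (not part of cmdutil.py) ---------------------------
# Typical use of wrongtooltocontinue(): a command handling --continue aborts
# with a pointer to the right tool when its own state file is missing. The
# command and the b'mycommandstate' file name are hypothetical.
from mercurial import cmdutil
from mercurial.i18n import _


def mycontinue(ui, repo, **opts):
    if not repo.vfs.exists(b'mycommandstate'):
        cmdutil.wrongtooltocontinue(repo, _(b'mycommand'))
    # ... otherwise resume the interrupted operation
# --- end of sketch -----------------------------------------------------------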


def abortgraft(ui, repo, graftstate):
    """abort the interrupted graft and roll back to the state before the
    interrupted graft"""
    if not graftstate.exists():
        raise error.Abort(_(b"no interrupted graft to abort"))
    statedata = readgraftstate(repo, graftstate)
    newnodes = statedata.get(b'newnodes')
    if newnodes is None:
        # an old graft state which does not have all the data required to
        # abort the graft
        raise error.Abort(_(b"cannot abort using an old graftstate"))

    # changeset from which graft operation was started
    if len(newnodes) > 0:
        startctx = repo[newnodes[0]].p1()
    else:
        startctx = repo[b'.']
    # whether to strip or not
    cleanup = False
    from . import hg

    if newnodes:
        newnodes = [repo[r].rev() for r in newnodes]
        cleanup = True
        # checking that none of the newnodes turned public or is public
        immutable = [c for c in newnodes if not repo[c].mutable()]
        if immutable:
            repo.ui.warn(
                _(b"cannot clean up public changesets %s\n")
                % b', '.join(bytes(repo[r]) for r in immutable),
                hint=_(b"see 'hg help phases' for details"),
            )
            cleanup = False

        # checking that no new nodes are created on top of grafted revs
        desc = set(repo.changelog.descendants(newnodes))
        if desc - set(newnodes):
            repo.ui.warn(
                _(
                    b"new changesets detected on destination "
                    b"branch, can't strip\n"
                )
            )
            cleanup = False

        if cleanup:
            with repo.wlock(), repo.lock():
                hg.updaterepo(repo, startctx.node(), overwrite=True)
                # stripping the new nodes created
                strippoints = [
                    c.node() for c in repo.set(b"roots(%ld)", newnodes)
                ]
                repair.strip(repo.ui, repo, strippoints, backup=False)

    if not cleanup:
        # we don't update to the startnode if we can't strip
        startctx = repo[b'.']
        hg.updaterepo(repo, startctx.node(), overwrite=True)

    ui.status(_(b"graft aborted\n"))
    ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
    graftstate.delete()
    return 0


def readgraftstate(repo, graftstate):
    # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
    """read the graft state file and return a dict of the data stored in it"""
    try:
        return graftstate.read()
    except error.CorruptedState:
        nodes = repo.vfs.read(b'graftstate').splitlines()
        return {b'nodes': nodes}


def hgabortgraft(ui, repo):
    """abort logic for aborting graft using 'hg abort'"""
    with repo.wlock():
        graftstate = statemod.cmdstate(repo, b'graftstate')
        return abortgraft(ui, repo, graftstate)